Mirror of https://github.com/Akkudoktor-EOS/EOS.git (synced 2025-11-25 14:56:27 +00:00)

Compare commits: dependabot...NormannK-p (1 commit)

Commit: 447f7d05be
.dockerignore (changed)
@@ -1,8 +1,8 @@
 .git/
 .github/
-**/__pycache__/
-**/*.pyc
-**/*.egg-info/
+eos-data/
+mariadb-data/
+test_data/
 .dockerignore
 .env
 .gitignore
@@ -12,4 +12,4 @@ LICENSE
 Makefile
 NOTICE
 README.md
-.venv/
+.venv
.env (4 changes)
@@ -1,5 +1,5 @@
 EOS_VERSION=main
-EOS_SERVER__PORT=8503
-EOS_SERVER__EOSDASH_PORT=8504
+EOS_PORT=8503
+EOSDASH_PORT=8504
 
 PYTHON_VERSION=3.12.6
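The renamed variables feed the port mappings in docker-compose.yml (see that file's diff further down). A quick way to confirm the substitution after this change, as a sketch that assumes Docker Compose v2 and the repository root as working directory:

```bash
# Render the compose file with .env values substituted and check the port mappings
docker compose config | grep -E '8503|8504'
```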
.github/dependabot.yml (9 changes, vendored)
@@ -5,7 +5,16 @@
 
 version: 2
 updates:
+  # Update dependencies on the main branch
   - package-ecosystem: "pip" # See documentation for possible values
     directory: "/" # Location of package manifests
     schedule:
       interval: "weekly"
+    target-branch: "main" # Target the main branch
+
+  # Update dependencies on the feature/config-nested branch
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    target-branch: "feature/config-nested" # Target the specific feature branch
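GitHub validates the dependabot schema itself on push; a minimal local sanity check that the edited file is still well-formed YAML, assuming PyYAML is available in the active environment:

```bash
python -c "import yaml; yaml.safe_load(open('.github/dependabot.yml')); print('valid YAML')"
```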
.github/workflows/docker-build.yml (5 changes, vendored)
@@ -7,11 +7,13 @@ on:
   push:
     branches:
       - 'main'
+      - 'feature/config-overhaul'
     tags:
       - 'v*'
   pull_request:
     branches:
-      - '**'
+      - 'main'
+      - 'feature/config-overhaul'
 
 env:
   DOCKERHUB_REPO: akkudoktor/eos
@@ -195,7 +197,6 @@ jobs:
             type=ref,event=pr
             type=semver,pattern={{version}}
             type=semver,pattern={{major}}.{{minor}}
-            type=raw,value=latest,enable={{is_default_branch}}
           labels: |
             org.opencontainers.image.licenses=${{ env.EOS_LICENSE }}
           annotations: |
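With the type=raw line removed, a pushed release tag still yields version tags but no `latest` tag; per docker/metadata-action semantics (the tag name below is illustrative):

```bash
# For a hypothetical release tag v1.2.3 the remaining patterns produce:
#   akkudoktor/eos:1.2.3   (type=semver,pattern={{version}})
#   akkudoktor/eos:1.2     (type=semver,pattern={{major}}.{{minor}})
# 'latest' is no longer pushed, so pin an explicit version when pulling:
docker pull akkudoktor/eos:1.2.3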
.github/workflows/stale.yml (35 deletions, vendored)
@@ -1,35 +0,0 @@
-name: "Close stale pull requests/issues"
-on:
-  schedule:
-    - cron: "16 00 * * *"
-
-permissions:
-  contents: read
-
-jobs:
-  stale:
-    name: Find Stale issues and PRs
-    runs-on: ubuntu-22.04
-    if: github.repository == 'Akkudoktor-EOS/EOS'
-    permissions:
-      pull-requests: write # to comment on stale pull requests
-      issues: write # to comment on stale issues
-
-    steps:
-      - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
-        with:
-          stale-pr-message: 'This pull request has been marked as stale because it has been open (more
-            than) 90 days with no activity. Remove the stale label or add a comment saying that you
-            would like to have the label removed otherwise this pull request will automatically be
-            closed in 30 days. Note, that you can always re-open a closed pull request at any time.'
-          stale-issue-message: 'This issue has been marked as stale because it has been open (more
-            than) 90 days with no activity. Remove the stale label or add a comment saying that you
-            would like to have the label removed otherwise this issue will automatically be closed in
-            30 days. Note, that you can always re-open a closed issue at any time.'
-          days-before-stale: 90
-          days-before-close: 30
-          stale-issue-label: 'stale'
-          stale-pr-label: 'stale'
-          exempt-pr-labels: 'in progress'
-          exempt-issue-labels: 'feature request, enhancement'
-          operations-per-run: 400
.gitignore (5 changes, vendored)
@@ -179,7 +179,7 @@ cython_debug/
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
-.idea/
+#.idea/
 
 # General
 .DS_Store
@@ -260,6 +260,3 @@ tests/testdata/new_optimize_result*
 tests/testdata/openapi-new.json
 tests/testdata/openapi-new.md
 tests/testdata/config-new.md
-
-# FastHTML session key
-.sesskey
.gitlint (35 deletions)
@@ -1,35 +0,0 @@
-[general]
-# verbosity should be a value between 1 and 3, the commandline -v flags take precedence over this
-verbosity = 3
-
-regex-style-search=true
-
-# Ignore rules, reference them by id or name (comma-separated)
-ignore=title-trailing-punctuation, T3
-
-# Enable specific community contributed rules
-contrib=contrib-title-conventional-commits,CC1
-
-# Set the extra-path where gitlint will search for user defined rules
-extra-path=scripts/gitlint
-
-[title-max-length]
-line-length=80
-
-[title-min-length]
-min-length=5
-
-[ignore-by-title]
-# Match commit titles starting with "Release"
-regex=^Release(.*)
-ignore=title-max-length,body-min-length
-
-[ignore-by-body]
-# Match commits message bodies that have a line that contains 'release'
-regex=(.*)release(.*)
-ignore=all
-
-[ignore-by-author-name]
-# Match commits by author name (e.g. ignore dependabot commits)
-regex=dependabot
-ignore=all
.pre-commit-config.yaml
@@ -1,9 +1,8 @@
 # Exclude some file types from automatic code style
 exclude: \.(json|csv)$
 repos:
-  # --- Basic sanity checks ---
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v6.0.0
+    rev: v5.0.0
     hooks:
       - id: check-merge-conflict
       - id: check-toml
@@ -11,38 +10,31 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
       - id: check-merge-conflict
-        exclude: '\.rst$' # Exclude .rst files from whitespace cleanup
+        exclude: '\.rst$' # Exclude .rst files
 
-  # --- Import sorting ---
   - repo: https://github.com/PyCQA/isort
-    rev: 7.0.0
+    rev: 5.13.2
     hooks:
       - id: isort
+        name: isort
-  # --- Linting + Formatting via Ruff ---
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.1
+    rev: v0.6.8
     hooks:
-      # Run the linter and fix simple isssues automatically
+      # Run the linter and fix simple issues automatically
      - id: ruff
        args: [--fix]
-      # Run the formatter
+      # Run the formatter.
      - id: ruff-format
 
-  # --- Static type checking ---
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.18.2
+    rev: 'v1.13.0'
     hooks:
       - id: mypy
         additional_dependencies:
-          - types-requests==2.32.4.20250913
-          - pandas-stubs==2.3.2.250926
-          - tokenize-rt==3.2.0
+          - "types-requests==2.32.0.20241016"
+          - "pandas-stubs==2.2.3.241009"
+          - "numpy==2.1.3"
         pass_filenames: false
 
-  # --- Markdown linter ---
   - repo: https://github.com/jackdewinter/pymarkdown
-    rev: v0.9.32
+    rev: main
     hooks:
       - id: pymarkdown
         files: ^docs/
@@ -50,31 +42,3 @@ repos:
         args:
           - --config=docs/pymarkdown.json
           - scan
-
-  # --- Commit message linting ---
-  # - Local cross-platform hooks
-  - repo: local
-    hooks:
-      # Validate commit messages (using Python wrapper)
-      - id: commitizen-commit
-        name: Commitizen (venv-aware)
-        entry: python3 scripts/cz_check_commit_message.py
-        language: system
-        stages: [commit-msg]
-        pass_filenames: false
-
-      # Branch name check on push (using Python wrapper)
-      - id: commitizen-branch
-        name: Commitizen branch check
-        entry: python3 scripts/cz_check_branch.py
-        language: system
-        stages: [pre-push]
-        pass_filenames: false
-
-      # Validate new commit messages before push (using Python wrapper)
-      - id: commitizen-new-commits
-        name: Commitizen (check new commits only, .venv aware)
-        entry: python3 -m scripts.cz_check_new_commits
-        language: system
-        stages: [pre-push]
-        pass_filenames: false
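To exercise the trimmed hook set locally, the standard pre-commit workflow applies (commands from the pre-commit CLI, nothing project-specific assumed):

```bash
pip install pre-commit         # once, into the virtual environment
pre-commit install             # register the hooks from .pre-commit-config.yaml
pre-commit run --all-files     # run every configured hook over the whole repo
```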
CHANGELOG.md (277 deletions)
@@ -1,277 +0,0 @@
-# Changelog
-
-All notable changes to the akkudoktoreos project will be documented in this file.
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
-and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-## 0.2.0 (2025-11-09)
-
-The most important new feature is **automatic optimization**.
-EOS can now independently perform optimization at regular intervals.
-This is based on the configured system parameters and forecasts, and also uses supplied
-measurement data, such as the current battery SoC.
-The result is an energy-management plan as well as the optimization output.
-The existing optimization interface using `POST /optimize` remains available and can still
-be used as before.
-
-In addition, bugs were fixed and new features were added:
-
-- Automatic optimization creates a **default configuration** if none is provided.
-  This is intended to make it easier to create a custom configuration by adapting the default.
-- The parameters of the genetic optimization algorithm (number of generations, etc.) are now
-  configurable.
-- For home appliances, start windows can now be specified (experimental).
-- Configuration files from previous versions are converted to the current format on first launch.
-- There are now measurement keys that are permanently assigned to a specific device simulation.
-  This simplifies providing measurement values for device simulations (e.g. battery SoC).
-- The infrastructure and first applications for **feed-in tariff forecasting**
-  (currently only fixed tariffs) are now integrated.
-- EOSdash has been expanded with new tabs for displaying the **energy-management plan**
-  and **predictions**.
-- The documentation has been updated and expanded in many places.
-
-### Feat
-
-- Energy-management plan generation based on S2 standard instructions
-- Feed-in-tariff prediction support (incl. tests & docs)
-- `LoadAkkudoktorAdjusted` load prediction variant
-- Standardized measurement keys for battery/EV SoC
-- Measurement keys configurable via EOS configuration
-- Setup default device configuration for automatic optimization
-- Health endpoints show version + last optimization timestamps
-- Configuration of genetic algorithm parameters
-- Configuration options for home-appliance time windows
-- Mitigation of legacy configuration
-- Config backup enhancements:
-
-  - Timestamp-based backup IDs
-  - API to list backups
-  - API to revert to a specific backup
-  - EOSdash Admin tab integration
-
-- Pendulum date types via `pydantic_extra_types.pendulum_dt`
-- `Time`, `TimeWindow`, `TimeWindowSequence`, and `to_time` helpers in `datetimeutil`
-- Extended `DataRecord` with configurable field-like semantics
-- EOSdash: Solution view now displays genetic optimization results and aggregated totals
-- EOSdash UI:
-
-  - Plan tab
-  - Predictions tab
-  - Cache management in Admin tab
-  - About tab
-
-- Pydantic merge model tests
-- Developer profiling entry in Makefile
-- Changelog & docs updated for commitizen release flow
-- Developer documentation updated
-- Improved install & development documentation
-
-### Changed
-
-- Battery simulation
-
-  - Performance improvements
-  - Charge + start times now reflect realistic simulation
-
-- Appliance simulation:
-
-  - Time windows may roll over to next day
-
-- Revised load prediction by splitting original `LoadAkkudoktor` into:
-
-  - `LoadAkkudoktor`
-  - `LoadAkkudoktorAdjusted`
-
-### Fixed
-
-- Correct URL/path for Akkudoktor forum in README
-- Automatic optimization:
-
-  - Reuses previous start solution
-  - Interval execution + locking + new endpoints
-  - Properly loads required data
-  - EV charge-rate migration for proper availability
-
-- Genetic common settings consistently available
-- Config markdown generation
-- Recognize environment variables on EOS server startup
-- Remove `0.0.0.0 → localhost` translation on Windows
-- Allow hostnames as well as IPs
-- Access Pydantic model fields via class instead of instance
-- Down-sampling in `key_to_array`
-- `/v1/admin/cache/clear` clears all cache files; added `/clear-expired`
-- Use `tzfpy` instead of timezonefinder for more accurate EU timezones
-- Explicit provider settings in config instead of union
-- ClearOutside weather prediction irradiance calculation
-- Test config file priority without `config_eos` fixture
-- Complete optimization sample-request documentation
-- Replace gitlint with commitizen
-- Synchronize pre-commit config with real dependencies
-- Add missing `babel` to requirements
-- Fix documentation, tests, and implementation around optimization + predictions
-
-### Chore
-
-- Use memory cache for inverter interpolation
-- Refactor genetic modules (split config, remove device singleton)
-- Rename memory cache to `CacheEnergyManagementStore`
-- Use class properties for config/EMS/prediction mixins
-- Skip matplotlib debug logs
-- Auto-sync Bokeh JS CDN version
-- Rename `hello.py` → `about.py` in EOSdash
-- Remove EOSdash demo page
-- Split server test from system test
-- Move doc utils to `generate_config_md.py`
-- Improve documentation for pydantic merge models
-- Remove pendulum warning from README
-- Drop GitHub Discussions from contributing docs
-- Rename or reorganize files / classes during refactors
-
-### BREAKING CHANGES
-
-EOS configuration + v1 API have changed:
-
-- `available_charge_rates_percent` removed → replaced by `charge_rate`
-- Optimization parameter `hours` → renamed to `horizon_hours`
-- Device config must explicitly list devices + properties
-- Prediction providers now explicit (instead of union)
-- Measurement keys provided as lists
-- Feed-in-tariff providers must be explicitly configured
-- `/v1/measurement/loadxxx` endpoints removed → use generic measurement endpoints
-- `/v1/admin/cache/clear` now clears **all** cache files;
-  `/v1/admin/cache/clear-expired` only clears expired entries
-
-## v0.1.0 (2025-09-30)
-
-### Feat
-
-- added Changelog for 0.0.0 and 0.1.0
-
-## v0.0.0 (2025-09-30)
-
-This version represents one year of development of EOS (Energy Optimization System). From this point forward, release management will be introduced.
-
-### Feat
-
-#### Core Features
-- energy Management System (EMS) with battery optimization
-- PV (Photovoltaic) forecast integration with multiple providers
-- load prediction and forecasting capabilities
-- electricity price integration
-- VRM API integration for load and PV forecasting
-- battery State of Charge (SoC) prediction and optimization
-- inverter class with AC/DC charging logic
-- electric vehicle (EV) charging optimization with configurable currents
-- home appliance scheduling optimization
-- horizon validation for shading calculations
-
-#### API & Server
-- migration from Flask to FastAPI
-- RESTful API with comprehensive endpoints
-- EOSdash web interface for configuration and visualization
-- Docker support with multi-architecture builds
-- web-based visualization with interactive charts
-- OpenAPI/Swagger documentation
-- configurable server settings (port, host)
-
-#### Configuration & Data Management
-- JSON-based configuration system with nested support
-- configuration validation with Pydantic
-- device registry for managing multiple devices
-- persistent caching for predictions and prices
-- manual prediction updates
-- timezone support with automatic detection
-- configurable VAT rates for electricity prices
-
-#### Optimization
-- DEAP-based genetic algorithm optimization
-- multi-objective optimization (cost, battery usage, self-consumption)
-- 48-hour prediction and optimization window
-- AC/DC charging decision optimization
-- discharge hour optimization
-- start solution enforcement
-- fitness visualization with violin plots
-- self-consumption probability interpolator
-
-#### Testing & Quality
-- comprehensive test suite with pytest
-- unit tests for major components (EMS, battery, inverter, load, optimization)
-- integration tests for server endpoints
-- pre-commit hooks for code quality
-- type checking with mypy
-- code formatting with ruff and isort
-- markdown linting
-
-#### Documentation
-- conceptual documentation
-- API documentation with Sphinx
-- ReadTheDocs integration
-- Docker setup instructions
-- contributing guidelines
-- English README translation
-
-#### Providers & Integrations
-- PVForecast.Akkudoktor provider
-- BrightSky weather provider
-- ClearOutside weather provider
-- electricity price provider
-
-### Refactor
-
-- optimized Inverter class for improved SCR calculation performance
-- improved caching mechanisms for better performance
-- enhanced visualization with proper timestamp handling
-- updated dependency management with automatic Dependabot updates
-- restructured code into logical submodules
-- package directory structure reorganization
-- improved error handling and logging
-- Windows compatibility improvements
-
-### Fix
-
-- cross-site scripting (XSS) vulnerabilities
-- ReDoS vulnerability in duration parsing
-- timezone and daylight saving time handling
-- BrightSky provider with None humidity data
-- negative values in load mean adjusted calculations
-- SoC calculation bugs
-- AC charge efficiency in price calculations
-- optimization timing bugs
-- Docker BuildKit compatibility
-- float value handling in user horizon configuration
-- circular runtime import issues
-- load simulation data return issues
-- multiple optimization-related bugs
-
-### Build
-
-- Python version requirement updated to 3.10+
-- added Bandit security checks
-- improved credential management with environment variables
-
-#### Dependencies
-Major dependencies included in this release:
-- FastAPI 0.115.14
-- Pydantic 2.11.9
-- NumPy 2.3.3
-- Pandas 2.3.2
-- Scikit-learn 1.7.2
-- Uvicorn 0.36.0
-- Bokeh 3.8.0
-- Matplotlib 3.10.6
-- PVLib 0.13.1
-- Python-FastHTML 0.12.29
-
-### Notes
-
-#### Development Notes
-This version encompasses all development from the initial commit (February 16, 2024) through September 29, 2025. The project evolved from a basic energy optimization concept to a comprehensive energy management system with:
-- 698+ commits
-- multiple contributor involvement
-- continuous integration/deployment setup
-- automated dependency updates
-- comprehensive testing infrastructure
-
-#### Migration Notes
-As this is the initial versioned release, no migration is required. Future releases will include migration guides as needed.
CONTRIBUTING.md
@@ -14,20 +14,20 @@ Please report flaws or vulnerabilities in the [GitHub Issue Tracker](https://git
 
 ## Ideas & Features
 
-Issues in the [GitHub Issue Tracker](https://github.com/Akkudoktor-EOS/EOS/issues) are also fine
-to discuss ideas and features.
+Please first discuss the idea in a [GitHub Discussion](https://github.com/Akkudoktor-EOS/EOS/discussions) or the [Akkudoktor Forum](https://www.akkudoktor.net/forum/diy-energie-optimierungssystem-opensource-projekt/) before opening an issue.
 
-You may first discuss the idea in the [Akkudoktor Forum](https://www.akkudoktor.net/forum/diy-energie-optimierungssystem-opensource-projekt/) before opening an issue.
+There are just too many possibilities and the project would drown in tickets otherwise.
 
 ## Code Contributions
 
 We welcome code contributions and bug fixes via [Pull Requests](https://github.com/Akkudoktor-EOS/EOS/pulls).
-To make collaboration easier, we require pull requests to pass code style, unit tests, and commit
-message style checks.
+To make collaboration easier, we require pull requests to pass code style and unit tests.
 
 ### Setup development environment
 
 Setup virtual environment, then activate virtual environment and install development dependencies.
+See also [README.md](README.md).
 
 ```bash
 python -m venv .venv
@@ -60,7 +60,6 @@ To run formatting automatically before every commit:
 
 ```bash
 pre-commit install
-pre-commit install --hook-type commit-msg --hook-type pre-push
 ```
 
 Or run them manually:
@@ -76,18 +75,3 @@ Use `pytest` to run tests locally:
 ```bash
 python -m pytest -vs --cov src --cov-report term-missing tests/
 ```
-
-### Commit message style
-
-Our commit message checks use
-[`commitizen`](https://commitizen-tools.github.io/commitizen/#pre-commit-integration). The checks
-enforce the [`Conventional Commits`](https://www.conventionalcommits.org) commit message style.
-
-You may use [`commitizen`](https://commitizen-tools.github.io/commitizen) also to create a
-commit message and commit your change.
-
-## Thank you!
-
-And last but not least thanks to all our contributors
-
-[](https://github.com/Akkudoktor-EOS/EOS/graphs/contributors)
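The removed section enforced Conventional Commits via commitizen. For reference, a message that would have passed those checks looks like the sketch below (the example subject is taken from the changelog's Fixed list; `cz commit` is the standard commitizen command):

```bash
git commit -m "fix: recognize environment variables on EOS server startup"
# or let commitizen assemble the message interactively:
cz commit
```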
Dockerfile (28 changes)
@@ -1,9 +1,10 @@
-# syntax=docker/dockerfile:1.7
 ARG PYTHON_VERSION=3.12.7
 FROM python:${PYTHON_VERSION}-slim
 
 LABEL source="https://github.com/Akkudoktor-EOS/EOS"
 
+ENV VIRTUAL_ENV="/opt/venv"
+ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
 ENV MPLCONFIGDIR="/tmp/mplconfigdir"
 ENV EOS_DIR="/opt/eos"
 ENV EOS_CACHE_DIR="${EOS_DIR}/cache"
@@ -13,13 +14,6 @@ ENV EOS_CONFIG_DIR="${EOS_DIR}/config"
 # Overwrite when starting the container in a production environment
 ENV EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
 
-# Set environment variables to reduce threading needs
-ENV OPENBLAS_NUM_THREADS=1
-ENV OMP_NUM_THREADS=1
-ENV MKL_NUM_THREADS=1
-ENV PIP_PROGRESS_BAR=off
-ENV PIP_NO_COLOR=1
-
 WORKDIR ${EOS_DIR}
 
 RUN adduser --system --group --no-create-home eos \
@@ -35,32 +29,20 @@ RUN adduser --system --group --no-create-home eos \
 COPY requirements.txt .
 
 RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install --no-cache-dir -r requirements.txt
+    pip install -r requirements.txt
 
 COPY pyproject.toml .
-RUN mkdir -p src && pip install --no-cache-dir -e .
+RUN mkdir -p src && pip install -e .
 
 COPY src src
 
-# Create minimal default configuration for Docker to fix EOSDash accessibility (#629)
-# This ensures EOSDash binds to 0.0.0.0 instead of 127.0.0.1 in containers
-RUN echo '{\n\
-"server": {\n\
-"host": "0.0.0.0",\n\
-"port": 8503,\n\
-"startup_eosdash": true,\n\
-"eosdash_host": "0.0.0.0",\n\
-"eosdash_port": 8504\n\
-}\n\
-}' > "${EOS_CONFIG_DIR}/EOS.config.json" \
-    && chown eos:eos "${EOS_CONFIG_DIR}/EOS.config.json"
-
 USER eos
 ENTRYPOINT []
 
 EXPOSE 8503
 EXPOSE 8504
 
+ENV server_eosdash_host=0.0.0.0
 CMD ["python", "src/akkudoktoreos/server/eos.py", "--host", "0.0.0.0"]
 
 VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"]
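Because the baked-in default EOS.config.json is dropped here, a container that mounts a local config directory has to supply the file itself. A minimal sketch that reuses the exact JSON the removed RUN step generated (the mount path follows the Dockerfile's EOS_CONFIG_DIR):

```bash
mkdir -p config
cat > config/EOS.config.json <<'EOF'
{
  "server": {
    "host": "0.0.0.0",
    "port": 8503,
    "startup_eosdash": true,
    "eosdash_host": "0.0.0.0",
    "eosdash_port": 8504
  }
}
EOF
# Mount it when starting the container:
docker run -v "$(pwd)/config:/opt/eos/config" -p 8503:8503 -p 8504:8504 akkudoktor/eos:latest
```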
Makefile (67 changes)
@@ -1,5 +1,5 @@
 # Define the targets
-.PHONY: help venv pip install dist test test-full test-system test-ci test-profile docker-run docker-build docs read-docs clean format gitlint mypy run run-dev run-dash run-dash-dev bumps
+.PHONY: help venv pip install dist test test-full docker-run docker-build docs read-docs clean format mypy run run-dev
 
 # Default target
 all: help
@@ -11,7 +11,6 @@ help:
     @echo " pip - Install dependencies from requirements.txt."
     @echo " pip-dev - Install dependencies from requirements-dev.txt."
     @echo " format - Format source code."
-    @echo " gitlint - Lint last commit message."
     @echo " mypy - Run mypy."
     @echo " install - Install EOS in editable form (development mode) into virtual environment."
     @echo " docker-run - Run entire setup on docker"
@@ -20,18 +19,10 @@ help:
     @echo " read-docs - Read HTML documentation in your browser."
     @echo " gen-docs - Generate openapi.json and docs/_generated/*."
     @echo " clean-docs - Remove generated documentation."
-    @echo " run - Run EOS production server in virtual environment."
-    @echo " run-dev - Run EOS development server in virtual environment (automatically reloads)."
-    @echo " run-dash - Run EOSdash production server in virtual environment."
-    @echo " run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)."
-    @echo " test - Run tests."
-    @echo " test-full - Run tests with full optimization."
-    @echo " test-system - Run tests with system tests enabled."
-    @echo " test-ci - Run tests as CI does. No user config file allowed."
-    @echo " test-profile - Run single test optimization with profiling."
+    @echo " run - Run EOS production server in the virtual environment."
+    @echo " run-dev - Run EOS development server in the virtual environment (automatically reloads)."
     @echo " dist - Create distribution (in dist/)."
     @echo " clean - Remove generated documentation, distribution and virtual environment."
-    @echo " bump - Bump version to next release version."
 
 # Target to set up a Python 3 virtual environment
 venv:
@@ -50,7 +41,7 @@ pip-dev: pip
     @echo "Dependencies installed from requirements-dev.txt."
 
 # Target to install EOS in editable form (development mode) into virtual environment.
-install: pip-dev
+install: pip
     .venv/bin/pip install build
     .venv/bin/pip install -e .
     @echo "EOS installed in editable form (development mode)."
@@ -79,11 +70,6 @@ read-docs: docs
     @echo "Read the documentation in your browser"
     .venv/bin/python -m webbrowser build/docs/html/index.html
 
-# Clean Python bytecode
-clean-bytecode:
-    find . -type d -name "__pycache__" -exec rm -r {} +
-    find . -type f -name "*.pyc" -delete
-
 # Clean target to remove generated documentation and documentation artefacts
 clean-docs:
     @echo "Searching and deleting all '_autosum' directories in docs..."
@@ -99,19 +85,11 @@ clean: clean-docs
 
 run:
     @echo "Starting EOS production server, please wait..."
-    .venv/bin/python -m akkudoktoreos.server.eos
+    .venv/bin/python src/akkudoktoreos/server/eos.py
 
 run-dev:
     @echo "Starting EOS development server, please wait..."
-    .venv/bin/python -m akkudoktoreos.server.eos --host localhost --port 8503 --log_level DEBUG --startup_eosdash false --reload true
+    .venv/bin/python src/akkudoktoreos/server/eos.py --host localhost --port 8503 --reload true
 
-run-dash:
-    @echo "Starting EOSdash production server, please wait..."
-    .venv/bin/python -m akkudoktoreos.server.eosdash
-
-run-dash-dev:
-    @echo "Starting EOSdash development server, please wait..."
-    .venv/bin/python -m akkudoktoreos.server.eosdash --host localhost --port 8504 --log_level DEBUG --reload true
-
 # Target to setup tests.
 test-setup: pip-dev
@@ -122,34 +100,15 @@ test:
     @echo "Running tests..."
     .venv/bin/pytest -vs --cov src --cov-report term-missing
 
-# Target to run tests as done by CI on Github.
-test-ci:
-    @echo "Running tests as CI..."
-    .venv/bin/pytest --full-run --check-config-side-effect -vs --cov src --cov-report term-missing
-
-# Target to run tests including the system tests.
-test-system:
-    @echo "Running tests incl. system tests..."
-    .venv/bin/pytest --system-test -vs --cov src --cov-report term-missing
-
 # Target to run all tests.
 test-full:
     @echo "Running all tests..."
     .venv/bin/pytest --full-run
 
-# Target to run tests including the single test optimization with profiling.
-test-profile:
-    @echo "Running single test optimization with profiling..."
-    .venv/bin/python tests/single_test_optimization.py --profile
-
 # Target to format code.
 format:
     .venv/bin/pre-commit run --all-files
 
-# Target to trigger gitlint using pre-commit for the latest commit messages
-gitlint:
-    .venv/bin/cz check --rev-range main..HEAD
-
 # Target to format code.
 mypy:
     .venv/bin/mypy
@@ -160,17 +119,3 @@ docker-run:
 
 docker-build:
     @docker compose build --pull
-
-# Bump Akkudoktoreos version
-VERSION ?= 0.2.0+dev
-NEW_VERSION ?= $(VERSION)+dev
-
-bump: pip-dev
-    @echo "Bumping akkudoktoreos version from $(VERSION) to $(NEW_VERSION) (dry-run: $(EXTRA_ARGS))"
-    .venv/bin/python scripts/convert_lightweight_tags.py
-    .venv/bin/python scripts/bump_version.py $(VERSION) $(NEW_VERSION) $(EXTRA_ARGS)
-
-bump-dry: pip-dev
-    @echo "Bumping akkudoktoreos version from $(VERSION) to $(NEW_VERSION) (dry-run: --dry-run)"
-    .venv/bin/python scripts/convert_lightweight_tags.py
-    .venv/bin/python scripts/bump_version.py $(VERSION) $(NEW_VERSION) --dry-run
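For orientation, the targets that survive on the new side are driven the usual way (taken directly from the Makefile above):

```bash
make install   # editable install into .venv
make run-dev   # development server with auto-reload on localhost:8503
make test      # pytest with coverage report
```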
README.md (180 changes)
@@ -1,158 +1,116 @@
-(logo image)
-(image)
+# Energy System Simulation and Optimization
 
-**Build optimized energy management plans for your home automation**
+This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.
 
-AkkudoktorEOS is a comprehensive solution for simulating and optimizing energy systems based on
-renewable sources. Optimize your photovoltaic systems, battery storage, load management, and
-electric vehicles while considering real-time electricity pricing.
+Documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedocs.io/en/latest/).
 
-## Why use AkkudoktorEOS?
+## Getting Involved
 
-AkkudoktorEOS can be used to build energy management plans that are optimized for your specific
-setup of PV system, battery, electric vehicle, household load and electricity pricing. It can
-be integrated into home automation systems such as NodeRED, Home Assistant, EVCC.
+See [CONTRIBUTING.md](CONTRIBUTING.md).
 
-## 🏘️ Community
+## System requirements
 
-We are an open-source community-driven project and we love to hear from you. Here are some ways to
-get involved:
+- Python >= 3.11, < 3.13
+- Architecture: amd64, aarch64 (armv8)
+- OS: Linux, Windows, macOS
 
-- [GitHub Issue Tracker](https://github.com/Akkudoktor-EOS/EOS/issues): discuss ideas and features,
-and report bugs.
+Note: For Python 3.13 some dependencies (e.g. [Pendulum](https://github.com/python-pendulum/Pendulum)) are not yet available on https://pypi.org and have to be manually compiled (a recent [Rust](https://www.rust-lang.org/tools/install) installation is required).
 
-- [Akkudoktor Forum](https://www.akkudoktor.net/c/der-akkudoktor/eos): get direct suppport from the
-cummunity.
+Other architectures (e.g. armv6, armv7) are unsupported for now, because a multitude of dependencies are not available on https://piwheels.org and have to be built manually (a recent Rust installation and [GCC](https://gcc.gnu.org/) are required, Python 3.11 is recommended).
 
-## What do people build with AkkudoktorEOS
-
-The community uses AkkudoktorEOS to minimize grid energy consumption and to maximize the revenue
-from grid energy feed in with their home automation system.
-
-- Andreas Schmitz, [the Akkudoktor](https://www.youtube.com/@Akkudoktor), uses
-EOS integrated in his NodeRED home automation system for
-[OpenSource Energieoptimierung](https://www.youtube.com/watch?v=sHtv0JCxAYk).
-- Jörg, [meintechblog](https://www.youtube.com/@meintechblog), uses EOS for
-day-ahead optimization for time-variable energy prices. See:
-[So installiere ich EOS von Andreas Schmitz](https://www.youtube.com/watch?v=9XCPNU9UqSs)
-
-## Why not use AkkudoktorEOS?
-
-AkkudoktorEOS does not control your home automation assets. It must be integrated into a home
-automation system. If you do not use a home automation system or you feel uncomfortable with
-the configuration effort needed for the integration you should better use other solutions.
-
-## Quick Start
-
-Run EOS with Docker (access dashboard at `http://localhost:8504`):
-
-```bash
-docker run -d \
-  --name akkudoktoreos \
-  -p 8503:8503 \
-  -p 8504:8504 \
-  -e OPENBLAS_NUM_THREADS=1 \
-  -e OMP_NUM_THREADS=1 \
-  -e MKL_NUM_THREADS=1 \
-  -e EOS_SERVER__HOST=0.0.0.0 \
-  -e EOS_SERVER__EOSDASH_HOST=0.0.0.0 \
-  -e EOS_SERVER__EOSDASH_PORT=8504 \
-  --ulimit nproc=65535:65535 \
-  --ulimit nofile=65535:65535 \
-  --security-opt seccomp=unconfined \
-  akkudoktor/eos:latest
-```
-
-## System Requirements
-
-- **Python**: 3.11 or higher
-- **Architecture**: amd64, aarch64 (armv8)
-- **OS**: Linux, Windows, macOS
-
-> **Note**: Other architectures (armv6, armv7) require manual compilation of dependencies with Rust and GCC.
-
 ## Installation
 
-### Docker (Recommended)
+Docker images (amd64/aarch64) can be found at [akkudoktor/eos](https://hub.docker.com/r/akkudoktor/eos).
 
-```bash
-docker pull akkudoktor/eos:latest
-docker compose up -d
-```
+Following sections describe how to locally start the EOS server on `http://localhost:8503`.
 
-Access the API at `http://localhost:8503` (docs at `http://localhost:8503/docs`)
+### Run from source
 
-### From Source
+Install dependencies in virtual environment:
 
-```bash
-git clone https://github.com/Akkudoktor-EOS/EOS.git
-cd EOS
-```
-
-**Linux:**
+Linux:
 
 ```bash
 python -m venv .venv
 .venv/bin/pip install -r requirements.txt
 .venv/bin/pip install -e .
-.venv/bin/python -m akkudoktoreos.server.eos
 ```
 
-**Windows:**
+Windows:
 
 ```cmd
 python -m venv .venv
 .venv\Scripts\pip install -r requirements.txt
 .venv\Scripts\pip install -e .
-.venv\Scripts\python -m akkudoktoreos.server.eos
 ```
 
+Finally, start the EOS server to access it at `http://localhost:8503` (API docs at `http://localhost:8503/docs`):
+
+Linux:
+
+```bash
+.venv/bin/python src/akkudoktoreos/server/eos.py
+```
+
+Windows:
+
+```cmd
+.venv\Scripts\python src/akkudoktoreos/server/eos.py
+```
+
+### Docker
+
+Start EOS with following command to access it at `http://localhost:8503` (API docs at `http://localhost:8503/docs`):
+
+```bash
+docker compose up
+```
+
 ## Configuration
 
-EOS uses `EOS.config.json` for configuration. If the file doesn't exist, a default configuration is
-created automatically.
+This project uses the `EOS.config.json` file to manage configuration settings.
 
-### Custom Configuration Directory
+### Default Configuration
 
-```bash
-export EOS_DIR=/path/to/your/config
-```
+A default configuration file `default.config.json` is provided. This file contains all the necessary configuration keys with their default values.
 
-### Configuration Methods
+### Custom Configuration
 
-1. **EOSdash** (Recommended) - Web interface at `http://localhost:8504`
-2. **Manual** - Edit `EOS.config.json` directly
-3. **API** - Use the [Server API](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json)
+Users can specify a custom configuration directory by setting the environment variable `EOS_DIR`.
 
-See the [documentation](https://akkudoktor-eos.readthedocs.io/) for all configuration options.
+- If the directory specified by `EOS_DIR` contains an existing `config.json` file, the application will use this configuration file.
+- If the `EOS.config.json` file does not exist in the specified directory, the `default.config.json` file will be copied to the directory as `EOS.config.json`.
 
-## Port Configuration
+### Configuration Updates
 
-**Default ports**: 8503 (API), 8504 (Dashboard)
+If the configuration keys in the `EOS.config.json` file are missing or different from those in `default.config.json`, they will be automatically updated to match the default settings, ensuring that all required keys are present.
 
-If running on shared systems (e.g., Synology NAS), these ports may conflict with system services. Reconfigure port mappings as needed:
+## Classes and Functionalities
 
-```bash
-docker run -p 8505:8503 -p 8506:8504 ...
-```
+This project uses various classes to simulate and optimize the components of an energy system. Each class represents a specific aspect of the system, as described below:
 
-## API Documentation
+- `Battery`: Simulates a battery storage system, including capacity, state of charge, and now charge and discharge losses.
+- `PVForecast`: Provides forecast data for photovoltaic generation, based on weather data and historical generation data.
+- `Load`: Models the load requirements of a household or business, enabling the prediction of future energy demand.
+- `Heatpump`: Simulates a heat pump, including its energy consumption and efficiency under various operating conditions.
+- `Strompreis`: Provides information on electricity prices, enabling optimization of energy consumption and generation based on tariff information.
+- `EMS`: The Energy Management System (EMS) coordinates the interaction between the various components, performs optimization, and simulates the operation of the entire energy system.
 
-Interactive API docs available at:
-- Swagger UI: `http://localhost:8503/docs`
-- OpenAPI Spec: [View Online](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json)
+These classes work together to enable a detailed simulation and optimization of the energy system. For each class, specific parameters and settings can be adjusted to test different scenarios and strategies.
 
-## Resources
+### Customization and Extension
 
-- [Full Documentation](https://akkudoktor-eos.readthedocs.io/)
-- [Installation Guide (German)](https://www.youtube.com/watch?v=9XCPNU9UqSs)
+Each class is designed to be easily customized and extended to integrate additional functions or improvements. For example, new methods can be added for more accurate modeling of PV system or battery behavior. Developers are invited to modify and extend the system according to their needs.
 
-## Contributing
+## Server API
 
-We welcome contributions! See [CONTRIBUTING](CONTRIBUTING.md) for guidelines.
+See the Swagger API documentation for detailed information: [EOS OpenAPI Spec](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json)
 
-[](https://github.com/Akkudoktor-EOS/EOS/graphs/contributors)
+## Further resources
 
-## License
-
-This project is licensed under the Apache License 2.0 - see the LICENSE file for details.
+- [Installation guide (de)](https://meintechblog.de/2024/09/05/andreas-schmitz-joerg-installiert-mein-energieoptimierungssystem/)
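Whichever README variant applies, a started server can be smoke-tested against the documented endpoints (URLs as given in the README; curl assumed available):

```bash
curl -s http://localhost:8503/docs > /dev/null && echo "EOS API is up"
```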
docker-compose.yml
@@ -5,53 +5,20 @@ networks:
 services:
   eos:
     image: "akkudoktor/eos:${EOS_VERSION}"
-    container_name: "akkudoktoreos"
     read_only: true
     build:
       context: .
       dockerfile: "Dockerfile"
       args:
         PYTHON_VERSION: "${PYTHON_VERSION}"
-    env_file:
-      - .env
     environment:
-      - OPENBLAS_NUM_THREADS=1
-      - OMP_NUM_THREADS=1
-      - MKL_NUM_THREADS=1
-      - PIP_PROGRESS_BAR=off
-      - PIP_NO_COLOR=1
       - EOS_CONFIG_DIR=config
+      - latitude=52.2
+      - longitude=13.4
+      - elecprice_provider=ElecPriceAkkudoktor
+      - elecprice_charges_kwh=0.21
       - EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
-      - EOS_SERVER__HOST=0.0.0.0
-      - EOS_SERVER__PORT=8503
-      - EOS_SERVER__EOSDASH_HOST=0.0.0.0
-      - EOS_SERVER__EOSDASH_PORT=8504
-    ulimits:
-      nproc: 65535
-      nofile: 65535
-    security_opt:
-      - seccomp:unconfined
-    restart: unless-stopped
     ports:
       # Configure what ports to expose on host
-      - "${EOS_SERVER__PORT}:8503"
-      - "${EOS_SERVER__EOSDASH_PORT}:8504"
+      - "${EOS_PORT}:8503"
+      - "${EOSDASH_PORT}:8504"
 
-    # Volume mount configuration (optional)
-    # IMPORTANT: When mounting local directories, the default config won't be available.
-    # You must create an EOS.config.json file in your local config directory with:
-    # {
-    #   "server": {
-    #     "host": "0.0.0.0",  # Required for Docker container accessibility
-    #     "port": 8503,
-    #     "startup_eosdash": true,
-    #     "eosdash_host": "0.0.0.0",  # Required for Docker container accessibility
-    #     "eosdash_port": 8504
-    #   }
-    # }
-    #
-    # Example volume mounts (uncomment to use):
-    # volumes:
-    #   - ./config:/opt/eos/config  # Mount local config directory
-    #   - ./cache:/opt/eos/cache  # Mount local cache directory
-    #   - ./output:/opt/eos/output  # Mount local output directory
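A typical bring-up with this compose file (standard Docker Compose commands; the service name `eos` comes from the file above):

```bash
docker compose up -d
docker compose ps            # both 8503 and 8504 should be published
docker compose logs -f eos   # watch the EOS server start
```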
(One file diff suppressed because it is too large.)
docs/_static/eos.css (3 deletions, vendored)
@@ -1,3 +0,0 @@
-.wy-nav-content {
-    max-width: 90% !important;
-}
docs/_static/logo_dark.png (binary, vendored): deleted, binary file not shown (was 7.5 KiB).
(documentation file, name not shown on the page)
@@ -28,7 +28,7 @@ management.
 Energy management is the overall process to provide planning data for scheduling the different
 devices in your system in an optimal way. Energy management cares for the update of predictions and
 the optimization of the planning based on the simulated behavior of the devices. The planning is on
-the hour.
+the hour. Sub-hour energy management is left
 
 ### Optimization
 
@@ -1,849 +0,0 @@
% SPDX-License-Identifier: Apache-2.0

(configtimewindow-page)=

# Time Window Sequence Configuration

## Overview

The `TimeWindowSequence` model is used to configure allowed time slots for home appliance runs.
It contains a collection of `TimeWindow` objects that define when appliances can operate.

## Basic Structure

A `TimeWindowSequence` is configured as a JSON object with a `windows` array:

```json
{
  "windows": [
    {
      "start_time": "09:00",
      "duration": "PT2H",
      "day_of_week": null,
      "date": null,
      "locale": null
    }
  ]
}
```

## TimeWindow Fields

Each `TimeWindow` object has the following fields:

- **`start_time`** (required): Time when the window begins
- **`duration`** (required): How long the window lasts
- **`day_of_week`** (optional): Restrict to a specific day of the week
- **`date`** (optional): Restrict to a specific calendar date
- **`locale`** (optional): Language for day name parsing

## Time Formats

### Start Time (`start_time`)

The `start_time` field accepts various time formats:

| Format             | Example                                          | Meaning                    |
| ------------------ | ------------------------------------------------ | -------------------------- |
| 24-hour            | `"14:30"`                                        | 2:30 PM                    |
| 12-hour with AM/PM | `"2:30 PM"`                                      | 2:30 PM                    |
| Compact            | `"1430"`                                         | 2:30 PM                    |
| With seconds       | `"14:30:45"`                                     | 2:30:45 PM                 |
| With microseconds  | `"14:30:45.123456"`                              | 2:30:45.123456 PM          |
| European           | `"14h30"`                                        | 2:30 PM                    |
| Hour only          | `"14"`                                           | 2:00 PM                    |
| Hour with AM/PM    | `"2PM"`                                          | 2:00 PM                    |
| Decimal time       | `"14.5"`                                         | 2:30 PM (14:30)            |
| With timezone      | `"14:30 UTC"`, `"2:30 PM EST"`, `"14:30 +05:30"` | Time in the given timezone |
### Duration (`duration`)

The `duration` field supports multiple formats for maximum flexibility:

#### ISO 8601 Duration Format (Recommended)

Must start with `PT` and use the duration specifiers `H`, `M`, and `S`:

- `"PT2H30M"` (2 hours 30 minutes)
- `"PT3H"` (3 hours)
- `"PT90M"` (90 minutes)
- `"PT1H30M45S"` (1 hour 30 minutes 45 seconds)

#### Human-Readable String Format

The system accepts natural language duration strings built from valid time units
(day/days, hour/hours, minute/minutes, second/seconds), in singular or plural form:

- `"2 hours 30 minutes"`
- `"3 hours"`
- `"90 minutes"`
- `"1 hour 30 minutes 45 seconds"`
- `"2 days 5 hours"`
- `"1 day 2 hours 30 minutes"`
- `"1 day"` (singular), `"2 days"` (plural)
- `"1 hour"` (singular), `"5 hours"` (plural)

#### Numeric Formats

Seconds as an integer or float:

- `3600` (3600 seconds = 1 hour)
- `1800` (1800 seconds = 30 minutes)
- `3600.5` (3600.5 seconds = 1 hour 0.5 seconds)

#### Tuple Format `[days, hours, minutes, seconds]`

- `[0, 2, 30, 0]` (2 hours 30 minutes)
- `[1, 0, 0, 0]` (1 day)
- `[0, 0, 45, 30]` (45 minutes 30 seconds)
- `[2, 5, 15, 45]` (2 days, 5 hours, 15 minutes, 45 seconds)

#### Mixed Time Units

You can combine different time units in string format:

- `"1 day 4 hours 30 minutes 15 seconds"`
- `"3 days 2 hours"`
- `"45 minutes 30 seconds"`

#### Common Duration Examples

Each duration can be written in any of the supported formats:

| Use case                        | String                 | ISO 8601    | Alternative             |
| ------------------------------- | ---------------------- | ----------- | ----------------------- |
| Short (quick appliance cycle)   | `"30 minutes"`         | `"PT30M"`   | `1800` (seconds)        |
| Medium                          | `"2 hours 15 minutes"` | `"PT2H15M"` | `[0, 2, 15, 0]` (tuple) |
| Long (all-day appliance window) | `"1 day 8 hours"`      | `"PT32H"`   | `[1, 8, 0, 0]` (tuple)  |

#### Validation Rules for Duration

- **ISO 8601 format**: Must start with `PT` and use valid duration specifiers (H, M, S)
- **String format**: Must contain valid time units (day/days, hour/hours, minute/minutes, second/seconds)
- **Numeric format**: Must be a positive number representing seconds
- **Tuple format**: Must be exactly 4 elements: [days, hours, minutes, seconds]
- **All formats**: Duration must be positive (greater than 0)

#### Duration Format Recommendations

1. **Use ISO 8601 format** for API consistency: `"PT2H30M"`
2. **Use human-readable strings** for configuration files: `"2 hours 30 minutes"`
3. **Use numeric format** for programmatic calculations: `9000` (seconds)
4. **Use tuple format** for structured data: `[0, 2, 30, 0]`

#### Error Handling for Duration

Common duration errors and solutions:

- **Invalid ISO format**: Ensure the proper `PT` prefix and valid specifiers
- **Unknown time units**: Use day/days, hour/hours, minute/minutes, second/seconds
- **Negative duration**: All durations must be positive
- **Invalid tuple length**: The tuple must have exactly 4 elements
- **String too long**: Duration strings have a maximum length limit for security
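All of these formats describe the same kind of value: a fixed length of time. As a rough
illustration of how the numeric and tuple forms map onto one another, here is a small
self-contained Python sketch using only the standard library. It is not EOS's parser, just a
model of the conversion rules described above:

```python
from datetime import timedelta

def duration_from_tuple(parts: list[int]) -> timedelta:
    """Convert the [days, hours, minutes, seconds] tuple format to a timedelta."""
    if len(parts) != 4:
        raise ValueError("Tuple format must have exactly 4 elements")
    days, hours, minutes, seconds = parts
    td = timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
    if td.total_seconds() <= 0:
        raise ValueError("Duration must be positive")
    return td

# [0, 2, 30, 0] is 2 hours 30 minutes, i.e. 9000 seconds -- the same value
# as the numeric format 9000 or the ISO 8601 string "PT2H30M".
assert duration_from_tuple([0, 2, 30, 0]) == timedelta(seconds=9000)
```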
## Day of Week Restrictions

### Using Numbers (0=Monday, 6=Sunday)

- `"day_of_week": 0` (Monday)
- `"day_of_week": 6` (Sunday)

### Using English Day Names

- `"day_of_week": "Monday"`
- `"day_of_week": "sunday"` (case insensitive)

### Using Localized Day Names

When using localized day names, set the matching `locale`:

```json
{
  "day_of_week": "Montag",  // German for Monday
  "locale": "de"
}
```

```json
{
  "day_of_week": "Lundi",  // French for Monday
  "locale": "fr"
}
```

## Date Restrictions

### Specific Date

```json
{
  "date": "2024-12-25"  // Christmas Day 2024
}
```

**Note**: When `date` is specified, `day_of_week` is ignored.
## Complete Examples

### Example 1: Basic Daily Window

Allow the appliance to run between 9:00 AM and 11:00 AM every day:

```json
{
  "windows": [
    { "start_time": "09:00", "duration": "PT2H" }
  ]
}
```

### Example 2: Weekdays Only

Allow the appliance to run between 8:00 AM and 6:00 PM on weekdays:

```json
{
  "windows": [
    { "start_time": "08:00", "duration": "PT10H", "day_of_week": 0 },
    { "start_time": "08:00", "duration": "PT10H", "day_of_week": 1 },
    { "start_time": "08:00", "duration": "PT10H", "day_of_week": 2 },
    { "start_time": "08:00", "duration": "PT10H", "day_of_week": 3 },
    { "start_time": "08:00", "duration": "PT10H", "day_of_week": 4 }
  ]
}
```

### Example 3: Multiple Daily Windows

Allow the appliance to run during morning and evening hours:

```json
{
  "windows": [
    { "start_time": "06:00", "duration": "PT3H" },
    { "start_time": "18:00", "duration": "PT4H" }
  ]
}
```

### Example 4: Weekend Special Hours

Different hours for weekdays and weekends:

```json
{
  "windows": [
    { "start_time": "08:00", "duration": "PT8H", "day_of_week": "Monday" },
    { "start_time": "08:00", "duration": "PT8H", "day_of_week": "Tuesday" },
    { "start_time": "08:00", "duration": "PT8H", "day_of_week": "Wednesday" },
    { "start_time": "08:00", "duration": "PT8H", "day_of_week": "Thursday" },
    { "start_time": "08:00", "duration": "PT8H", "day_of_week": "Friday" },
    { "start_time": "10:00", "duration": "PT6H", "day_of_week": "Saturday" },
    { "start_time": "10:00", "duration": "PT6H", "day_of_week": "Sunday" }
  ]
}
```

### Example 5: Holiday Schedule

A special schedule for a specific date:

```json
{
  "windows": [
    { "start_time": "10:00", "duration": "PT4H", "date": "2024-12-25" }
  ]
}
```

### Example 6: Localized Configuration

Using German day names:

```json
{
  "windows": [
    { "start_time": "14:00", "duration": "PT2H", "day_of_week": "Montag", "locale": "de" },
    { "start_time": "14:00", "duration": "PT2H", "day_of_week": "Mittwoch", "locale": "de" },
    { "start_time": "14:00", "duration": "PT2H", "day_of_week": "Freitag", "locale": "de" }
  ]
}
```

### Example 7: Complex Schedule with Timezones

Multiple windows with different timezones:

```json
{
  "windows": [
    { "start_time": "09:00 UTC", "duration": "PT4H", "day_of_week": "Monday" },
    { "start_time": "2:00 PM EST", "duration": "PT3H", "day_of_week": "Friday" }
  ]
}
```

### Example 8: Night Shift Schedule

Crossing midnight (note: each window lies within a single day):

```json
{
  "windows": [
    { "start_time": "22:00", "duration": "PT2H" },
    { "start_time": "00:00", "duration": "PT6H" }
  ]
}
```
## Advanced Usage Patterns

### Off-Peak Hours

Configure the appliance to run during off-peak electricity hours:

```json
{
  "windows": [
    { "start_time": "23:00", "duration": "PT1H" },
    { "start_time": "00:00", "duration": "PT7H" }
  ]
}
```

### Workday Lunch Break

Allow the appliance to run during the lunch break on workdays:

```json
{
  "windows": [
    { "start_time": "12:00", "duration": "PT1H", "day_of_week": 0 },
    { "start_time": "12:00", "duration": "PT1H", "day_of_week": 1 },
    { "start_time": "12:00", "duration": "PT1H", "day_of_week": 2 },
    { "start_time": "12:00", "duration": "PT1H", "day_of_week": 3 },
    { "start_time": "12:00", "duration": "PT1H", "day_of_week": 4 }
  ]
}
```

### Seasonal Schedule

Different schedules for different dates:

```json
{
  "windows": [
    { "start_time": "08:00", "duration": "PT10H", "date": "2024-06-21" },
    { "start_time": "09:00", "duration": "PT8H", "date": "2024-12-21" }
  ]
}
```
## Common Patterns

### 1. Always Available

```json
{
  "windows": [
    { "start_time": "00:00", "duration": "PT24H" }
  ]
}
```

### 2. Business Hours

```json
{
  "windows": [
    { "start_time": "09:00", "duration": "PT8H", "day_of_week": 0 },
    { "start_time": "09:00", "duration": "PT8H", "day_of_week": 1 },
    { "start_time": "09:00", "duration": "PT8H", "day_of_week": 2 },
    { "start_time": "09:00", "duration": "PT8H", "day_of_week": 3 },
    { "start_time": "09:00", "duration": "PT8H", "day_of_week": 4 }
  ]
}
```

### 3. Never Available

```json
{
  "windows": []
}
```
## Validation Rules

- `start_time` must be a valid time format
- `duration` must be a positive duration
- `day_of_week` must be 0-6 (integer) or a valid day name (string)
- `date` must be a valid ISO date format (YYYY-MM-DD)
- If `date` is specified, `day_of_week` is ignored
- `locale` must be a valid locale code when using localized day names

## Tips and Best Practices

1. **Use 24-hour format** for clarity: `"14:30"` instead of `"2:30 PM"`
2. **Keep durations reasonable** for appliance operation cycles
3. **Test timezone handling** if using timezone-aware times
4. **Use specific dates** for holiday schedules
5. **Consider overlapping windows** for flexibility
6. **Use localization** for international deployments
7. **Document your patterns** for maintenance

## Error Handling

Common errors and solutions:

- **Invalid time format**: Use one of the supported time formats listed above
- **Invalid duration**: Use the ISO 8601 duration format (e.g. `PT1H30M`)
- **Invalid day name**: Check spelling and locale settings
- **Invalid date**: Use the YYYY-MM-DD format
- **Unknown locale**: Use standard locale codes (en, de, fr, etc.)
## Integration Examples

### Python Usage

```python
# TimeWindowSequence must be imported from EOS; the exact module path depends
# on your EOS version. json_string holds a JSON document as shown above.
from pydantic import ValidationError

try:
    config = TimeWindowSequence.model_validate_json(json_string)
    print(f"Configured {len(config.windows)} time windows")
except ValidationError as e:
    print(f"Configuration error: {e}")
```

### API Configuration

```json
{
  "device_id": "dishwasher_01",
  "time_windows": {
    "windows": [
      { "start_time": "22:00", "duration": "PT2H" },
      { "start_time": "06:00", "duration": "PT2H" }
    ]
  }
}
```
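To make the window semantics concrete, the following self-contained Python sketch models the
containment check described above (start time plus duration, optional day-of-week restriction).
It is an illustration of the rules in this document, not EOS's implementation; the class and
function names are hypothetical:

```python
from dataclasses import dataclass
from datetime import datetime, time, timedelta

@dataclass
class Window:
    """Simplified stand-in for a TimeWindow: start time, duration, optional weekday."""
    start: time
    duration: timedelta
    day_of_week: int | None = None  # 0=Monday .. 6=Sunday, None = every day

def in_window(dt: datetime, w: Window) -> bool:
    """Return True if dt falls inside the window on a matching day."""
    if w.day_of_week is not None and dt.weekday() != w.day_of_week:
        return False
    start = dt.replace(hour=w.start.hour, minute=w.start.minute,
                       second=0, microsecond=0)
    return start <= dt < start + w.duration

# 09:00 for 2 hours, every day -- 10:30 is inside the window, 11:30 is not.
w = Window(start=time(9, 0), duration=timedelta(hours=2))
assert in_window(datetime(2024, 5, 6, 10, 30), w)
assert not in_window(datetime(2024, 5, 6, 11, 30), w)
```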
@@ -1,16 +1,16 @@
 % SPDX-License-Identifier: Apache-2.0
-(configuration-page)=
 
-# Configuration Guideline
+# Configuration
 
 The configuration controls all aspects of EOS: optimization, prediction, measurement, and energy
 management.
 
 ## Storing Configuration
 
-EOS stores configuration data in a `nested structure`. Note that configuration changes inside EOS
-are updated in memory, meaning all changes will be lost upon restarting the EOS REST server if not
-saved to the `EOS configuration file`.
+EOS stores configuration data in a **key-value store**, where a `configuration key` refers to the
+unique identifier used to store and retrieve specific configuration data. Note that the key-value
+store is memory-based, meaning all stored data will be lost upon restarting the EOS REST server if
+not saved to the `EOS configuration file`.
 
 Some `configuration keys` are read-only and cannot be altered. These keys are either set up by other
 means, such as environment variables, or determined from other information.
@@ -25,8 +25,7 @@ Use endpoint `PUT /v1/config/file` to save the current configuration to the
 
 ### Load Configuration File
 
-Use endpoint `POST /v1/config/reset` to reset the configuration to the values in the
-`EOS configuration file`.
+Use endpoint `POST /v1/config/update` to update the configuration from the `EOS configuration file`.
 
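As a quick illustration of the save and reset endpoints named around this hunk, a hedged example
assuming EOS listens on the default `127.0.0.1:8503` used elsewhere in these docs (host, port, and
the HTTP methods follow the base side of this diff):

```bash
# Save the current runtime configuration to the EOS configuration file.
curl -X PUT "http://127.0.0.1:8503/v1/config/file"

# Reset the runtime configuration to the values in the configuration file.
curl -X POST "http://127.0.0.1:8503/v1/config/reset"
```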
 ## Configuration Sources and Priorities
 
@@ -37,25 +36,26 @@ The configuration sources and their priorities are as follows:
 3. `EOS Configuration File`: Read at startup of the REST server and on request
 4. `Default Values`
 
-### Runtime Config Updates
+### Settings
 
-The EOS configuration can be updated at runtime. Note that those updates are not persistent
-automatically. However it is possible to save the configuration to the `EOS configuration file`.
+Settings are sets of configuration data that take precedence over all other configuration data from
+different sources. Note that settings are not persistent. To make the current configuration with the
+current settings persistent, save the configuration to the `EOS configuration file`.
 
-Use the following endpoints to change the current runtime configuration:
+Use the following endpoints to change the current configuration settings:
 
-- `PUT /v1/config`: Update the entire or parts of the configuration.
+- `PUT /v1/config`: Replaces the entire configuration settings.
+- `PUT /v1/config/value`: Sets a specific configuration option.
 
 ### Environment Variables
 
-All `configuration keys` can be set by environment variables prefixed with `EOS_` and separated by
-`__` for nested structures. Environment variables are case insensitive.
-
-EOS recognizes the following special environment variables (case sensitive):
+All `configuration keys` can be set by environment variables with the same name. EOS recognizes the
+following special environment variables:
 
 - `EOS_CONFIG_DIR`: The directory to search for an EOS configuration file.
 - `EOS_DIR`: The directory used by EOS for data, which will also be searched for an EOS
   configuration file.
+- `EOS_LOGGING_LEVEL`: The logging level to use in EOS.
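A short example of the nested-prefix convention described on the base side of this hunk. The
variable `EOS_LOGGING__CONSOLE_LEVEL` appears in the logging chapter of these docs; the directory
path is illustrative:

```bash
# Nested configuration keys use the EOS_ prefix with __ as the separator,
# e.g. logging.console_level becomes:
export EOS_LOGGING__CONSOLE_LEVEL="DEBUG"

# Special variables are read directly:
export EOS_CONFIG_DIR="/etc/eos"
```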
 ### EOS Configuration File
 
@@ -66,7 +66,7 @@ If you do not have a configuration file, it will be automatically created on the
 the REST server in a system-dependent location.
 
 To determine the location of the configuration file used by EOS, ask the REST server. The endpoint
-`GET /v1/config` provides the `general.config_file_path` configuration key.
+`GET /v1/config` provides the `config_file_path` configuration key.
 
 EOS searches for the configuration file in the following order:
 
@@ -75,15 +75,9 @@ EOS searches for the configuration file in the following order:
 3. A platform-specific default directory for EOS
 4. The current working directory
 
-The first configuration file available in these directories is loaded. If no configuration file is
-found, a default configuration file is created, and the default settings are written to it. The
-location of the created configuration file follows the same order in which EOS searches for
-configuration files, and it depends on whether the relevant environment variables are set.
-
-Use the following endpoints to interact with the configuration file:
-
-- `PUT /v1/config/file`: Save the current configuration to the configuration file.
-- `PUT /v1/config/reset`: Reload the configuration file, all unsaved runtime configuration is reset.
+The first available configuration file found in these directories is loaded. If no configuration
+file is found, a default configuration file is created in the platform-specific default directory,
+and default settings are loaded into it.
 
 ### Default Values
 
@@ -20,8 +20,8 @@ Andreas Schmitz uses [Node-RED](https://nodered.org/) as part of his home automa
 
 ### Node-Red Resources
 
-- [Installation Guide (German)](https://www.youtube.com/playlist?list=PL8_vk9A-s7zLD865Oou6y3EeQLlNtu-Hn)
-  \— A detailed guide on integrating EOS with `Node-RED`.
+- [Installation Guide (German)](https://meintechblog.de/2024/09/05/andreas-schmitz-joerg-installiert-mein-energieoptimierungssystem/)
+  \— A detailed guide on integrating an early version of EOS with `Node-RED`.
 
 ## Home Assistant
 
@@ -32,9 +32,5 @@ emphasizes local control and user privacy.
 
 ### Home Assistant Resources
 
-- Duetting's [EOS Home Assistant Addon](https://github.com/Duetting/ha_eos_addon).
-
-## EOS Connect
-
-[EOS connect](https://github.com/ohAnd/EOS_connect) uses `EOS` for energy management and optimization,
-and connects to smart home platforms to monitor, forecast, and control energy flows.
+- Duetting's [EOS Home Assistant Addon](https://github.com/Duetting/ha_eos_addon) — Additional
+  details can be found in this [discussion thread](https://github.com/Akkudoktor-EOS/EOS/discussions/294).
|
|||||||
|
|
||||||

|

|
||||||
|
|
||||||
However, the components are not integrated by the EOS itself, but must be integrated by
|
However, the components are not integrated by the EOS itself, but must be intergrated by
|
||||||
the user using an integration solution and currently requires some effort and technical
|
the user using an integration solution and currently requires some effort and technical
|
||||||
know-how.
|
know-how.
|
||||||
|
|
||||||
@@ -153,7 +153,7 @@ Node-RED offers a large number of types of nodes that allow access via the proto
|
|||||||
commonly used in this area, such as Modbus or MQTT. Access to any existing databases,
|
commonly used in this area, such as Modbus or MQTT. Access to any existing databases,
|
||||||
such as InfluxDB or PostgreSQL, is also possible via nodes provided by Node-RED.
|
such as InfluxDB or PostgreSQL, is also possible via nodes provided by Node-RED.
|
||||||
|
|
||||||
It becomes easier if a smart home solution like Home Assistant, openHAB or ioBroker or
|
It becomes easier if a smart home solution like Homa Assistant, openHAB or ioBroker or
|
||||||
solutions such as evcc or openWB are already in use. In this case, these smart home
|
solutions such as evcc or openWB are already in use. In this case, these smart home
|
||||||
solutions already take over the technical integration and communication with the components
|
solutions already take over the technical integration and communication with the components
|
||||||
at a technical level and Node-RED offers nodes for accessing these solutions, so that the
|
at a technical level and Node-RED offers nodes for accessing these solutions, so that the
|
||||||
@@ -161,7 +161,7 @@ corresponding sources can be easily integrated into a flow.
|
|||||||
|
|
||||||
In Home Assistant you could use an automation to prepare the input payload for EOS and
|
In Home Assistant you could use an automation to prepare the input payload for EOS and
|
||||||
then use the RESTful integration to call EOS. Based on this concept there is already a
|
then use the RESTful integration to call EOS. Based on this concept there is already a
|
||||||
Home Assistant add-on created by [Duetting](#duetting-solution).
|
home assistand add-on created by [Duetting](#duetting-solution).
|
||||||
|
|
||||||
The plan created by EOS must also be executed via the chosen integration solution,
|
The plan created by EOS must also be executed via the chosen integration solution,
|
||||||
with the respective devices receiving their instructions according to the plan.
|
with the respective devices receiving their instructions according to the plan.
|
||||||
@@ -174,7 +174,7 @@ but usually find good local optima very quickly in a large solution space.
|
|||||||
|
|
||||||
## Links
|
## Links
|
||||||
|
|
||||||
- [German Videos explaining the basic concept and installation process of EOS (YouTube)](https://www.youtube.com/playlist?list=PL8_vk9A-s7zLD865Oou6y3EeQLlNtu-Hn)
|
- [German Video explaining the basic concept and installation process for the early version of EOS (YouTube)](https://www.youtube.com/live/ftQULW4-1ts?si=oDdBBifCpUmiCXaY)
|
||||||
- [German Forum of Akkudoktor EOS](https://akkudoktor.net/c/der-akkudoktor/eos)
|
- [German Forum of Akkudoktor EOS](https://akkudoktor.net/c/der-akkudoktor/eos)
|
||||||
- [Akkudoktor-EOS GitHub Repository](https://github.com/Akkudoktor-EOS/EOS)
|
- [Akkudoktor-EOS GitHub Repository](https://github.com/Akkudoktor-EOS/EOS)
|
||||||
- [Latest EOS Documentation](https://akkudoktor-eos.readthedocs.io/en/latest/)
|
- [Latest EOS Documentation](https://akkudoktor-eos.readthedocs.io/en/latest/)
|
||||||
|
|||||||
@@ -1,81 +0,0 @@
% SPDX-License-Identifier: Apache-2.0

(logging-page)=

# Logging

EOS automatically records important events and messages to help you understand what’s happening and
to troubleshoot problems.

## How Logging Works

- By default, logs are shown in your terminal (console).
- You can also save logs to a file for later review.
- Log files are rotated automatically to avoid becoming too large.

## Controlling Log Details

### 1. Command-Line Option

Set the amount of log detail shown on the console by using `--log-level` when starting EOS.

Example:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         .venv\Scripts\python src/akkudoktoreos/server/eos.py --log-level DEBUG

   .. tab:: Linux

      .. code-block:: bash

         .venv/bin/python src/akkudoktoreos/server/eos.py --log-level DEBUG

```

Common levels:

- DEBUG (most detail)
- INFO (default)
- WARNING
- ERROR
- CRITICAL (least detail)

### 2. Configuration File

You can also set logging options in your EOS configuration file (EOS.config.json).

```json
{
  "logging": {
    "console_level": "INFO",
    "file_level": "DEBUG"
  }
}
```

### 3. Environment Variable

You can also control the log level by setting the `EOS_LOGGING__CONSOLE_LEVEL` and the
`EOS_LOGGING__FILE_LEVEL` environment variables.

```bash
EOS_LOGGING__CONSOLE_LEVEL="INFO"
EOS_LOGGING__FILE_LEVEL="DEBUG"
```

## File Logging

If the `file_level` configuration is set, log records are written to a rotating log file. The log
file is in the data output directory and named `eos.log`. You may directly read the file or use
the `/v1/logging/log` endpoint to access the file log.

:::{admonition} Note
:class: note
The `/v1/logging/log` endpoint needs file logging to be enabled. Otherwise old or no logging
information is provided.
:::
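For quick inspection of the file log, a hedged example assuming the default host and port used
elsewhere in these docs (`127.0.0.1:8503`):

```bash
# Fetch the current file log from the REST server
# (requires file logging to be enabled, see the note above).
curl "http://127.0.0.1:8503/v1/logging/log"
```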
@@ -1,5 +1,4 @@
 % SPDX-License-Identifier: Apache-2.0
-(measurement-page)=
 
 # Measurements
 
@@ -13,7 +12,14 @@ accuracy.
 ## Storing Measurements
 
 EOS stores measurements in a **key-value store**, where the term `measurement key` refers to the
-unique identifier used to store and retrieve specific measurement data.
+unique identifier used to store and retrieve specific measurement data. Note that the key-value
+store is memory-based, meaning that all stored data will be lost upon restarting the EOS REST
+server.
+
+:::{admonition} Todo
+:class: note
+Ensure that measurement data persists across server restarts.
+:::
 
 Several endpoints of the EOS REST server allow for the management and retrieval of these
 measurements.
@@ -24,14 +30,14 @@ The measurement data must be or is provided in one of the following formats:
 
 A dictionary with the following structure:
 
-```json
+```python
 {
     "start_datetime": "2024-01-01 00:00:00",
-    "interval": "1 hour",
+    "interval": "1 Hour",
     "<measurement key>": [value, value, ...],
     "<measurement key>": [value, value, ...],
     ...
 }
 ```
 
 ### 2. DateTimeDataFrame
@@ -45,84 +51,43 @@ The column name of the data must be the same as the names of the `measurement ke
 A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) series with a
 `DatetimeIndex`. Use [pandas.Series.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.Series.to_json.html#pandas.Series.to_json).
 
-Creates a dictionary like this:
-
-```json
-{
-  "data": {
-    "2024-01-01T00:00:00+01:00": 1,
-    "2024-01-02T00:00:00+01:00": 2,
-    "2024-01-03T00:00:00+01:00": 3,
-    ...
-  },
-  "dtype": "float64",
-  "tz": "Europe/Berlin"
-}
-```
-
 ## Load Measurement
 
-The EOS measurement store provides for storing energy meter readings of loads.
-
-The associated `measurement key`s can be configured by:
-
-```json
-{
-  "measurement": {
-    "load_emr_keys": ["load0_emr", "my special load", ...]
-  }
-}
-```
+The EOS measurement store provides for storing meter readings of loads. There are currently five loads
+foreseen. The associated `measurement key`s are:
+
+- `measurement_load0_mr`: Load0 meter reading [kWh]
+- `measurement_load1_mr`: Load1 meter reading [kWh]
+- `measurement_load2_mr`: Load2 meter reading [kWh]
+- `measurement_load3_mr`: Load3 meter reading [kWh]
+- `measurement_load4_mr`: Load4 meter reading [kWh]
+
+For ease of use, you can assign descriptive names to the `measurement key`s to represent your
+system's load sources. Use the following `configuration options` to set these names
+(e.g., 'Dish Washer', 'Heat Pump'):
+
+- `measurement_load0_name`: Name of the load0 source
+- `measurement_load1_name`: Name of the load1 source
+- `measurement_load2_name`: Name of the load2 source
+- `measurement_load3_name`: Name of the load3 source
+- `measurement_load4_name`: Name of the load4 source
 
 Load measurements can be stored for any datetime. The values between different meter readings are
-linearly approximated. Storing values between optimization intervals is generally not useful.
+linearly approximated. Since optimization occurs on the hour, storing values between hours is
+generally not useful.
 
 The EOS measurement store automatically sums all given loads to create a total load value series
 for specified intervals, usually one hour. This aggregated data can be used for load predictions.
 
-:::{admonition} Warning
-:class: warning
-Only use **actual meter readings** in **kWh**, not energy consumption.
-Example: `112345.77`, `112389.23`, `112412.55`, …
-:::
-
 ## Grid Export/ Import Measurement
 
 The EOS measurement store also allows for the storage of meter readings for grid import and export.
+The associated `measurement key`s are:
 
-The associated `measurement key`s can be configured by:
-
-```json
-{
-  "measurement": {
-    "grid_export_emr_keys": ["grid_export_emr", ...],
-    "grid_import_emr_keys": ["grid_import_emr", ...],
-  }
-}
-```
+- `measurement_grid_export_mr`: Export to grid meter reading [kWh]
+- `measurement_grid_import_mr`: Import from grid meter reading [kWh]
 
 :::{admonition} Todo
 :class: note
 Currently not used. Integrate grid meter readings into the respective predictions.
 :::
 
-## Battery/ Electric Vehicle State of Charge (SoC) Measurement
-
-The state of charge (SoC) measurement of batteries and electric vehicle batteries can be stored.
-
-The associated `measurement key` is pre-defined by the device configuration. It can be
-determined from the device configuration by the read-only `measurement_key_soc_factor` configuration
-option.
-
-## Battery/ Electric Vehicle Power Measurement
-
-The charge/ discharge power measurements of batteries and electric vehicle batteries can be stored.
-Charging power is denoted by a negative value, discharging power by a positive value.
-
-The associated `measurement key`s are pre-defined by the device configuration. They can be
-determined from the device configuration by read-only configuration options:
-
-- `measurement_key_power_l1_w`
-- `measurement_key_power_l2_w`
-- `measurement_key_power_l3_w`
-- `measurement_key_power_3_phase_sym_w`
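The series format referenced in the measurement hunks above can be produced directly with pandas.
A minimal sketch (key name and values are illustrative; the exact wrapper the EOS endpoints expect
may differ, so treat this as a starting point):

```python
import pandas as pd

# Build a series of measurement values with a timezone-aware DatetimeIndex.
index = pd.date_range("2024-01-01", periods=3, freq="D", tz="Europe/Berlin")
series = pd.Series([1.0, 2.0, 3.0], index=index)

# Serialize as the docs describe. By default timestamps become epoch
# milliseconds; pass date_format="iso" for ISO 8601 keys instead.
json_string = series.to_json(orient="index", date_format="iso")
```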
@@ -1,448 +0,0 @@
% SPDX-License-Identifier: Apache-2.0

# Automatic Optimization

## Introduction

EOS offers two approaches to optimize your energy management system: `post /optimize optimization` and
`automatic optimization`.

The `post /optimize optimization` interface, based on a **POST** request to `/optimize`, is widely
used. It was originally developed by Andreas at the start of the project and is still demonstrated
in his instructional videos. This interface allows users or external systems to trigger an
optimization manually, supplying custom parameters and timing.

As an alternative, EOS supports `automatic optimization`, which runs automatically at configured
intervals. It retrieves all required input data — including electricity prices, battery storage
capacity, PV production forecasts, and temperature data — based on your system configuration.

### Genetic Algorithm

Both optimization modes use the same core optimization engine.

EOS uses a [genetic algorithm](https://en.wikipedia.org/wiki/Genetic_algorithm) to find an optimal
control strategy for home energy devices such as household loads, batteries, and electric vehicles.

In this context, each **individual** represents a possible solution — a specific control schedule
that defines how devices should operate over time. These individuals are evaluated using
[resource simulations](#resource-page), which model the system’s energy behavior over a defined time
period divided into fixed intervals.

The quality of each solution (its *fitness*) is determined by how well it performs during
simulation, based on objectives such as minimizing electricity costs, maximizing self-consumption,
or meeting battery charge targets.

Through an iterative process of selection, crossover, and mutation, the algorithm gradually evolves
more effective solutions. The final result is an optimized control strategy that balances multiple
system goals within the constraints of the input data and configuration.

:::{note}
You don’t need to understand the internal workings of the genetic algorithm to benefit from
automatic optimization. EOS handles everything behind the scenes based on your configuration.
However, advanced users can fine-tune the optimization behavior using additional settings like
population size, penalties, and random seed.
:::
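For readers who want a concrete picture of the selection, crossover, and mutation loop described
above, here is a tiny generic sketch in Python. It is not EOS's optimizer (EOS evaluates fitness
with resource simulations); the toy fitness function and all parameters are illustrative:

```python
import random

def evolve(fitness, n_individuals=30, n_generations=50, gene_len=24):
    """Toy genetic loop: binary genes, tournament selection, one-point crossover."""
    pop = [[random.randint(0, 1) for _ in range(gene_len)]
           for _ in range(n_individuals)]
    for _ in range(n_generations):
        # Tournament selection: the fitter of two random individuals survives.
        parents = [max(random.sample(pop, 2), key=fitness)
                   for _ in range(n_individuals)]
        # One-point crossover plus a small per-gene mutation probability.
        pop = []
        for a, b in zip(parents[::2], parents[1::2]):
            cut = random.randrange(1, gene_len)
            for child in (a[:cut] + b[cut:], b[:cut] + a[cut:]):
                pop.append([g ^ (random.random() < 0.01) for g in child])
    return max(pop, key=fitness)

# Toy objective standing in for "charge during the cheapest hours".
best = evolve(fitness=lambda genes: sum(genes[:8]) - sum(genes[8:]))
```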
## Energy Management Plan

Whenever the optimization is run, the energy management plan is updated. The energy management plan
provides a list of energy management instructions in chronological order. The instructions lean on
the [S2 standard](https://docs.s2standard.org/) to have maximum flexibility and stay completely
independent from any manufacturer.

### Battery Instructions

The battery control instructions assume an idealized battery model. Under this model, the battery
can be operated in four discrete operation modes:

| **Operation Mode ID** | **Description**                                                                       |
| --------------------- | ------------------------------------------------------------------------------------- |
| **IDLE**              | Battery neither charges nor discharges; holds its state of charge.                    |
| **CHARGE**            | Charge at a specified power rate up to the allowable maximum.                         |
| **DISCHARGE**         | Discharge at a specified power rate up to the allowable maximum.                      |
| **ALLOW_DISCHARGE**   | Allow the battery to freely discharge depending on its instantaneous power setpoint.  |

The **operation mode factor** (0.0–1.0) specifies the normalized power rate relative to the
battery's nominal maximum charge or discharge power. A value of 1.0 corresponds to full-rate
charging or discharging, while 0.0 indicates no power transfer. Intermediate values scale the power
proportionally. For example, a factor of 0.5 on a battery with a nominal maximum charge power of
8000 W corresponds to charging at 4000 W.

### Electric Vehicle Instructions

The electric vehicle control instructions assume an idealized EV battery model. Under this model,
the EV battery can be operated in two operation modes:

| **Operation Mode ID** | **Description**                                                     |
| --------------------- | ------------------------------------------------------------------- |
| **IDLE**              | Battery neither charges nor discharges; holds its state of charge.  |
| **CHARGE**            | Charge at a specified power rate up to the allowable maximum.       |

The **operation mode factor** (0.0–1.0) specifies the normalized power rate relative to the
battery's nominal maximum charge power. A value of 1.0 corresponds to full-rate charging, while 0.0
indicates no power transfer. Intermediate values scale the power proportionally.

### Home Appliance Instructions

The home appliance instructions assume an idealized home appliance model. Under this model,
the home appliance can be operated in two operation modes:

| **Operation Mode ID** | **Description**                                                               |
| --------------------- | ----------------------------------------------------------------------------- |
| **RUN**               | The home appliance is started and runs until the end of its power sequence.   |
| **IDLE**              | The home appliance does not run.                                              |

The **operation mode factor** (0.0–1.0) is ignored.
## Configuration

### Energy management configuration

The energy management is run at configured intervals with some startup delay after server start.
Both values are given in seconds.

:::{admonition} Note
:class: note
If no interval is configured (`None`, `null`) there will be only one energy management run at
startup.
:::

The energy management can be run in two modes:

- **OPTIMIZATION**: A full optimization is done. This includes the update of predictions.
- **PREDICTION**: Only the predictions are updated.

**Example:**

```json
{
  "ems": {
    "startup_delay": 5.0,
    "interval": 300.0,
    "mode": "OPTIMIZATION"
  }
}
```
### Optimization Configuration

#### Optimization Time Configuration

- **horizon_hours**:
  The optimization horizon parameter defines the default time window — in hours — within which
  the energy optimization goal shall be achieved.

  Specific devices, like the home appliance, have their own configuration for time windows. If
  the time windows are not configured, the simulation uses the default time window.

  Each device simulation run must ensure that all tasks or appliance cycles (e.g., running a
  dishwasher) are completed within the configured time windows.

- **interval**: Defines the time step in seconds between control actions
  (e.g. `3600` for one hour, `900` for 15 minutes).

:::{warning}
**Current Limitation**

At present, the `interval` setting is **not used** by the genetic algorithm. Instead:

- The control interval is fixed to **1 hour**.

Support for configurable intervals (e.g. 15-minute steps) may be added in a future release.
:::

#### Genetic Algorithm Parameters

The behavior of the genetic algorithm can be customized using the following configuration options:

- **individuals** (`int`, default: `300`):
  Sets the number of individuals (candidate solutions) in the (first) generation. A higher number
  increases solution diversity and the chance of finding a good result, but also increases
  computation time.

- **generations** (`int`, default: `400`):
  Sets the number of generations to evaluate the optimal solution. In each generation, solutions are
  evaluated and evolved. More generations can improve optimization quality but increase computation
  time. Best results are usually found within a moderate number of generations.

- **seed** (`int` or `null`, default: `null`):
  Sets the random seed for reproducible results.

  - If `null`, a random seed is used (non-reproducible).
  - If an integer is provided, it ensures that the same optimization input yields the same output.

  A fixed seed ensures reproducibility: runs with the same seed and configuration will
  produce the same results.

- **penalties** (`dict`):
  Defines how penalties are applied to solutions that violate constraints (e.g., undercharged
  batteries). Penalty function parameter values influence the fitness score, discouraging
  undesirable solutions.

:::{note}
**Supported Penalty Functions**

Currently, the only supported penalty function parameter is:

- `ev_soc_miss`:
  Applies a penalty when the **state of charge (SOC)** of the electric vehicle battery falls below
  the required minimum. This encourages the optimizer to ensure sufficient EV charging.
:::

#### Value Formats

- **Time-related values**:
  - `hours`: specified in **hours** (e.g. `24`)
  - `interval`: specified in **seconds** (e.g. `3600`)

- **Genetic algorithm parameters**:
  - `individuals`: must be an **integer**
  - `seed`: must be an **integer** or `null` for random behavior

- **Penalty function parameter values**: may be `float`, `int`, or `string`, depending on the type
  of penalty function.

#### Optimization configuration example

```json
{
  "optimization": {
    "hours": 24,
    "interval": 3600,
    "genetic": {
      "individuals": 300,
      "generations": 400,
      "seed": null,
      "penalties": {
        "ev_soc_miss": 10
      }
    }
  }
}
```
### Device simulation configuration

The device simulations are used to evaluate the fitness of the individuals of the solution
population.

The GENETIC algorithm supports 4 devices:

- **inverter**: A photovoltaic power inverter that can export to the grid and charge a battery.
  The inverter is mandatory.
- **electric_vehicle**: An electric vehicle, basically the battery of an electric vehicle.
  The electric vehicle is optional.
- **battery**: A battery that can be charged by the inverter. The battery is mandatory.
- **home_appliance**: A home appliance, like a washing machine or a dishwasher. The home
  appliance is optional.

:::{admonition} Warning
:class: warning
The GENETIC algorithm can only use the first inverter, electric vehicle, battery, and home
appliance that is configured, even if more devices are configured.
:::

#### Inverter simulation configuration

**Example:**

```json
{
  "devices": {
    "max_inverters": 1,
    "inverters": [
      {
        "device_id": "inv1",
        "max_power_w": 10000,
        "battery_id": "bat1"
      }
    ]
  }
}
```

#### Electric vehicle simulation configuration

**Example:**

```json
{
  "devices": {
    "max_electric_vehicles": 1,
    "electric_vehicles": [
      {
        "device_id": "ev1",
        "capacity_wh": 50000,
        "max_charge_power_w": 10000,
        "charge_rates": [0.0, 0.25, 0.5, 0.75, 1.0],
        "min_soc_percentage": 10,
        "max_soc_percentage": 80
      }
    ]
  },
  "measurement": {
    "electric_vehicle_soc_keys": ["ev1_soc"]
  }
}
```

#### Battery simulation configuration

**Example:**

```json
{
  "devices": {
    "max_batteries": 1,
    "batteries": [
      {
        "device_id": "battery1",
        "capacity_wh": 8000,
        "charging_efficiency": 0.88,
        "discharging_efficiency": 0.88,
        "levelized_cost_of_storage_kwh": 0.12,
        "max_charge_power_w": 8000,
        "min_charge_power_w": 50,
        "charge_rates": null,
        "min_soc_percentage": 5,
        "max_soc_percentage": 95
      }
    ]
  }
}
```

#### Home appliance simulation configuration

**Example:**

```json
{
  "devices": {
    "max_home_appliances": 1,
    "home_appliances": [
      {
        "device_id": "washing machine",
        "consumption_wh": 600,
        "duration_h": 3,
        "time_windows": null
      }
    ]
  }
}
```

The time windows the home appliance may run in can be [configured](#configtimewindow-page) in
several ways. See the [time window configuration](#configtimewindow-page) for details.
## Predictions configuration
|
|
||||||
|
|
||||||
The device simulation may rely on predictions to simulate proper behaviour. E.g. the inverter needs
|
|
||||||
to know the PV forecast.
|
|
||||||
|
|
||||||
Configure the [predictions](#prediction-page) as described on the [prediction page](#prediction-page).
|
|
||||||
|
|
||||||
### Providing your own prediction data
|
|
||||||
|
|
||||||
If EOS does not have a suitable prediction provider you can provide your own data for a prediction.
|
|
||||||
Configure the respective import provider (ElecPriceImport, LoadImport, PVForecastImport,
|
|
||||||
WeatherImport) and use one of the following endpoints to provide your own data:
|
|
||||||
|
|
||||||
- **PUT** `/v1/prediction/import/ElecPriceImport`
|
|
||||||
- **PUT** `/v1/prediction/import/LoadImport`
|
|
||||||
- **PUT** `/v1/prediction/import/PVForecastImport`
|
|
||||||
- **PUT** `/v1/prediction/import/WeatherImport`
|
|
||||||
|
|
||||||
## Measurement configuration

Predictions and device simulations often rely on **measurement data** to produce accurate results.
For example:

- A **load forecast** requires past energy meter readings.
- A **battery simulation** needs the current **state of charge (SoC)** to start from the correct
  condition.

Before using these features, make sure to configure the [measurement](#measurement-page) as
described on the [measurement page](#measurement-page).

### Providing your own measurement data

You can provide your own measurement data to the prediction and simulation engine through the
following REST endpoints (see the [measurement page](#measurement-page) for details on the data
format):

- **PUT** `/v1/measurement/data`
- **PUT** `/v1/measurement/dataframe`
- **PUT** `/v1/measurement/series`
- **PUT** `/v1/measurement/value`

### Example: Supplying Battery and EV SoC

For **batteries** and **electric vehicles**, it is strongly recommended to provide the
**current SoC**. This ensures that simulations start with the correct state.

The simplest way is to use the `/v1/measurement/value` endpoint.
Assuming the battery is named `battery1` and the EV is named `ev11`:

1. **Use the measurement keys** that are pre-configured for your **devices**. For example:

   ```json
   {
     "devices": {
       "batteries": [
         {
           "device_id": "battery1", "capacity_wh": 8000, ...
           "measurement_key_soc_factor": "battery1-soc-factor", ...
         }
       ],
       "electric_vehicles": [
         {
           "device_id": "ev11", "capacity_wh": 8000, ...
           "measurement_key_soc_factor": "ev11-soc-factor", ...
         }
       ]
     }
   }
   ```

2. **Record your SoC readings** to these keys.

   - Enter the values as a **factor of total capacity** of the respective **battery**.

In these examples:

- `datetime` specifies the timestamp of the measurement.
- `key` is the measurement key (e.g. `battery1-soc-factor`).
- `value` is the numeric measurement value (e.g. SoC as factor of total capacity).

#### Raw HTTP request

```http
PUT http://127.0.0.1:8503/v1/measurement/value?datetime=2025-09-26T16%3A39&key=battery1-soc-factor&value=0.57
PUT http://127.0.0.1:8503/v1/measurement/value?datetime=2025-09-26T16%3A39&key=ev11-soc-factor&value=0.22
```

#### Equivalent curl commands

```bash
curl -X PUT "http://127.0.0.1:8503/v1/measurement/value?datetime=2025-09-26T16%3A39&key=battery1-soc-factor&value=0.57"
curl -X PUT "http://127.0.0.1:8503/v1/measurement/value?datetime=2025-09-26T16%3A39&key=ev11-soc-factor&value=0.22"
```

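To cross-check what EOS has recorded, the stored series can be read back. This is a hedged sketch
only: it assumes a corresponding **GET** `/v1/measurement/series` endpoint with a `key` query
parameter, as listed in the EOS API reference.

```bash
# Hedged sketch: read back the recorded SoC series for battery1.
curl "http://127.0.0.1:8503/v1/measurement/series?key=battery1-soc-factor"
```
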
### Example: Supplying Load Data

To provide your actual load measurements in Akkudoktor-EOS:

1. **Configure the measurement keys** for your load energy meters. For example:

   ```json
   {
     "measurement": {
       "load_emr_keys": ["my_load_meter_reading", "my_other_load_meter_reading"]
     }
   }
   ```

2. **Record your meter readings** to these keys.

   - Enter the values exactly as your energy meters report them, in **kWh**.
   - Use the same approach as when supplying battery or EV SoC data, as sketched below.

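A hedged sketch of such a request, reusing the `/v1/measurement/value` endpoint from the SoC example
above with the `my_load_meter_reading` key configured in step 1 (the reading value is a placeholder):

```bash
# Hedged sketch: record one absolute energy meter reading (kWh) against the
# configured load energy-meter-reading key.
curl -X PUT "http://127.0.0.1:8503/v1/measurement/value?datetime=2025-09-26T17%3A00&key=my_load_meter_reading&value=5321.7"
```
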
% SPDX-License-Identifier: Apache-2.0

# `POST /optimize` Optimization

## Introduction

The `POST /optimize` API endpoint optimizes your energy management system based on various inputs
including electricity prices, battery storage capacity, PV forecast, and temperature data.

The `POST /optimize` optimization interface is the "classical" interface developed by Andreas at the
start of the project and used and described in his videos. It allows and requires you to define all
the optimization parameters in the endpoint request.

:::{admonition} Warning
:class: warning
The `POST /optimize` endpoint does not take the configured values of these parameters into account.
You have to set the parameters in the request even if they are already given in the configuration.
:::

## Input Payload

### Sample Request

```json
{
  "ems": {
    "preis_euro_pro_wh_akku": 0.0001,
    "einspeiseverguetung_euro_pro_wh": [
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007,
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007,
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007,
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007,
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007,
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007,
      0.00007, 0.00007, 0.00007, 0.00007, 0.00007, 0.00007
    ],
    "gesamtlast": [
      676.71, 876.19, 527.13, 468.88, 531.38, 517.95, 483.15, 472.28,
      1011.68, 995.00, 1053.07, 1063.91, 1320.56, 1132.03, 1163.67,
      1176.82, 1216.22, 1103.78, 1129.12, 1178.71, 1050.98, 988.56, 912.38,
      704.61, 516.37, 868.05, 694.34, 608.79, 556.31, 488.89, 506.91,
      804.89, 1141.98, 1056.97, 992.46, 1155.99, 827.01, 1257.98, 1232.67,
      871.26, 860.88, 1158.03, 1222.72, 1221.04, 949.99, 987.01, 733.99,
      592.97
    ],
    "pv_prognose_wh": [
      0, 0, 0, 0, 0, 0, 0, 8.05, 352.91, 728.51, 930.28, 1043.25, 1106.74,
      1161.69, 6018.82, 5519.07, 3969.88, 3017.96, 1943.07, 1007.17,
      319.67, 7.88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5.04, 335.59, 705.32,
      1121.12, 1604.79, 2157.38, 1433.25, 5718.49, 4553.96, 3027.55,
      2574.46, 1720.4, 963.4, 383.3, 0, 0, 0
    ],
    "strompreis_euro_pro_wh": [
      0.0003384, 0.0003318, 0.0003284, 0.0003283, 0.0003289, 0.0003334,
      0.0003290, 0.0003302, 0.0003042, 0.0002430, 0.0002280, 0.0002212,
      0.0002093, 0.0001879, 0.0001838, 0.0002004, 0.0002198, 0.0002270,
      0.0002997, 0.0003195, 0.0003081, 0.0002969, 0.0002921, 0.0002780,
      0.0003384, 0.0003318, 0.0003284, 0.0003283, 0.0003289, 0.0003334,
      0.0003290, 0.0003302, 0.0003042, 0.0002430, 0.0002280, 0.0002212,
      0.0002093, 0.0001879, 0.0001838, 0.0002004, 0.0002198, 0.0002270,
      0.0002997, 0.0003195, 0.0003081, 0.0002969, 0.0002921, 0.0002780
    ]
  },
  "pv_akku": {
    "device_id": "battery1",
    "capacity_wh": 26400,
    "max_charge_power_w": 5000,
    "initial_soc_percentage": 80,
    "min_soc_percentage": 15
  },
  "inverter": {
    "device_id": "inverter1",
    "max_power_wh": 10000,
    "battery_id": "battery1"
  },
  "eauto": {
    "device_id": "ev1",
    "capacity_wh": 60000,
    "charging_efficiency": 0.95,
    "charge_rates": [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
    "discharging_efficiency": 1.0,
    "max_charge_power_w": 11040,
    "initial_soc_percentage": 54,
    "min_soc_percentage": 0
  },
  "temperature_forecast": [
    18.3, 17.8, 16.9, 16.2, 15.6, 15.1, 14.6, 14.2, 14.3, 14.8, 15.7, 16.7, 17.4,
    18.0, 18.6, 19.2, 19.1, 18.7, 18.5, 17.7, 16.2, 14.6, 13.6, 13.0, 12.6, 12.2,
    11.7, 11.6, 11.3, 11.0, 10.7, 10.2, 11.4, 14.4, 16.4, 18.3, 19.5, 20.7, 21.9,
    22.7, 23.1, 23.1, 22.8, 21.8, 20.2, 19.1, 18.0, 17.4
  ],
  "start_solution": null
}
```

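To submit this payload, a plain HTTP client is sufficient. A hedged sketch, assuming the sample
request is saved as `optimize_request.json` (a hypothetical file name) and EOS runs at its default
address:

```bash
# Hedged sketch: run the classical optimization with the sample payload.
curl -X POST "http://127.0.0.1:8503/optimize" \
  -H "Content-Type: application/json" \
  -d @optimize_request.json
```
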
- Unit: €/Wh
- Purpose: Represents the residual value of energy stored in the battery
- Impact: Lower values encourage battery depletion, higher values preserve charge at the end of the
  simulation.

#### Feed-in Tariff (`einspeiseverguetung_euro_pro_wh`)

#### Configuration

- `device_id`: ID of battery
- `capacity_wh`: Total battery capacity in Wh
- `charging_efficiency`: Charging efficiency (0-1)
- `discharging_efficiency`: Discharging efficiency (0-1)

### Inverter

- `device_id`: ID of inverter
- `max_power_wh`: Maximum inverter power in Wh
- `battery_id`: ID of battery

### Electric Vehicle (EV)

- `device_id`: ID of electric vehicle
- `capacity_wh`: Battery capacity in Wh
- `charging_efficiency`: Charging efficiency (0-1)
- `discharging_efficiency`: Discharging efficiency (0-1)

#### Battery Control

- `ac_charge`: Grid charging schedule (0.0-1.0)
- `dc_charge`: DC charging schedule (0-1)
- `discharge_allowed`: Discharge permission (0 or 1)

A schedule value of 0 means no charge; a value of 1 means charging with full load.

`ac_charge` multiplied by the maximum charge power of the battery results in the planned charging
power. For example, with the sample battery's `max_charge_power_w` of 5000, an `ac_charge` value of
0.375 plans roughly 1875 W of grid charging for that hour.

#### EV Charging

- `eautocharge_hours_float`: EV charging schedule (0.0-1.0)

#### Results

The `result` object contains detailed information about the optimization outcome. The length of the
array is between 25 and 48; it starts at the current hour and ends at 23:00 tomorrow.

- `Last_Wh_pro_Stunde`: Array of hourly load values in Wh
  - Shows the total energy consumption per hour

# Predictions

In EOS, a standard set of predictions is managed, including:

- Household Load Prediction
- Electricity Price Prediction
- Feed In Tariff Prediction
- PV Power Prediction
- Weather Prediction

## Storing Predictions

EOS stores predictions in a **key-value store**, where the term `prediction key` refers to the
unique key used to retrieve specific prediction data.

## Prediction Providers

Most predictions can be sourced from various providers. The specific provider to use is configured
in the EOS configuration and can be set by prediction type. For example:

```json
{
  "weather": {
    "provider": "ClearOutside"
  }
}
```

Some providers offer multiple prediction keys. For instance, a weather provider might provide data
[…]

The prediction data must be provided in one of the following formats:

A dictionary with the following structure:

```json
{
  "start_datetime": "2024-01-01 00:00:00",
  "interval": "1 Hour",
  ...
}
```

Predictions are adjusted by real data from your system's measurements, if given, to enhance
accuracy.

For example, the load prediction provider `LoadAkkudoktor` takes generic load data assembled by
Akkudoktor.net, maps that to the yearly energy consumption given in the configuration option
`loadakkudoktor_year_energy`, and finally adjusts the predicted load by the `loads`
of your system.

## Prediction Updates

## Electricity Price Prediction

Prediction keys:

- `elecprice_marketprice_wh`: Electricity market price per Wh (€/Wh).

Configuration options:

- `elecprice`: Electricity price configuration.

  - `provider`: Electricity price provider id of provider to be used.

    - `ElecPriceAkkudoktor`: Retrieves from Akkudoktor.net.
    - `ElecPriceEnergyCharts`: Retrieves from Energy-Charts.info.
    - `ElecPriceImport`: Imports from a file or JSON string.

  - `charges_kwh`: Electricity price charges (€/kWh).
  - `vat_rate`: VAT rate factor applied to electricity price when charges are used (default: 1.19).
  - `provider_settings.import_file_path`: Path to the file to import electricity price forecast data from.
  - `provider_settings.import_json`: JSON string, dictionary of electricity price forecast value lists.

### ElecPriceAkkudoktor Provider

The `ElecPriceAkkudoktor` provider retrieves electricity prices directly from **Akkudoktor.net**,
which supplies price data for the next 24 hours. For periods beyond 24 hours, the provider generates
prices by extrapolating historical price data combined with the most recent actual prices obtained
from Akkudoktor.net. Electricity price charges given in the `charges_kwh` configuration
option are added.

### ElecPriceEnergyCharts Provider

The `ElecPriceEnergyCharts` provider retrieves day-ahead electricity market prices from
[Energy-Charts.info](https://www.Energy-Charts.info). It supports both short-term and extended
forecasting by combining real-time market data with historical price trends.

- For the next 24 hours, market prices are fetched directly from Energy-Charts.info.
- For periods beyond 24 hours, prices are estimated by extrapolation based on historical data and
  the latest available market values.

Charges and VAT:

- If the `charges_kwh` configuration option is greater than 0, the electricity price is calculated
  as `(market price + charges_kwh) * vat_rate`, where `vat_rate` is configurable
  (default: 1.19 for 19% VAT).
- If `charges_kwh` is set to 0, the electricity price is simply the `market_price` (no VAT applied).

For example, with a market price of 0.20 €/kWh, `charges_kwh` of 0.05 €/kWh, and the default
`vat_rate` of 1.19, the resulting price is (0.20 + 0.05) × 1.19 ≈ 0.30 €/kWh.

**Note:** For the most accurate forecasts, it is recommended to set the `historic_hours` parameter
to 840.

### ElecPriceImport Provider

The `ElecPriceImport` provider is designed to import electricity prices from a file or a JSON
string. An external entity should update the file or JSON string whenever new prediction data
becomes available.

The prediction key for the electricity price forecast data is:

- `elecprice_marketprice_wh`: Electricity market price per Wh (€/Wh).

The electricity price forecast data must be provided in one of the formats described in
<project:#prediction-import-providers>. The data source can be given in the
`import_file_path` or `import_json` configuration option.

The data may additionally or solely be provided by the
**PUT** `/v1/prediction/import/ElecPriceImport` endpoint.

## Feed In Tariff Prediction

Prediction keys:

- `feed_in_tarif_wh`: Feed in tariff per Wh (€/Wh).
- `feed_in_tarif_kwh`: Feed in tariff per kWh (€/kWh).

Configuration options:

- `feedintarif`: Feed in tariff configuration.

  - `provider`: Feed in tariff provider id of provider to be used.

    - `FeedInTariffFixed`: Provides fixed feed in tariff values.
    - `FeedInTariffImport`: Imports from a file or JSON string.

  - `provider_settings.feed_in_tariff_kwh`: Fixed feed in tariff (€/kWh).
  - `provider_settings.import_file_path`: Path to the file to import feed in tariff forecast data from.
  - `provider_settings.import_json`: JSON string, dictionary of feed in tariff value lists.

## Load Prediction

Prediction keys:

- `loadforecast_power_w`: Predicted load mean value (W).
- `load_std`: Predicted load standard deviation (W).
- `load_mean_adjusted`: Predicted load mean value adjusted by load measurement (W).

Configuration options:

- `load`: Load configuration.

  - `provider`: Load provider id of provider to be used.

    - `LoadAkkudoktor`: Retrieves from local database.
    - `LoadVrm`: Retrieves data from the VRM API by Victron Energy.
    - `LoadImport`: Imports from a file or JSON string.

  - `provider_settings.LoadAkkudoktor.loadakkudoktor_year_energy_kwh`: Yearly energy consumption (kWh).
  - `provider_settings.LoadVRM.load_vrm_token`: API token.
  - `provider_settings.LoadVRM.load_vrm_idsite`: VRM installation ID.
  - `provider_settings.LoadImport.loadimport_file_path`: Path to the file to import load forecast data from.
  - `provider_settings.LoadImport.loadimport_json`: JSON string, dictionary of load forecast value lists.

### LoadAkkudoktor Provider

The `LoadAkkudoktor` provider retrieves generic load data from the local database and scales
it to match the annual energy consumption specified in the
`LoadAkkudoktor.loadakkudoktor_year_energy` configuration option.

### LoadAkkudoktorAdjusted Provider

The `LoadAkkudoktorAdjusted` provider retrieves generic load data from the local database and scales
it to match the annual energy consumption specified in the
`LoadAkkudoktor.loadakkudoktor_year_energy` configuration option. In addition, the provider refines
the forecast by incorporating available measured load data, ensuring a more realistic and
site-specific consumption profile.

For details on how to supply load measurements, see the [Measurements](measurement-page) section.

### LoadVrm Provider

The `LoadVrm` provider retrieves load forecast data from the VRM API by Victron Energy.
To receive forecasts, the system data must be configured under Dynamic ESS in the VRM portal.
To query the forecasts, an API token is required, which can also be created in the VRM portal under
Preferences. This token must be stored in the EOS configuration along with the VRM installation ID.

```json
{
  "load": {
    "provider": "LoadVrm",
    "provider_settings": {
      "LoadVRM": {
        "load_vrm_token": "dummy-token",
        "load_vrm_idsite": 12345
      }
    }
  }
}
```

The prediction keys for the load forecast data are:

- `load_mean`: Predicted load mean value (W).

### LoadImport Provider

The `LoadImport` provider is designed to import load forecast data from a file or a JSON string.
An external entity should update the file or JSON string whenever new prediction data becomes
available.

The prediction keys for the load forecast data are:

- `load_mean_adjusted`: Predicted load mean value adjusted by load measurement (W).

The load forecast data must be provided in one of the formats described in
<project:#prediction-import-providers>. The data source can be given in the `loadimport_file_path`
or `loadimport_json` configuration option.

The data may additionally or solely be provided by the
**PUT** `/v1/prediction/import/LoadImport` endpoint.

## PV Power Prediction

Prediction keys:

- `pvforecast_ac_power`: Total AC power (W).
- `pvforecast_dc_power`: Total DC power (W).

Configuration options:

- `general`: General configuration.

  - `latitude`: Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)
  - `longitude`: Longitude in decimal degrees, within -180 to 180 (°)

- `pvforecast`: PV forecast configuration.

  - `provider`: PVForecast provider id of provider to be used.

    - `PVForecastAkkudoktor`: Retrieves from Akkudoktor.net.
    - `PVForecastVrm`: Retrieves data from the VRM API by Victron Energy.
    - `PVForecastImport`: Imports from a file or JSON string.

  - `planes[].surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
  - `planes[].surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane.
    Clockwise from north (north=0, east=90, south=180, west=270).
  - `planes[].userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
  - `planes[].peakpower`: Nominal power of PV system in kW.
  - `planes[].pvtechchoice`: PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
  - `planes[].mountingplace`: Type of mounting for PV system.
    Options are 'free' for free-standing and 'building' for building-integrated.
  - `planes[].loss`: Sum of PV system losses in percent.
  - `planes[].trackingtype`: Type of suntracking.
    0=fixed,
    1=single horizontal axis aligned north-south,
    2=two-axis tracking,
    3=vertical axis tracking,
    4=single horizontal axis aligned east-west,
    5=single inclined axis aligned north-south.
  - `planes[].optimal_surface_tilt`: Calculate the optimum tilt angle. Ignored for two-axis tracking.
  - `planes[].optimalangles`: Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
  - `planes[].albedo`: Proportion of the light hitting the ground that it reflects back.
  - `planes[].module_model`: Model of the PV modules of this plane.
  - `planes[].inverter_model`: Model of the inverter of this plane.
  - `planes[].inverter_paco`: AC power rating of the inverter (W).
  - `planes[].modules_per_string`: Number of the PV modules of the strings of this plane.
  - `planes[].strings_per_inverter`: Number of the strings of the inverter of this plane.
  - `provider_settings.import_file_path`: Path to the file to import PV forecast data from.
  - `provider_settings.import_json`: JSON string, dictionary of PV forecast value lists.

---

Detailed definitions taken from
[PVGIS](https://joint-research-centre.ec.europa.eu/photovoltaic-geographical-information-system-pvgis/getting-started-pvgis/pvgis-user-manual_en).

- `pvtechchoice`

The performance of PV modules depends on the temperature and on the solar irradiance, but the exact
dependence varies between different types of PV modules. At the moment we can estimate the losses
[…] the moment this calculation can be done for crystalline silicon and CdTe modules. Note that this
calculation is not yet available when using the NSRDB solar radiation database.

- `peakpower`

This is the power that the manufacturer declares that the PV array can produce under standard test
conditions (STC), which are a constant 1000W of solar irradiation per square meter in the plane of
[…] `P_BNPI = P_STC * (1 + φ * 0.135)`. NB this bifacial approach is not appropriate for BAPV or
BIPV installations or for modules mounted on a N-S axis, i.e. facing E-W.

- `loss`

The estimated system losses are all the losses in the system, which cause the power actually
delivered to the electricity grid to be lower than the power produced by the PV modules. There are
[…] We have given a default value of 14% for the overall losses. If you have a good idea that your
value will be different (maybe due to a really high-efficiency inverter) you may reduce this value
a little.

- `mountingplace`

For fixed (non-tracking) systems, the way the modules are mounted will have an influence on the
temperature of the module, which in turn affects the efficiency. Experiments have shown that if the
[…] a roof with curved roof tiles, allowing air to move behind the modules. In such cases, the
performance will be somewhere between the results of the two calculations that are possible here.

- `userhorizon`

Elevation of horizon in degrees, at equally spaced azimuth clockwise from north. In the user horizon
data each number represents the horizon height in degrees in a certain compass direction around the
[…]

Detailed definitions from **PVLib** for PVGIS data.

- `surface_tilt`

Tilt angle from horizontal plane.

- `surface_azimuth`

Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180,
west=270). This is offset 180 degrees from the convention used by PVGIS.

### PVForecastAkkudoktor Provider

The `PVForecastAkkudoktor` provider retrieves the PV power forecast data directly from
**Akkudoktor.net**.

The following prediction configuration options of the PV system must be set:

- `general.latitude`: Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)
- `general.longitude`: Longitude in decimal degrees, within -180 to 180 (°)

For each plane of the PV system the following configuration options must be set:

- `pvforecast.planes[].surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
- `pvforecast.planes[].surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane.
  Clockwise from north (north=0, east=90, south=180, west=270).
- `pvforecast.planes[].userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
- `pvforecast.planes[].inverter_paco`: AC power rating of the inverter (W).
- `pvforecast.planes[].peakpower`: Nominal power of PV system in kW.

Example:

```json
{
  "general": {
    "latitude": 50.1234,
    "longitude": 9.7654
  },
  "pvforecast": {
    "provider": "PVForecastAkkudoktor",
    "planes": [
      {
        "peakpower": 5.0,
        "surface_azimuth": -10,
        "surface_tilt": 7,
        "userhorizon": [20, 27, 22, 20],
        "inverter_paco": 10000
      },
      {
        "peakpower": 4.8,
        "surface_azimuth": -90,
        "surface_tilt": 7,
        "userhorizon": [30, 30, 30, 50],
        "inverter_paco": 10000
      },
      {
        "peakpower": 1.4,
        "surface_azimuth": -40,
        "surface_tilt": 60,
        "userhorizon": [60, 30, 0, 30],
        "inverter_paco": 2000
      },
      {
        "peakpower": 1.6,
        "surface_azimuth": 5,
        "surface_tilt": 45,
        "userhorizon": [45, 25, 30, 60],
        "inverter_paco": 1400
      }
    ]
  }
}
```

### PVForecastVrm Provider

The `PVForecastVrm` provider retrieves PV power forecast data from the VRM API by Victron Energy.
To receive forecasts, the system data must be configured under Dynamic ESS in the VRM portal.
To query the forecasts, an API token is required, which can also be created in the VRM portal under
Preferences. This token must be stored in the EOS configuration along with the VRM installation ID.

```json
{
  "pvforecast": {
    "provider": "PVForecastVrm",
    "provider_settings": {
      "pvforecast_vrm_token": "dummy-token",
      "pvforecast_vrm_idsite": 12345
    }
  }
}
```

The prediction keys for the PV forecast data are:

- `pvforecast_dc_power`: Total DC power (W).

### PVForecastImport Provider

The `PVForecastImport` provider is designed to import PV forecast data from a file or a JSON
string. An external entity should update the file or JSON string whenever new prediction data
becomes available.

The prediction keys for the PV forecast data are:

- `pvforecast_ac_power`: Total AC power (W).
- `pvforecast_dc_power`: Total DC power (W).

The PV forecast data must be provided in one of the formats described in
<project:#prediction-import-providers>. The data source can be given in the
`import_file_path` or `import_json` configuration option.

The data may additionally or solely be provided by the
**PUT** `/v1/prediction/import/PVForecastImport` endpoint.

## Weather Prediction

Prediction keys:

- `weather_temp_air`: Temperature (°C)
- `weather_total_clouds`: Total Clouds (% Sky Obscured)
- `weather_visibility`: Visibility (m)
- `weather_wind_direction`: Wind Direction (°)
- `weather_wind_speed`: Wind Speed (kmph)

Configuration options:

- `weather`: General weather configuration.

  - `provider`: Weather provider id of provider to be used.

    - `BrightSky`: Retrieves from [BrightSky](https://api.brightsky.dev).
    - `ClearOutside`: Retrieves from [ClearOutside](https://clearoutside.com/forecast).
    - `WeatherImport`: Imports from a file or JSON string.

  - `provider_settings.import_file_path`: Path to the file to import weather forecast data from.
  - `provider_settings.import_json`: JSON string, dictionary of weather forecast value lists.

### BrightSky Provider

The provider provides forecast data for the following prediction keys:

- `weather_temp_air`: Temperature (°C)
- `weather_total_clouds`: Total Clouds (% Sky Obscured)
- `weather_visibility`: Visibility (m)
- `weather_wind_direction`: Wind Direction (°)
- `weather_wind_speed`: Wind Speed (kmph)

### ClearOutside Provider

The provider provides forecast data for the following prediction keys:

- `weather_temp_air`: Temperature (°C)
- `weather_total_clouds`: Total Clouds (% Sky Obscured)
- `weather_visibility`: Visibility (m)
- `weather_wind_direction`: Wind Direction (°)
- `weather_wind_speed`: Wind Speed (kmph)

### WeatherImport Provider

The `WeatherImport` provider is designed to import weather forecast data from a file or a JSON
string. An external entity should update the file or JSON string whenever new prediction data
becomes available.

The prediction keys for the weather forecast data are:

- `weather_dew_point`: Dew Point (°C)
- `weather_dhi`: Diffuse Horizontal Irradiance (W/m2)
- `weather_temp_air`: Temperature (°C)
- `weather_total_clouds`: Total Clouds (% Sky Obscured)
- `weather_visibility`: Visibility (m)
- `weather_wind_direction`: Wind Direction (°)
- `weather_wind_speed`: Wind Speed (kmph)

The weather forecast data must be provided in one of the formats described in
<project:#prediction-import-providers>. The data source can be given in the
`import_file_path` or `import_json` configuration option.

The data may additionally or solely be provided by the
**PUT** `/v1/prediction/import/WeatherImport` endpoint.

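For illustration, a minimal weather import might look like the following sketch. It assumes the
endpoint accepts the same dictionary format as the file import described above; the four temperature
values are placeholders for a full forecast horizon.

```bash
# Hedged sketch: push four hourly air temperatures (°C) to the
# WeatherImport provider of a default EOS instance.
curl -X PUT "http://127.0.0.1:8503/v1/prediction/import/WeatherImport" \
  -H "Content-Type: application/json" \
  -d '{
        "start_datetime": "2024-01-01 00:00:00",
        "interval": "1 Hour",
        "weather_temp_air": [11.7, 11.6, 11.3, 11.0]
      }'
```
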
% SPDX-License-Identifier: Apache-2.0

(resource-page)=

# Resources (Device Simulations)

## Concepts

The simulations for resources lean on general concepts of the [S2 standard].

### Control Types

The control of resources, and thus what a resource simulation will simulate, follows three
basic control principles:

- Operation Mode Based Control (OMBC)
- Fill Rate Based Control (FRBC)
- Demand Driven Based Control (DDBC)

Although these control principles differ enough to separate them into three distinct control types,
there are some common aspects that make them similar:

- Operation Modes,
- Transitions, and
- Timers.

The objective of a control type is to define under which circumstances what things can be adjusted,
and what the constraints are for these adjustments. The three control types model a virtual,
abstract resource for simulation.

The abstract resource ignores all details of the physical device that are not relevant to energy
management. In addition, physical devices have an enormous variety in parameters, sensors, control
strategies, concerns, safeguards, and so on. It would be practically impossible to develop a
simulation that can understand all the parameters of all the physical devices on the market. By
making the resource more abstract, its concepts can be translated to all sorts of physical devices,
even though internally they function very differently. As a consequence, it is not always possible
to make a 100% accurate description of all the behaviors and constraints in these abstractions. But
the abstractions used in the control types are quite powerful, and should allow you to come pretty
close.

The control types basically define how the simulated resource can be described. The user in the end
selects the proper description of a physical device using the configuration options provided for
resource simulations. The configuration sets how the simulated resource functions, what it can do,
and what kind of constraints it has.

### Resource Simulation

Based on the description of this virtual resource, the resource simulation can make predictions of
what the physical device will do in certain situations, and when it is allowed to execute
instructions generated by the optimization as part of the energy management plan evaluation.

### Resource Status

Once the physical device has changed its behavior, the resource simulation should be informed so
that the simulation changes its state accordingly.

The actual state of a physical device may be reported to the resource simulation by the
**PUT** `/v1/resource/status` API endpoint.

## Battery

There is a wealth of possible battery operation modes:

<!-- pyml disable line-length -->
| Mode | Purpose / Behavior | Typical Trigger / Context |
| ------------------------- | --------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |
| **IDLE** | Battery neither charges nor discharges (SOC stable). | No active control objective or power imbalance below thresholds. |
| **SELF_CONSUMPTION** | Charge from PV surplus and discharge to cover local load. | PV generation > load (charge) or load > PV (discharge). |
| **NON_EXPORT** | Charge from on-site or local surplus with the goal of minimizing or preventing energy export to the external grid. Discharging to the grid is not allowed. | Export limit reached and SOC < SOC_max. |
| **PEAK_SHAVING** | Discharge to keep grid import below a target threshold. | Predicted or measured site load exceeds peak limit. |
| **GRID_SUPPORT_EXPORT** | Discharge energy to grid for revenue (V2G, wholesale market, flexibility service). | Market or signal permits profitable export. |
| **GRID_SUPPORT_IMPORT** | Charge from grid to absorb surplus or provide up-regulation service. | Low-price or grid-support signal detected. |
| **FREQUENCY_REGULATION** | Rapid charge/discharge response to grid frequency deviations. | Active participation in frequency control. |
| **RAMP_RATE_CONTROL** | Smooth site-level power ramp rates by buffering fluctuations. | Sudden PV/load change exceeding ramp limit. |
| **RESERVE_BACKUP** | Maintain SOC ≥ reserve threshold to ensure backup capacity. | Resilience mode active, grid operational. |
| **OUTAGE_SUPPLY** | Islanded operation: power local loads using stored energy (and PV if available). | Grid failure detected. |
| **FORCED_CHARGE** | Manual or external control command to charge (e.g., pre-event, maintenance). No discharge. | Operator or optimizer command. |
| **FORCED_DISCHARGE** | Manual or external control command to discharge. No charge. | Operator or optimizer command. |
| **FAULT** | Battery unavailable due to fault, safety, or protection state. | Fault detected (thermal, voltage, comms, etc.). |
<!-- pyml enable line-length -->

The optimization algorithm, the device simulation and the configuration properties only support the
most important of these modes.

### Battery Simulation

The battery simulation assumes an idealized battery model. Under this model, the battery can be
operated in three discrete operation modes with fill rate based control (FRBC):

| **Operation Mode ID**    | **Description**                                                         |
| ------------------------ | ----------------------------------------------------------------------- |
| **SELF_CONSUMPTION**     | Charge from local surplus and discharge to cover local load.            |
| **NON_EXPORT**           | Charge from local surplus and do not discharge.                         |
| **FORCED_CHARGE**        | Charge.                                                                  |

The **operation mode factor** (0.0–1.0) specifies the normalized power rate relative to the
battery's nominal maximum charge or discharge power. A value of 1.0 corresponds to full-rate
charging or discharging, while 0.0 indicates no power transfer. Intermediate values scale the power
proportionally. For example, a factor of 0.5 on a battery with a maximum charge power of 8000 W
corresponds to charging at about 4000 W.

The **fill level** (0.0–1.0) specifies the normalized fill level relative to the
battery's nominal maximum charge. A value of 1.0 corresponds to full, while 0.0 indicates empty.
Intermediate values scale the fill level proportionally.

### Battery Configuration

### Battery Status

To keep the battery simulation in synchronization with the actual status of the battery, the
following resource status reports may be sent to EOS via the **PUT** `/v1/resource/status` API
endpoint.

#### Battery FRBCActuatorStatus

The operation mode in which the battery is currently operated.

```json
{
  "type": "FRBCActuatorStatus",
  "active_operation_mode_id": "GRID_SUPPORT_IMPORT",
  "operation_mode_factor": "0.375",
  "previous_operation_mode_id": "SELF_CONSUMPTION",
  "transition_timestamp": "20250725T12:00:12"
}
```

#### Battery FRBCStorageStatus

The current battery state of charge (SoC).

```json
{
  "type": "FRBCStorageStatus",
  "present_fill_level": "0.88"
}
```

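A hedged sketch of reporting this status from a shell, assuming the default EOS address; how the
target resource is addressed (for example, via a device id parameter) may differ, so consult the
API reference.

```bash
# Hedged sketch: report the current battery fill level (SoC factor)
# to the resource simulation.
curl -X PUT "http://127.0.0.1:8503/v1/resource/status" \
  -H "Content-Type: application/json" \
  -d '{
        "type": "FRBCStorageStatus",
        "present_fill_level": "0.88"
      }'
```
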
#### Battery PowerMeasurement
|
|
||||||
|
|
||||||
The current power that the battery is charged or discharged with \[W\].
|
|
||||||
|
|
||||||
```json
{
  "type": "PowerMeasurement",
  "measurement_timestamp": "20250725T12:00:12",
  "values": [
    {
      "commodity_quantity": "ELECTRIC.POWER.L1",
      "value": "887.5"
    },
    {
      "commodity_quantity": "ELECTRIC.POWER.L2",
      "value": "905.5"
    },
    {
      "commodity_quantity": "ELECTRIC.POWER.L3",
      "value": "1100.7"
    }
  ]
}
```

For symmetric (or unknown) power distribution:

```json
{
  "type": "PowerMeasurement",
  "measurement_timestamp": "20250725T12:00:12",
  "values": [
    {
      "commodity_quantity": "ELECTRIC.POWER.3_PHASE_SYM",
      "value": "1000"
    }
  ]
}
```

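Such status reports can be pushed from any integration script. A minimal sketch, assuming EOS is
reachable at `http://localhost:8503` and that the resource is addressed by a `resource_id` query
parameter (both assumptions; check the server API reference for the authoritative signature):

```python
# Minimal sketch: push a storage status report to EOS.
# Host, port, and the query parameter name are assumptions, not confirmed API.
import requests

status = {
    "type": "FRBCStorageStatus",
    "present_fill_level": "0.88",
}

response = requests.put(
    "http://localhost:8503/v1/resource/status",
    params={"resource_id": "battery1"},  # assumed parameter name and id
    json=status,
    timeout=10,
)
response.raise_for_status()
```
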
## Electric Vehicle

The electric vehicle is basically a battery with a reduced set of operation modes.

### Electric Vehicle Instructions

The electric vehicle control instructions assume an idealized EV battery model. Under this model,
the EV battery can be operated in two operation modes:

| **Operation Mode ID** | **Description**                                                    |
| --------------------- | ------------------------------------------------------------------ |
| **IDLE**              | Battery neither charges nor discharges; holds its state of charge. |
| **FORCED_CHARGE**     | Charge at a specified power rate up to the allowable maximum.      |

The **operation mode factor** (0.0–1.0) specifies the normalized power rate relative to the
battery's nominal maximum charge power. A value of 1.0 corresponds to full-rate charging, while 0.0
indicates no power transfer. Intermediate values scale the power proportionally.

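A charge instruction therefore maps directly to a power setpoint. A minimal sketch, assuming an EV
charger with 11 kW nominal charge power (an illustrative value):

```python
# Sketch: translate an EV charge instruction into a power setpoint.
# The 11 kW nominal charge power is an assumed example value.
EV_MAX_CHARGE_W = 11000.0

def ev_setpoint(mode: str, factor: float) -> float:
    if mode == "IDLE":
        return 0.0
    if mode == "FORCED_CHARGE":
        return factor * EV_MAX_CHARGE_W
    raise ValueError(f"unsupported operation mode: {mode}")

print(ev_setpoint("FORCED_CHARGE", 0.5))  # -> 5500.0 W
```
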
## Home Appliance

The optimization algorithm supports one start of the home appliance within the optimization
horizon.

### Home Appliance Simulation

### Home Appliance Configuration

Home appliance to run within the optimization horizon.

```json
[
  {
    "device_id": "dishwasher1",
    "consumption_wh": 2000,
    "duration_h": 3
  }
]
```

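Assuming EOS spreads the configured consumption evenly over the run (the real device profile may
differ), the average power the optimizer has to place follows directly from the two values:

```python
# Average power for the example dishwasher above, assuming a flat profile.
consumption_wh = 2000
duration_h = 3
average_power_w = consumption_wh / duration_h
print(round(average_power_w, 1))  # -> 666.7 W for each hour of the run
```
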
Home appliance to run within a time window of 5 hours starting at 8:00 every day and another time
window of 3 hours starting at 15:00 every day. See
[Time Window Sequence Configuration](configtimewindow-page) for more information.

```json
[
  {
    "device_id": "dishwasher1",
    "consumption_wh": 2000,
    "duration_h": 3,
    "time_windows": {
      "windows": [
        {
          "start_time": "08:00",
          "duration": "5 hours"
        },
        {
          "start_time": "15:00",
          "duration": "3 hours"
        }
      ]
    }
  }
]
```

:::{admonition} Note
:class: note
The optimization algorithm always restricts the home appliance to one start within the
optimization horizon per energy management run.
:::

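To see how such a window definition behaves, the sketch below checks whether a proposed start
leaves enough room for the configured run duration. It parses only the simple `"HH:MM"` /
`"N hours"` forms from the example and assumes same-day windows; the actual EOS time-window syntax
is richer (see the reference linked above):

```python
# Sketch: does a start at `start` fit the appliance run into any window?
# Only the simple "HH:MM" / "N hours" forms from the example are parsed.
from datetime import datetime, timedelta

windows = [
    {"start_time": "08:00", "duration": "5 hours"},
    {"start_time": "15:00", "duration": "3 hours"},
]

def fits(start: datetime, duration_h: int) -> bool:
    for w in windows:
        h, m = map(int, w["start_time"].split(":"))
        w_start = start.replace(hour=h, minute=m, second=0, microsecond=0)
        w_len = timedelta(hours=int(w["duration"].split()[0]))
        if w_start <= start and start + timedelta(hours=duration_h) <= w_start + w_len:
            return True
    return False

print(fits(datetime(2025, 7, 25, 9, 0), 3))   # True  (fits the 08:00 window)
print(fits(datetime(2025, 7, 25, 14, 0), 3))  # False (no window has room)
```
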
### Home Appliance Instructions

The home appliance instructions assume an idealized home appliance model. Under this model,
the home appliance can be operated in two operation modes:

| **Operation Mode ID** | **Description**                                                              |
| --------------------- | ---------------------------------------------------------------------------- |
| **RUN**               | The home appliance is started and runs until the end of its power sequence.  |
| **IDLE**              | The home appliance does not run.                                              |

The **operation mode factor** (0.0–1.0) is ignored.

% SPDX-License-Identifier: Apache-2.0

(server-api-page)=

# Server API

    "logo_only": False,
    "titles_only": True,
}
html_css_files = ["eos.css"]  # Make body size wider

# -- Options for autodoc -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html

```{include} ../../CHANGELOG.md
:relative-docs: ../
:relative-images:
```

% SPDX-License-Identifier: Apache-2.0

(develop-page)=

# Development Guide

## Development Prerequisites

Have or
[create](https://docs.github.com/en/get-started/start-your-journey/creating-an-account-on-github)
a [GitHub](https://github.com/) account.

Make sure all the source installation prerequisites are installed. See the
[installation guideline](#install-page) for a detailed list of tools.

Under Linux the [make](https://www.gnu.org/software/make/manual/make.html) tool should be installed,
as we have a lot of pre-fabricated commands for it.

Install your favorite editor or integrated development environment (IDE):

- Full-Featured IDEs

  - [Eclipse + PyDev](https://www.pydev.org/)
  - [KDevelop](https://www.kdevelop.org/)
  - [PyCharm](https://www.jetbrains.com/pycharm/)
  - ...

- Code Editors with Python Support

  - [Visual Studio Code (VS Code)](https://code.visualstudio.com/)
  - [Sublime Text](https://www.sublimetext.com/)
  - [Atom / Pulsar](https://pulsar-edit.dev/)
  - ...

- Python-Focused or Beginner-Friendly IDEs

  - [Spyder](https://www.spyder-ide.org/)
  - [Thonny](https://thonny.org/)
  - [IDLE](https://www.python.org/downloads/)
  - ...

## Step 1 – Fork the Repository

[Fork the EOS repository](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo)
to your GitHub account.

Clone your fork locally and add the EOS upstream remote to track updates.

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         git clone https://github.com/<YOURUSERNAME>/EOS.git
         cd EOS
         git remote add eos https://github.com/Akkudoktor-EOS/EOS.git

   .. tab:: Linux

      .. code-block:: bash

         git clone https://github.com/<YOURUSERNAME>/EOS.git
         cd EOS
         git remote add eos https://github.com/Akkudoktor-EOS/EOS.git
```

Replace `<YOURUSERNAME>` with your GitHub username.

## Step 2 – Development Setup

This is recommended for developers who want to modify the source code and test changes locally.

### Step 2.1 – Create a Virtual Environment

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         python -m venv .venv
         .venv\Scripts\pip install --upgrade pip
         .venv\Scripts\pip install -r requirements-dev.txt
         .venv\Scripts\pip install build
         .venv\Scripts\pip install -e .

   .. tab:: Linux

      .. code-block:: bash

         python3 -m venv .venv
         .venv/bin/pip install --upgrade pip
         .venv/bin/pip install -r requirements-dev.txt
         .venv/bin/pip install build
         .venv/bin/pip install -e .

   .. tab:: Linux Make

      .. code-block:: bash

         make install
```

### Step 2.2 – Activate the Virtual Environment

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         .venv\Scripts\activate.bat

   .. tab:: Linux

      .. code-block:: bash

         source .venv/bin/activate
```

### Step 2.3 – Install pre-commit

Our code style and commit message checks use [`pre-commit`](https://pre-commit.com).

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         pre-commit install
         pre-commit install --hook-type commit-msg --hook-type pre-push

   .. tab:: Linux

      .. code-block:: bash

         pre-commit install
         pre-commit install --hook-type commit-msg --hook-type pre-push
```

## Step 3 – Run EOS

Make EOS accessible at [http://localhost:8503/docs](http://localhost:8503/docs) and EOSdash at
[http://localhost:8504](http://localhost:8504).

### Option 1 – Using Python Virtual Environment

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         python -m akkudoktoreos.server.eos

   .. tab:: Linux

      .. code-block:: bash

         python -m akkudoktoreos.server.eos

   .. tab:: Linux Make

      .. code-block:: bash

         make run
```

To have full control of the servers during development, you may start the servers independently,
e.g. in different terminal windows. Don't forget to activate the virtual environment in each
terminal window.

:::{admonition} Note
:class: note
If you killed or stopped the servers shortly before, the ports may still be occupied by the last
processes. It may take more than 60 seconds until the ports are released.
:::

You may add the `--reload true` parameter to have the servers automatically restarted on source
code changes. It is best to also add `--startup_eosdash false` to EOS to prevent the automatic
restart from interfering with the EOS server trying to start EOSdash.

<!-- pyml disable line-length -->
```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         python -m akkudoktoreos.server.eosdash --host localhost --port 8504 --log_level DEBUG --reload true

   .. tab:: Linux

      .. code-block:: bash

         python -m akkudoktoreos.server.eosdash --host localhost --port 8504 --log_level DEBUG --reload true

   .. tab:: Linux Make

      .. code-block:: bash

         make run-dash-dev
```

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         python -m akkudoktoreos.server.eos --host localhost --port 8503 --log_level DEBUG --startup_eosdash false --reload true

   .. tab:: Linux

      .. code-block:: bash

         python -m akkudoktoreos.server.eos --host localhost --port 8503 --log_level DEBUG --startup_eosdash false --reload true

   .. tab:: Linux Make

      .. code-block:: bash

         make run-dev
```
<!-- pyml enable line-length -->

### Option 2 – Using Docker

#### Step 3.1 – Build the Docker Image

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         docker build -t akkudoktoreos .

   .. tab:: Linux

      .. code-block:: bash

         docker build -t akkudoktoreos .
```

#### Step 3.2 – Run the Container

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         docker run -d `
           --name akkudoktoreos `
           -p 8503:8503 `
           -p 8504:8504 `
           -e OPENBLAS_NUM_THREADS=1 `
           -e OMP_NUM_THREADS=1 `
           -e MKL_NUM_THREADS=1 `
           -e EOS_SERVER__HOST=0.0.0.0 `
           -e EOS_SERVER__PORT=8503 `
           -e EOS_SERVER__EOSDASH_HOST=0.0.0.0 `
           -e EOS_SERVER__EOSDASH_PORT=8504 `
           --ulimit nproc=65535:65535 `
           --ulimit nofile=65535:65535 `
           --security-opt seccomp=unconfined `
           akkudoktoreos:latest

   .. tab:: Linux

      .. code-block:: bash

         docker run -d \
           --name akkudoktoreos \
           -p 8503:8503 \
           -p 8504:8504 \
           -e OPENBLAS_NUM_THREADS=1 \
           -e OMP_NUM_THREADS=1 \
           -e MKL_NUM_THREADS=1 \
           -e EOS_SERVER__HOST=0.0.0.0 \
           -e EOS_SERVER__PORT=8503 \
           -e EOS_SERVER__EOSDASH_HOST=0.0.0.0 \
           -e EOS_SERVER__EOSDASH_PORT=8504 \
           --ulimit nproc=65535:65535 \
           --ulimit nofile=65535:65535 \
           --security-opt seccomp=unconfined \
           akkudoktoreos:latest
```

#### Step 3.3 – Manage the Container

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         docker logs -f akkudoktoreos
         docker stop akkudoktoreos
         docker start akkudoktoreos
         docker rm -f akkudoktoreos

   .. tab:: Linux

      .. code-block:: bash

         docker logs -f akkudoktoreos
         docker stop akkudoktoreos
         docker start akkudoktoreos
         docker rm -f akkudoktoreos
```

For detailed Docker instructions, refer to the [Installation Guideline](install-page).

## Step 4 – Create the Changes

### Step 4.1 – Create a development branch

```bash
git checkout -b <MY_DEVELOPMENT_BRANCH>
```

Replace `<MY_DEVELOPMENT_BRANCH>` with the development branch name. The branch name shall be of the
format `(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+`, e.g.:

- feat/my_cool_new_feature
- fix/this_annoying_bug
- ...

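If you want to check a name locally before pushing, the pattern above (the same one the project's
commitizen configuration enforces) can be tested with a few lines of Python; the script itself is
just a sketch:

```python
# Sketch: validate a branch name against the enforced pattern locally.
import re

BRANCH_PATTERN = re.compile(r"^(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+$")

for name in ["feat/my_cool_new_feature", "Feature/Bad_Name"]:
    status = "ok" if BRANCH_PATTERN.match(name) else "invalid"
    print(f"{name}: {status}")
```
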
### Step 4.2 – Edit the sources

Use your favorite editor or IDE to edit the sources.

### Step 4.3 – Check the source code for correct format

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         pre-commit run --all-files

   .. tab:: Linux

      .. code-block:: bash

         pre-commit run --all-files

   .. tab:: Linux Make

      .. code-block:: bash

         make format
```

### Step 4.4 – Test the changes

At a minimum, you should run the module tests:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         pytest -vs --cov src --cov-report term-missing

   .. tab:: Linux

      .. code-block:: bash

         pytest -vs --cov src --cov-report term-missing

   .. tab:: Linux Make

      .. code-block:: bash

         make test
```

You should also run the system tests. These include additional tests that interact with real
resources:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         pytest --system-test -vs --cov src --cov-report term-missing

   .. tab:: Linux

      .. code-block:: bash

         pytest --system-test -vs --cov src --cov-report term-missing

   .. tab:: Linux Make

      .. code-block:: bash

         make test-system
```

To do profiling, use:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         python tests/single_test_optimization.py --profile

   .. tab:: Linux

      .. code-block:: bash

         python tests/single_test_optimization.py --profile

   .. tab:: Linux Make

      .. code-block:: bash

         make test-profile
```

### Step 4.5 – Commit the changes

Add the changed and new files to the commit.

Create a commit.

## Step 5 – Pull Request

Before creating a pull request, ensure the changes are based on the latest EOS upstream.

Update your local main branch:

```bash
git checkout main
git pull eos main
```

Switch back to your local development branch and rebase it onto main.

```bash
git checkout <MY_DEVELOPMENT_BRANCH>
git rebase -i main
```

During rebase you can also squash your changes into one (preferred) or a set of commits that have
proper commit messages and can easily be reviewed.

After the rebase, run the tests once again.

If everything is OK, push the commit(s) to your fork on GitHub.

```bash
git push -f origin
```

If your push intentionally does not comply with the rules, you can skip the verification with:

```bash
git push -f --no-verify origin
```

<!-- pyml disable line-length -->
Once ready, [submit a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)
with your fork to the [Akkudoktor-EOS/EOS@main](https://github.com/Akkudoktor-EOS/EOS) repository.
<!-- pyml enable line-length -->

## Developer Tips

### Keep Your Fork Updated

Regularly pull changes from the eos repository to avoid merge conflicts:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         git checkout main
         git pull eos main
         git push origin

   .. tab:: Linux

      .. code-block:: bash

         git checkout main
         git pull eos main
         git push origin
```

Rebase your development branch onto the latest eos main branch.

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         git checkout <MY_DEVELOPMENT_BRANCH>
         git rebase -i main

   .. tab:: Linux

      .. code-block:: bash

         git checkout <MY_DEVELOPMENT_BRANCH>
         git rebase -i main
```

### Create Feature Branches

Work in separate branches for each feature or bug fix:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         git checkout -b feat/my-feature

   .. tab:: Linux

      .. code-block:: bash

         git checkout -b feat/my-feature
```

### Run Tests Frequently

Ensure your changes do not break existing functionality:

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         pytest -vs --cov src --cov-report term-missing

   .. tab:: Linux

      .. code-block:: bash

         pytest -vs --cov src --cov-report term-missing

   .. tab:: Linux Make

      .. code-block:: bash

         make test
```

### Follow Coding Standards

Keep your code consistent with existing style and conventions.

### Use Issues for Discussion

Before making major changes, open an issue or discuss with maintainers.

### Document Changes

Update docstrings, comments, and any relevant documentation.

% SPDX-License-Identifier: Apache-2.0

(getting-started-page)=

# Getting Started

## Installation and Running

AkkudoktorEOS can be installed and run using several different methods:

- **Release package** (for stable versions)
- **Docker image** (for easy deployment)
- **From source** (for developers)

See the [installation guideline](#install-page) for detailed instructions on each method.

### Where to Find AkkudoktorEOS

- **Release Packages**: [GitHub Releases](https://github.com/Akkudoktor-EOS/EOS/releases)
- **Docker Images**: [Docker Hub](https://hub.docker.com/r/akkudoktor/eos)
- **Source Code**: [GitHub Repository](https://github.com/Akkudoktor-EOS/EOS)

## Configuration

AkkudoktorEOS uses the `EOS.config.json` file to manage all configuration settings.

### Default Configuration

If essential configuration settings are missing, the application automatically uses a default
configuration to get you started quickly.

### Custom Configuration Directory

You can specify a custom location for your configuration by setting the `EOS_DIR` environment
variable:

```bash
export EOS_DIR=/path/to/your/config
```

**How it works:**

- **If `EOS.config.json` exists** in the `EOS_DIR` directory → the application uses this
  configuration
- **If `EOS.config.json` doesn't exist** → the application copies `default.config.json` to
  `EOS_DIR` as `EOS.config.json`

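The resolution logic boils down to a few lines. This sketch mirrors the two rules above; the paths
and the helper name are illustrative assumptions, not the EOS implementation:

```python
# Sketch of the EOS_DIR configuration resolution described above.
# Paths and the helper name are illustrative, not the EOS implementation.
import os
import shutil
from pathlib import Path

def resolve_config(default_config: Path) -> Path:
    eos_dir = Path(os.environ.get("EOS_DIR", "."))
    config = eos_dir / "EOS.config.json"
    if not config.exists():
        # No existing configuration: seed EOS_DIR from the default file.
        shutil.copy(default_config, config)
    return config
```
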
### Creating Your Configuration

There are three ways to configure AkkudoktorEOS:

1. **EOSdash (Recommended)** - The easiest method is to use the web-based dashboard at
   [http://localhost:8504](http://localhost:8504)
2. **Manual editing** - Create or edit the `EOS.config.json` file directly in your preferred text
   editor
3. **Server API** - Programmatically change configuration through the [server API](#server-api-page)

For a complete reference of all available configuration options, see the
[configuration guideline](#configuration-page).

## Quick Start Example

```bash
# Pull the latest docker image
docker pull akkudoktor/eos:latest

# Run the application
docker run -d \
  --name akkudoktoreos \
  -p 8503:8503 \
  -p 8504:8504 \
  -e OPENBLAS_NUM_THREADS=1 \
  -e OMP_NUM_THREADS=1 \
  -e MKL_NUM_THREADS=1 \
  -e EOS_SERVER__HOST=0.0.0.0 \
  -e EOS_SERVER__PORT=8503 \
  -e EOS_SERVER__EOSDASH_HOST=0.0.0.0 \
  -e EOS_SERVER__EOSDASH_PORT=8504 \
  --ulimit nproc=65535:65535 \
  --ulimit nofile=65535:65535 \
  --security-opt seccomp=unconfined \
  akkudoktor/eos:latest

# Access the dashboard
open http://localhost:8504
```

% SPDX-License-Identifier: Apache-2.0

(install-page)=

# Installation Guide

This guide provides different methods to install AkkudoktorEOS:

- Installation from Source (GitHub)
- Installation from Release Package (GitHub)
- Installation with Docker (DockerHub)
- Installation with Docker (docker-compose)

Choose the method that best suits your needs.

:::{admonition} Tip
:class: note
If you need to update instead, see the [Update Guideline](update-page). For reverting to a previous
release, see the [Revert Guideline](revert-page).
:::

## Installation Prerequisites

Before installing, ensure you have the following:

### For Source / Release Installation

- Python 3.10 or higher
- pip
- Git (only for source)
- Tar/Zip (for release package)

### For Docker Installation

- Docker Engine 20.10 or higher
- Docker Compose (optional, recommended)

## Installation from Source (GitHub) (M1)

Recommended for developers or users wanting the latest updates.

### 1) Clone the Repository (M1)

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         git clone https://github.com/Akkudoktor-EOS/EOS.git
         cd EOS

   .. tab:: Linux

      .. code-block:: bash

         git clone https://github.com/Akkudoktor-EOS/EOS.git
         cd EOS
```

### 2) Create a Virtual Environment and install dependencies (M1)

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         python -m venv .venv
         .venv\Scripts\pip install -r requirements.txt
         .venv\Scripts\pip install -e .

   .. tab:: Linux

      .. code-block:: bash

         python -m venv .venv
         .venv/bin/pip install -r requirements.txt
         .venv/bin/pip install -e .
```

### 3) Run EOS (M1)

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         .venv\Scripts\python -m akkudoktoreos.server.eos

   .. tab:: Linux

      .. code-block:: bash

         .venv/bin/python -m akkudoktoreos.server.eos
```

EOS is now available at:

- API: [http://localhost:8503/docs](http://localhost:8503/docs)
- EOSdash: [http://localhost:8504](http://localhost:8504)

If you want to make EOS and EOSdash accessible from outside of your machine or container at this
stage of the installation, provide appropriate IP addresses on startup.

<!-- pyml disable line-length -->
```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         .venv\Scripts\python -m akkudoktoreos.server.eos --host 0.0.0.0 --eosdash-host 0.0.0.0

   .. tab:: Linux

      .. code-block:: bash

         .venv/bin/python -m akkudoktoreos.server.eos --host 0.0.0.0 --eosdash-host 0.0.0.0
```
<!-- pyml enable line-length -->

### 4) Configure EOS (M1)

Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS.

## Installation from Release Package (GitHub) (M2)

This method is recommended for users who want a stable, tested version.

### 1) Download the Latest Release (M2)

Visit the [Releases page](https://github.com/Akkudoktor-EOS/EOS/tags) and download the latest
release package (e.g., `akkudoktoreos-v0.2.0.tar.gz` or `akkudoktoreos-v0.2.0.zip`).

### 2) Extract the Package (M2)

```bash
tar -xzf akkudoktoreos-v0.2.0.tar.gz  # For .tar.gz
# or
unzip akkudoktoreos-v0.2.0.zip        # For .zip

cd akkudoktoreos-v0.2.0
```

### 3) Create a virtual environment, run, and configure EOS (M2)

Follow steps 2), 3), and 4) of method M1, starting at
`2) Create a Virtual Environment and install dependencies`.

### 4) Update the source code (M2)

To extract a new release to a new directory, just proceed with method M2 step 1) for the new
release.

You may remove the old release directory afterwards.

## Installation with Docker (DockerHub) (M3)

This method is recommended for easy deployment and containerized environments.

### 1) Pull the Docker Image (M3)

```bash
docker pull akkudoktor/eos:latest
```

For a specific version:

```bash
docker pull akkudoktor/eos:v<version>
```

### 2) Run the Container (M3)

**Basic run:**

```bash
docker run -d \
  --name akkudoktoreos \
  -p 8503:8503 \
  -p 8504:8504 \
  -e OPENBLAS_NUM_THREADS=1 \
  -e OMP_NUM_THREADS=1 \
  -e MKL_NUM_THREADS=1 \
  -e EOS_SERVER__HOST=0.0.0.0 \
  -e EOS_SERVER__PORT=8503 \
  -e EOS_SERVER__EOSDASH_HOST=0.0.0.0 \
  -e EOS_SERVER__EOSDASH_PORT=8504 \
  --ulimit nproc=65535:65535 \
  --ulimit nofile=65535:65535 \
  --security-opt seccomp=unconfined \
  akkudoktor/eos:latest
```

### 3) Verify the Container is Running (M3)

```bash
docker ps
docker logs akkudoktoreos
```

EOS should now be accessible at [http://localhost:8503/docs](http://localhost:8503/docs) and EOSdash
should be available at [http://localhost:8504](http://localhost:8504).

### 4) Configure EOS (M3)

Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS.

## Installation with Docker (docker-compose) (M4)

### 1) Get the akkudoktoreos source code (M4)

You may use either method M1 or method M2 to get the source code.

### 2) Build and run the container (M4)

```{eval-rst}
.. tabs::

   .. tab:: Windows

      .. code-block:: powershell

         docker compose up --build

   .. tab:: Linux

      .. code-block:: bash

         docker compose up --build
```

### 3) Verify the Container is Running (M4)

```bash
docker ps
docker logs akkudoktoreos
```

EOS should now be accessible at [http://localhost:8503/docs](http://localhost:8503/docs) and EOSdash
should be available at [http://localhost:8504](http://localhost:8504).

### 4) Configure EOS (M4)

Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS.

## Helpful Docker Commands

**View logs:**

```bash
docker logs -f akkudoktoreos
```

**Stop the container:**

```bash
docker stop akkudoktoreos
```

**Start the container:**

```bash
docker start akkudoktoreos
```

**Remove the container:**

```bash
docker rm -f akkudoktoreos
```

**Update to latest version:**

```bash
docker pull akkudoktor/eos:latest
docker stop akkudoktoreos
docker rm akkudoktoreos
# Then run the container again with the run command
```

% SPDX-License-Identifier: Apache-2.0

(release-page)=

# Release Process

This document describes how to prepare and publish a new release **via a Pull Request from a fork**,
and how to set a **development version** after the release.

## ✅ Overview of the Process

| Step | Actor       | Action |
|------|-------------|--------|
| 1    | Contributor | Prepare a release branch **in your fork** using Commitizen |
| 2    | Contributor | Open a **Pull Request to upstream** (`Akkudoktor-EOS/EOS`) |
| 3    | Maintainer  | Review and **merge the release PR** |
| 4    | Maintainer  | Create the **GitHub Release and tag** |
| 5    | Maintainer  | Set the **development version marker** via a follow-up PR |

## 🔄 Detailed Workflow

### 1️⃣ Contributor: Prepare the Release in Your Fork

#### Clone and sync your fork

```bash
git clone https://github.com/<your-username>/EOS
cd EOS
git remote add eos https://github.com/Akkudoktor-EOS/EOS

git fetch eos
git checkout main
git pull eos main
```

#### Create the release branch

```bash
git checkout -b release/vX.Y.Z
```

#### Bump the version information

At least update

- pyproject.toml
- src/akkudoktoreos/core/version.py
- src/akkudoktoreos/data/default.config.json
- Makefile

and the generated documentation:

```bash
make bump VERSION=0.1.0+dev NEW_VERSION=X.Y.Z
make gen-docs
```

You may check the changes by:

```bash
git diff
```

#### Create a new CHANGELOG.md entry

Edit CHANGELOG.md.

#### Create the new release commit

```bash
git add pyproject.toml src/akkudoktoreos/core/version.py \
    src/akkudoktoreos/data/default.config.json Makefile CHANGELOG.md
git commit -s -m "chore(release): Release vX.Y.Z"
```

#### Push the branch to your fork

```bash
git push --set-upstream origin release/vX.Y.Z
```

### 2️⃣ Contributor: Open the Release Pull Request

| From                                 | To                        |
| ------------------------------------ | ------------------------- |
| `<your-username>/EOS:release/vX.Y.Z` | `Akkudoktor-EOS/EOS:main` |

**PR Title:**

```text
chore(release): release vX.Y.Z
```

**PR Description Template:**

```markdown
## Release vX.Y.Z

This pull request prepares release **vX.Y.Z**.

### Changes

- Version bump
- Changelog update

### Changelog Summary

<!-- Copy key highlights from CHANGELOG.md here -->

See `CHANGELOG.md` for full details.
```

### 3️⃣ Maintainer: Review and Merge the Release PR

**Review Checklist:**

- ✅ Only version files and `CHANGELOG.md` are modified
- ✅ Version numbers are consistent
- ✅ Changelog is complete and properly formatted
- ✅ No unrelated changes are included

**Merge Strategy:**

- Prefer **Merge Commit** (or **Squash Merge**, per project preference)
- Use commit message: `chore(release): Release vX.Y.Z`

### 4️⃣ Maintainer: Publish the GitHub Release

1. Go to **GitHub → Releases → Draft a new release**
2. **Choose tag** → enter `vX.Y.Z` (GitHub creates the tag on publish)
3. **Release title:** `vX.Y.Z`
4. **Paste changelog entry** from `CHANGELOG.md`
5. Optionally enable **Set as latest release**
6. Click **Publish release** 🎉

### 5️⃣ Maintainer: Prepare the Development Version Marker

**Sync local copy:**

```bash
git fetch eos
git checkout main
git pull eos main
```

**Create a development version branch:**

```bash
git checkout -b release/vX.Y.Z_dev
```

**Set development version marker manually:**

```bash
make bump VERSION=X.Y.Z NEW_VERSION=X.Y.Z+dev
make gen-docs
```

```bash
git add pyproject.toml src/akkudoktoreos/core/version.py \
    src/akkudoktoreos/data/default.config.json Makefile
git commit -s -m "chore: set development version marker X.Y.Z+dev"
```

```bash
git push --set-upstream origin release/vX.Y.Z_dev
```

### 6️⃣ Maintainer (or Contributor): Open the Development Version PR

| From                                     | To                        |
| ---------------------------------------- | ------------------------- |
| `<your-username>/EOS:release/vX.Y.Z_dev` | `Akkudoktor-EOS/EOS:main` |

**PR Title:**

```text
chore: development version vX.Y.Z+dev
```

**PR Description Template:**

```markdown
## Development version vX.Y.Z+dev

This pull request marks the repository as back in active development.

### Changes

- Set version to `vX.Y.Z+dev`

No changelog entry is needed.
```

### 7️⃣ Maintainer: Review and Merge the Development Version PR

**Checklist:**

- ✅ Only version files updated to `+dev`
- ✅ No unintended changes

**Merge Strategy:**

- Merge with commit message: `chore: development version vX.Y.Z+dev`

## ✅ Quick Reference

| Step | Actor | Action |
| ---- | ----- | ------ |
| **1. Prepare release branch** | Contributor | Bump version & changelog via Commitizen |
| **2. Open release PR** | Contributor | Submit release for review |
| **3. Review & merge release PR** | Maintainer | Finalize changes into `main` |
| **4. Publish GitHub Release** | Maintainer | Create tag & notify users |
| **5. Prepare development version branch** | Maintainer | Set development marker |
| **6. Open development PR** | Maintainer (or Contributor) | Propose returning to development state |
| **7. Review & merge development PR** | Maintainer | Mark repository as back in development |

% SPDX-License-Identifier: Apache-2.0

(revert-page)=

# Revert Guide

This guide explains how to **revert AkkudoktorEOS to a previous version**.
The exact methods and steps differ depending on how EOS was installed:

- M1/M2: Reverting when Installed from Source or Release Package
- M3/M4: Reverting when Installed via Docker

:::{admonition} Important
:class: warning
Before reverting, ensure you have a backup of your `EOS.config.json`.
EOS also maintains internal configuration backups that can be restored after a downgrade.
:::

:::{admonition} Tip
:class: note
If you need to update instead, see the [Update Guideline](update-page).
:::

## Revert to a Previous Version of EOS

You can revert to a previous version using the same installation method you originally selected.
See: [Installation Guideline](install-page)

## Reverting when Installed from Source or Release Package (M1/M2)

### 1) Locate the target version (M1/M2)

Go to the GitHub Releases page:

> <https://github.com/Akkudoktor-EOS/EOS/tags>

### 2) Download or check out that version (M1/M2)

#### Git (source) (M1)

```bash
git fetch
git checkout v<version>
```

Example:

```bash
git checkout v0.1.0
```

Then reinstall dependencies:

```bash
.venv/bin/pip install -r requirements.txt --upgrade
```

#### Release package (M2)

Download and extract the desired ZIP or TAR release.
Refer to **Method 2** in the [Installation Guideline](install-page).

### 3) Restart EOS (M1/M2)

```bash
.venv/bin/python -m akkudoktoreos.server.eos
```

### 4) Restore configuration (optional) (M1/M2)

If your configuration changed since the downgrade, you may restore a previous backup:

- via **EOSdash**

  Admin → configuration → Revert to backup

  or

  Admin → configuration → Import from file

- via **REST**

  ```bash
  curl -X PUT "http://<host>:8503/v1/config/revert?backup_id=<backup>"
  ```

## Reverting when Installed via Docker (M3/M4)

### 1) Pull the desired image version (M3/M4)

```bash
docker pull akkudoktor/eos:v<version>
```

Example:

```bash
docker pull akkudoktor/eos:v0.1.0
```

### 2) Stop and remove the current container (M3/M4)

```bash
docker stop akkudoktoreos
docker rm akkudoktoreos
```

### 3) Start a container with the selected version (M3/M4)

Start EOS as usual, using your existing `docker run` or `docker compose` setup
(see Method 3 or Method 4 in the [Installation Guideline](install-page)).

### 4) Restore configuration (optional) (M3/M4)

In many cases configuration will migrate automatically.
If needed, you may restore a configuration backup:

- via **EOSdash**

  Admin → configuration → Revert to backup

  or

  Admin → configuration → Import from file

- via **REST**

  ```bash
  curl -X PUT "http://<host>:8503/v1/config/revert?backup_id=<backup>"
  ```

## About Configuration Backups

EOS keeps configuration backup files next to your active `EOS.config.json`.

You can list and restore backups:

- via **EOSdash UI**
- via **REST API**

### List available backups

```bash
GET /v1/config/backups
```

### Restore backup

```bash
PUT /v1/config/revert?backup_id=<id>
```

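For scripted recovery, the same two calls can be driven from Python. A minimal sketch, assuming the
default host and port used throughout this guide; the exact shape of the backup listing response
may differ, so inspect it before relying on it:

```python
# Sketch: list configuration backups and revert to a chosen one.
# Host/port are the defaults assumed in this guide, not a fixed requirement.
import requests

base = "http://localhost:8503"

backups = requests.get(f"{base}/v1/config/backups", timeout=10).json()
print(backups)  # inspect the available backup ids

backup_id = "<id>"  # pick one id from the listing above
response = requests.put(
    f"{base}/v1/config/revert",
    params={"backup_id": backup_id},
    timeout=10,
)
response.raise_for_status()
```
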
:::{admonition} Important
:class: warning
If no backup file is available, create or copy a previously saved `EOS.config.json` before
reverting.
:::

:caption: How-To Guides

develop/CONTRIBUTING.md
develop/install.md
develop/update.md
develop/revert.md

```

akkudoktoreos/architecture.md
akkudoktoreos/configuration.md
akkudoktoreos/configtimewindow.md
akkudoktoreos/optimpost.md
akkudoktoreos/optimauto.md
akkudoktoreos/resource.md
akkudoktoreos/prediction.md
akkudoktoreos/measurement.md
akkudoktoreos/integration.md
akkudoktoreos/logging.md
akkudoktoreos/serverapi.md
akkudoktoreos/api.rst

```

```{toctree}
:maxdepth: 2
:caption: Development

develop/develop.md
develop/release.md
develop/CHANGELOG.md

```

## Indices and tables

- {ref}`genindex`

[project]
name = "akkudoktor-eos"
version = "0.2.0+dev"
authors = [
  { name="Andreas Schmitz", email="author@example.com" },
]

@@ -43,18 +43,12 @@ profile = "black"
|
|||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
line-length = 100
|
line-length = 100
|
||||||
exclude = [
|
|
||||||
"tests",
|
|
||||||
"scripts",
|
|
||||||
]
|
|
||||||
output-format = "full"
|
|
||||||
|
|
||||||
[tool.ruff.lint]
|
[tool.ruff.lint]
|
||||||
select = [
|
select = [
|
||||||
"F", # Enable all `Pyflakes` rules.
|
"F", # Enable all `Pyflakes` rules.
|
||||||
"D", # Enable all `pydocstyle` rules, limiting to those that adhere to the
|
"D", # Enable all `pydocstyle` rules, limiting to those that adhere to the
|
||||||
# Google convention via `convention = "google"`, below.
|
# Google convention via `convention = "google"`, below.
|
||||||
"S", # Enable all `flake8-bandit` rules.
|
|
||||||
]
|
]
|
||||||
ignore = [
|
ignore = [
|
||||||
# Prevent errors due to ruff false positives
|
# Prevent errors due to ruff false positives
|
||||||
[[tool.mypy.overrides]]
module = "xprocess.*"
ignore_missing_imports = true

[tool.commitizen]
name = "cz_conventional_commits"
version_scheme = "semver"
version = "0.2.0+dev" # <-- Set your current version here
tag_format = "v$version"

# Files to automatically update when bumping version
update_changelog_on_bump = true
changelog_incremental = true
annotated_tag = true
bump_message = "chore(release): $current_version → $new_version"

# Branch validation settings
branch_validation = true
branch_pattern = "^(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+$"

# Customize changelog generation
[tool.commitizen.changelog]
path = "CHANGELOG.md"
template = "keepachangelog"

# If your version is stored in multiple files (Python modules, docs etc.), add them here
[tool.commitizen.files]
version = [
    "pyproject.toml", # Auto-update project version
    "src/akkudoktoreos/core/version.py",
    "src/akkudoktoreos/data/default.config.json"
]

-r requirements.txt

# Pre-commit framework - basic package requirements handled by pre-commit itself
# - pre-commit-hooks
# - isort
# - ruff
# - mypy (mirrors-mypy) - sync with requirements-dev.txt (if on pypi)
# - pymarkdown
# - commitizen - sync with requirements-dev.txt (if on pypi)
pre-commit==4.3.0
mypy==1.18.2
types-requests==2.32.4.20250913 # for mypy
pandas-stubs==2.3.2.250926 # for mypy
tokenize-rt==6.2.0 # for mypy
commitizen==4.9.1
deprecated==1.3.1 # for commitizen

# Sphinx
sphinx==8.2.3
sphinx_rtd_theme==3.0.2
sphinx-tabs==3.4.7
GitPython==3.1.45
myst-parser==4.0.1

# Pytest
pytest==9.0.0
pytest-cov==7.0.0
coverage==7.11.3
pytest-xprocess==1.0.2

babel==2.17.0
beautifulsoup4==4.14.2
cachebox==5.1.0
numpy==2.3.4
numpydantic==1.7.0
matplotlib==3.10.7
contourpy==1.3.3
fastapi[standard-no-fastapi-cloud-cli]==0.121.1
fastapi_cli==0.0.16
rich-toolkit==0.15.1
python-fasthtml==0.12.33
MonsterUI==1.0.32
markdown-it-py==3.0.0
mdit-py-plugins==0.5.0
bokeh==3.8.1
uvicorn==0.38.0
scikit-learn==1.7.2
tzfpy==1.1.0
deap==1.4.3
requests==2.32.5
pandas==2.3.3
pendulum==3.1.0
platformdirs==4.5.0
psutil==7.1.3
pvlib==0.13.1
pydantic==2.12.4
pydantic_extra_types==2.10.6
statsmodels==0.14.5
pydantic-settings==2.12.0
linkify-it-py==2.0.3
loguru==0.7.3

@@ -1,170 +0,0 @@
-"""Update version strings in multiple project files only if the old version matches.
-
-This script updates version information in:
-- pyproject.toml
-- src/akkudoktoreos/core/version.py
-- src/akkudoktoreos/data/default.config.json
-- Makefile
-
-Supported version formats:
-- __version__ = "<version>"
-- version = "<version>"
-- "version": "<version>"
-- VERSION ?: <version>
-
-It will:
-- Replace VERSION → NEW_VERSION if the old version is found.
-- Report which files were updated.
-- Report which files contained mismatched versions.
-- Report which files had no version.
-
-Usage:
-    python bump_version.py VERSION NEW_VERSION
-
-Args:
-    VERSION (str): Version expected before replacement.
-    NEW_VERSION (str): Version to write.
-
-"""
-#!/usr/bin/env python3
-import argparse
-import glob
-import os
-import re
-import shutil
-from pathlib import Path
-from typing import List, Tuple
-
-# Patterns to match version strings
-VERSION_PATTERNS = [
-    re.compile(r'(__version__\s*=\s*")(?P<ver>[^"]+)(")'),
-    re.compile(r'(version\s*=\s*")(?P<ver>[^"]+)(")'),
-    re.compile(r'("version"\s*:\s*")(?P<ver>[^"]+)(")'),
-    re.compile(r'(VERSION\s*\?=\s*)(?P<ver>[^\s]+)'), # For Makefile: VERSION ?= 0.2.0
-]
-
-# Default files to process
-DEFAULT_FILES = [
-    "pyproject.toml",
-    "src/akkudoktoreos/core/version.py",
-    "src/akkudoktoreos/data/default.config.json",
-    "Makefile",
-]
-
-
-def backup_file(file_path: str) -> str:
-    """Create a backup of the given file with a .bak suffix.
-
-    Args:
-        file_path: Path to the file to backup.
-
-    Returns:
-        Path to the backup file.
-    """
-    backup_path = f"{file_path}.bak"
-    shutil.copy2(file_path, backup_path)
-    return backup_path
-
-
-def replace_version_in_file(
-    file_path: Path, old_version: str, new_version: str, dry_run: bool = False
-) -> Tuple[bool, bool]:
-    """
-    Replace old_version with new_version in the given file if it matches.
-
-    Args:
-        file_path: Path to the file to modify.
-        old_version: The old version to replace.
-        new_version: The new version to set.
-        dry_run: If True, don't actually modify files.
-
-    Returns:
-        Tuple[bool, bool]: (file_would_be_updated, old_version_found)
-    """
-    content = file_path.read_text()
-    new_content = content
-    old_version_found = False
-    file_would_be_updated = False
-
-    for pattern in VERSION_PATTERNS:
-        def repl(match):
-            nonlocal old_version_found, file_would_be_updated
-            ver = match.group("ver")
-            if ver == old_version:
-                old_version_found = True
-                file_would_be_updated = True
-                # Some patterns have 3 groups (like quotes)
-                if len(match.groups()) == 3:
-                    return f"{match.group(1)}{new_version}{match.group(3)}"
-                else:
-                    return f"{match.group(1)}{new_version}"
-            return match.group(0)
-
-        new_content = pattern.sub(repl, new_content)
-
-    if file_would_be_updated:
-        if dry_run:
-            print(f"[DRY-RUN] Would update {file_path}")
-        else:
-            backup_path = file_path.with_suffix(file_path.suffix + ".bak")
-            shutil.copy(file_path, backup_path)
-            file_path.write_text(new_content)
-            print(f"Updated {file_path} (backup saved to {backup_path})")
-    elif not old_version_found:
-        print(f"[SKIP] {file_path}: old version '{old_version}' not found")
-
-    return file_would_be_updated, old_version_found
-
-
-def main():
-    parser = argparse.ArgumentParser(description="Bump version across project files.")
-    parser.add_argument("old_version", help="Old version to replace")
-    parser.add_argument("new_version", help="New version to set")
-    parser.add_argument(
-        "--dry-run", action="store_true", help="Show what would be changed without modifying files"
-    )
-    parser.add_argument(
-        "--glob", nargs="*", help="Optional glob patterns to include additional files"
-    )
-    args = parser.parse_args()
-
-    updated_files = []
-    not_found_files = []
-
-    # Determine files to update
-    files_to_update: List[Path] = [Path(f) for f in DEFAULT_FILES]
-    if args.glob:
-        for pattern in args.glob:
-            files_to_update.extend(Path(".").glob(pattern))
-
-    files_to_update = list(dict.fromkeys(files_to_update)) # remove duplicates
-
-    any_updated = False
-    for file_path in files_to_update:
-        if file_path.exists() and file_path.is_file():
-            updated, _ = replace_version_in_file(
-                file_path, args.old_version, args.new_version, args.dry_run
-            )
-            any_updated |= updated
-            if updated:
-                updated_files.append(file_path)
-        else:
-            print(f"[SKIP] {file_path}: file does not exist")
-            not_found_files.append(file_path)
-
-    print("\nSummary:")
-    if updated_files:
-        print(f"Updated files ({len(updated_files)}):")
-        for f in updated_files:
-            print(f" {f}")
-    else:
-        print("No files were updated.")
-
-    if not_found_files:
-        print(f"Files where old version was not found ({len(not_found_files)}):")
-        for f in not_found_files:
-            print(f" {f}")
-
-
-if __name__ == "__main__":
-    main()
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-
-import subprocess
-import sys
-
-MESSAGE_PREFIX = "Converted to annotated tag:"
-
-def run(cmd, capture_output=False):
-    """Run a shell command and return output if needed."""
-    result = subprocess.run(cmd, shell=True, check=True, text=True, capture_output=capture_output)
-    return result.stdout.strip() if capture_output else None
-
-def get_all_tags():
-    """Return a list of all tags."""
-    return run("git tag", capture_output=True).splitlines()
-
-def is_lightweight(tag):
-    """Return True if a tag is lightweight (points to commit, not tag object)."""
-    return run(f"git cat-file -t {tag}", capture_output=True) == "commit"
-
-def get_commit_of_tag(tag):
-    """Return the commit SHA a tag points to."""
-    return run(f"git rev-list -n 1 {tag}", capture_output=True)
-
-def convert_tag(tag):
-    """Delete and recreate a tag as annotated."""
-    commit = get_commit_of_tag(tag)
-    print(f"Converting {tag} -> annotated ({commit})")
-    run(f"git tag -d {tag}")
-    run(f'git tag -a {tag} -m "{MESSAGE_PREFIX} {tag}" {commit}')
-
-def main():
-    dry_run = "--dry-run" in sys.argv
-    push = "--push" in sys.argv
-
-    tags = get_all_tags()
-    lightweight_tags = [t for t in tags if is_lightweight(t)]
-
-    if not lightweight_tags:
-        print("✅ No lightweight tags found.")
-        return
-
-    print("🔍 Lightweight tags found:\n " + "\n ".join(lightweight_tags))
-
-    if dry_run:
-        print("\n📝 Dry run: No changes will be made.")
-        return
-
-    confirm = input("\n⚠️ Convert ALL of these tags to annotated? (y/N): ").lower()
-    if confirm != "y":
-        print("❌ Aborted.")
-        return
-
-    for tag in lightweight_tags:
-        convert_tag(tag)
-
-    print("\n✅ Conversion complete.")
-
-    if push:
-        print("📤 Pushing updated tags to origin (force)...")
-        run("git push origin --tags --force")
-        print("✅ Tags pushed.")
-    else:
-        print("\n🚀 To push changes, run:\n git push origin --tags --force")
-
-if __name__ == "__main__":
-    print("=== Lightweight Tag Converter ===")
-    print("Usage: python convert_lightweight_tags.py [--dry-run] [--push]\n")
-    main()
@@ -1,47 +0,0 @@
-#!/usr/bin/env python3
-"""Branch name checker using regex (compatible with Commitizen v4.9.1).
-
-Cross-platform + .venv aware.
-"""
-
-import os
-import re
-import subprocess
-import sys
-from pathlib import Path
-
-
-def find_cz() -> str:
-    venv = os.getenv("VIRTUAL_ENV")
-    paths = [Path(venv)] if venv else []
-    paths.append(Path.cwd() / ".venv")
-
-    for base in paths:
-        cz = base / ("Scripts" if os.name == "nt" else "bin") / ("cz.exe" if os.name == "nt" else "cz")
-        if cz.exists():
-            return str(cz)
-    return "cz"
-
-
-def main():
-    # Get current branch name
-    try:
-        branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"], text=True).strip()
-    except subprocess.CalledProcessError:
-        print("❌ Could not determine current branch name.")
-        return 1
-
-    # Regex pattern
-    pattern = r"^(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+$"
-
-    print(f"🔍 Checking branch name '{branch}'...")
-    if not re.match(pattern, branch):
-        print(f"❌ Branch name '{branch}' does not match pattern '{pattern}'")
-        return 1
-
-    print("✅ Branch name is valid.")
-    return 0
-
-
-if __name__ == "__main__":
-    sys.exit(main())
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-"""Commitizen commit message checker that is .venv aware.
-
-Works for commits with -m or commit message file.
-"""
-
-import os
-import subprocess
-import sys
-from pathlib import Path
-
-
-def find_cz() -> str:
-    """Find Commitizen executable, preferring virtualenv."""
-    venv = os.getenv("VIRTUAL_ENV")
-    paths = []
-    if venv:
-        paths.append(Path(venv))
-    paths.append(Path.cwd() / ".venv")
-
-    for base in paths:
-        cz = base / ("Scripts" if os.name == "nt" else "bin") / ("cz.exe" if os.name == "nt" else "cz")
-        if cz.exists():
-            return str(cz)
-    return "cz"
-
-
-def main():
-    cz = find_cz()
-
-    # 1️⃣ Try commit-msg file (interactive commit)
-    commit_msg_file = sys.argv[1] if len(sys.argv) > 1 else None
-
-    # 2️⃣ If not file, fallback to -m message (Git sets GIT_COMMIT_MSG in some environments, or we create a temp file)
-    if not commit_msg_file:
-        msg = os.getenv("GIT_COMMIT_MSG") or ""
-        if not msg:
-            print("⚠️ No commit message file or environment message found. Skipping Commitizen check.")
-            return 0
-        import tempfile
-
-        with tempfile.NamedTemporaryFile("w+", delete=False) as tmp:
-            tmp.write(msg)
-            tmp.flush()
-            commit_msg_file = tmp.name
-
-    print(f"🔍 Checking commit message using {cz}...")
-
-    try:
-        subprocess.check_call([cz, "check", "--commit-msg-file", commit_msg_file])
-        print("✅ Commit message follows Commitizen convention.")
-        return 0
-    except subprocess.CalledProcessError:
-        print("❌ Commit message validation failed.")
-        return 1
-    finally:
-        # Clean up temp file if we created one
-        if 'tmp' in locals():
-            os.unlink(tmp.name)
-
-
-if __name__ == "__main__":
-    sys.exit(main())
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3
-"""Pre-push hook: Commitizen check for *new commits only*.
-
-Cross-platform + virtualenv-aware:
-- Prefers activated virtual environment (VIRTUAL_ENV)
-- Falls back to ./.venv if found
-- Falls back to global cz otherwise
-"""
-
-import os
-import subprocess
-import sys
-from pathlib import Path
-
-
-def find_cz_executable() -> str:
-    """Return path to Commitizen executable, preferring virtual environments."""
-    # 1️⃣ Active virtual environment (if running inside one)
-    venv_env = os.getenv("VIRTUAL_ENV")
-    if venv_env:
-        cz_path = Path(venv_env) / ("Scripts" if os.name == "nt" else "bin") / ("cz.exe" if os.name == "nt" else "cz")
-        if cz_path.exists():
-            return str(cz_path)
-
-    # 2️⃣ Local .venv in repo root
-    repo_venv = Path.cwd() / ".venv"
-    cz_path = repo_venv / ("Scripts" if os.name == "nt" else "bin") / ("cz.exe" if os.name == "nt" else "cz")
-    if cz_path.exists():
-        return str(cz_path)
-
-    # 3️⃣ Global fallback
-    return "cz"
-
-
-def get_merge_base() -> str | None:
-    """Return merge-base between HEAD and upstream branch, or None if unavailable."""
-    try:
-        return (
-            subprocess.check_output(
-                ["git", "merge-base", "@{u}", "HEAD"],
-                stderr=subprocess.DEVNULL,
-                text=True,
-            )
-            .strip()
-        )
-    except subprocess.CalledProcessError:
-        return None
-
-
-def main() -> int:
-    cz = find_cz_executable()
-    base = get_merge_base()
-
-    if not base:
-        print("⚠️ No upstream found; skipping Commitizen check for new commits.")
-        return 0
-
-    print(f"🔍 Using {cz} to check new commits from {base}..HEAD ...")
-
-    try:
-        subprocess.check_call([cz, "check", "--rev-range", f"{base}..HEAD"])
-        print("✅ All new commits follow Commitizen conventions.")
-        return 0
-    except subprocess.CalledProcessError as e:
-        print("❌ Commitizen check failed for one or more new commits.")
-        return e.returncode
-
-
-if __name__ == "__main__":
-    sys.exit(main())
@@ -150,7 +150,7 @@ def main():

     try:
         if args.input_file:
-            with open(args.input_file, "r", encoding="utf-8", newline=None) as f:
+            with open(args.input_file, "r", encoding="utf8") as f:
                 content = f.read()
         elif args.input:
             content = args.input
@@ -164,7 +164,7 @@ def main():
         )
         if args.output_file:
             # Write to file
-            with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
+            with open(args.output_file, "w", encoding="utf8") as f:
                 f.write(extracted_content)
         else:
             # Write to std output
@@ -2,378 +2,135 @@
 """Utility functions for Configuration specification generation."""

 import argparse
-import json
-import os
-import re
 import sys
-import textwrap
-from pathlib import Path
-from typing import Any, Type, Union

-from loguru import logger
-from pydantic.fields import ComputedFieldInfo, FieldInfo
-from pydantic_core import PydanticUndefined
+from akkudoktoreos.config.config import get_config
+from akkudoktoreos.core.logging import get_logger

-from akkudoktoreos.config.config import ConfigEOS, GeneralSettings, get_config
-from akkudoktoreos.core.pydantic import PydanticBaseModel
-from akkudoktoreos.utils.datetimeutil import to_datetime
+logger = get_logger(__name__)

-documented_types: set[PydanticBaseModel] = set()
-undocumented_types: dict[PydanticBaseModel, tuple[str, list[str]]] = dict()
+config_eos = get_config()

-global_config_dict: dict[str, Any] = dict()
+# Fixed set of prefixes to filter configuration values and their respective titles
+CONFIG_PREFIXES = {
+    "battery": "Battery Device Simulation Configuration",
+    "bev": "Battery Electric Vehicle Device Simulation Configuration",
+    "dishwasher": "Dishwasher Device Simulation Configuration",
+    "inverter": "Inverter Device Simulation Configuration",
+    "measurement": "Measurement Configuration",
+    "optimization": "General Optimization Configuration",
+    "server": "Server Configuration",
+    "elecprice": "Electricity Price Prediction Configuration",
+    "load": "Load Prediction Configuration",
+    "logging": "Logging Configuration",
+    "prediction": "General Prediction Configuration",
+    "pvforecast": "PV Forecast Configuration",
+    "weather": "Weather Forecast Configuration",
+}
+
+# Static set of configuration names to include in a separate table
+GENERAL_CONFIGS = [
+    "config_default_file_path",
+    "config_file_path",
+    "config_folder_path",
+    "config_keys",
+    "config_keys_read_only",
+    "data_cache_path",
+    "data_cache_subpath",
+    "data_folder_path",
+    "data_output_path",
+    "data_output_subpath",
+    "latitude",
+    "longitude",
+    "package_root_path",
+    "timezone",
+]


-def get_title(config: PydanticBaseModel) -> str:
-    if config.__doc__ is None:
-        raise NameError(f"Missing docstring: {config}")
-    return config.__doc__.strip().splitlines()[0].strip(".")
-
-
-def get_body(config: PydanticBaseModel) -> str:
-    if config.__doc__ is None:
-        raise NameError(f"Missing docstring: {config}")
-    return textwrap.dedent("\n".join(config.__doc__.strip().splitlines()[1:])).strip()
-
-
-def resolve_nested_types(field_type: Any, parent_types: list[str]) -> list[tuple[Any, list[str]]]:
-    resolved_types: list[tuple[type, list[str]]] = []
-
-    origin = getattr(field_type, "__origin__", field_type)
-    if origin is Union:
-        for arg in getattr(field_type, "__args__", []):
-            resolved_types.extend(resolve_nested_types(arg, parent_types))
-    elif origin is list:
-        for arg in getattr(field_type, "__args__", []):
-            resolved_types.extend(resolve_nested_types(arg, parent_types + ["list"]))
-    else:
-        resolved_types.append((field_type, parent_types))
-
-    return resolved_types
-
-
-def get_example_or_default(field_name: str, field_info: FieldInfo, example_ix: int) -> Any:
-    """Generate a default value for a field, considering constraints.
-
-    Priority:
-    1. field_info.examples
-    2. field_info.example
-    3. json_schema_extra['examples']
-    4. json_schema_extra['example']
-    5. field_info.default
-    """
-    # 1. Old-style examples attribute
-    examples = getattr(field_info, "examples", None)
-    if examples is not None:
-        try:
-            return examples[example_ix]
-        except IndexError:
-            return examples[-1]
-
-    # 2. Old-style single example
-    example = getattr(field_info, "example", None)
-    if example is not None:
-        return example
-
-    # 3. Look into json_schema_extra (new style)
-    extra = getattr(field_info, "json_schema_extra", {}) or {}
-
-    examples = extra.get("examples")
-    if examples is not None:
-        try:
-            return examples[example_ix]
-        except IndexError:
-            return examples[-1]
-
-    example = extra.get("example")
-    if example is not None:
-        return example
-
-    # 5. Default
-    if getattr(field_info, "default", None) not in (None, ...):
-        return field_info.default
-
-    raise NotImplementedError(
-        f"No default or example provided for field '{field_name}': {field_info}"
-    )
-
-
-def get_model_structure_from_examples(
-    model_class: type[PydanticBaseModel], multiple: bool
-) -> list[dict[str, Any]]:
-    """Create a model instance with default or example values, respecting constraints."""
-    example_max_length = 1
-
-    # Get first field with examples (non-default) to get example_max_length
-    if multiple:
-        for _, field_info in model_class.model_fields.items():
-            if field_info.examples is not None:
-                example_max_length = len(field_info.examples)
-                break
-
-    example_data: list[dict[str, Any]] = [{} for _ in range(example_max_length)]
-
-    for field_name, field_info in model_class.model_fields.items():
-        if field_info.deprecated:
-            continue
-        for example_ix in range(example_max_length):
-            example_data[example_ix][field_name] = get_example_or_default(
-                field_name, field_info, example_ix
-            )
-    return example_data
-
-
-def create_model_from_examples(
-    model_class: PydanticBaseModel, multiple: bool
-) -> list[PydanticBaseModel]:
-    """Create a model instance with default or example values, respecting constraints."""
-    return [
-        model_class(**data) for data in get_model_structure_from_examples(model_class, multiple)
-    ]
-
-
-def build_nested_structure(keys: list[str], value: Any) -> Any:
-    if not keys:
-        return value
-
-    current_key = keys[0]
-    if current_key == "list":
-        return [build_nested_structure(keys[1:], value)]
-    else:
-        return {current_key: build_nested_structure(keys[1:], value)}
-
-
-def get_default_value(field_info: Union[FieldInfo, ComputedFieldInfo], regular_field: bool) -> Any:
-    default_value = ""
-    if regular_field:
-        if (val := field_info.default) is not PydanticUndefined:
-            default_value = val
-        else:
-            default_value = "required"
-    else:
-        default_value = "N/A"
-    return default_value
-
-
-def get_type_name(field_type: type) -> str:
-    type_name = str(field_type).replace("typing.", "").replace("pathlib._local", "pathlib")
-    if type_name.startswith("<class"):
-        type_name = field_type.__name__
-    return type_name
-
-
-def generate_config_table_md(
-    config: PydanticBaseModel,
-    toplevel_keys: list[str],
-    prefix: str,
-    toplevel: bool = False,
-    extra_config: bool = False,
-) -> str:
+def generate_config_table_md(configs, title):
     """Generate a markdown table for given configurations.

     Args:
-        config (PydanticBaseModel): PydanticBaseModel configuration definition.
-        prefix (str): Prefix for table entries.
+        configs (dict): Configuration values with keys and their descriptions.
+        title (str): Title for the table.

     Returns:
         str: The markdown table as a string.
     """
-    table = ""
-    if toplevel:
-        title = get_title(config)
-
-        heading_level = "###" if extra_config else "##"
-        env_header = ""
-        env_header_underline = ""
-        env_width = ""
-        if not extra_config:
-            env_header = "| Environment Variable "
-            env_header_underline = "| -------------------- "
-            env_width = "20 "
-
-        table += f"{heading_level} {title}\n\n"
-
-        body = get_body(config)
-        if body:
-            table += body
-            table += "\n\n"
-
-        table += (
-            ":::{table} "
-            + f"{'::'.join(toplevel_keys)}\n:widths: 10 {env_width}10 5 5 30\n:align: left\n\n"
-        )
-        table += f"| Name {env_header}| Type | Read-Only | Default | Description |\n"
-        table += f"| ---- {env_header_underline}| ---- | --------- | ------- | ----------- |\n"
-
-    for field_name, field_info in list(config.model_fields.items()) + list(
-        config.model_computed_fields.items()
-    ):
-        regular_field = isinstance(field_info, FieldInfo)
-
-        config_name = field_name if extra_config else field_name.upper()
-        field_type = field_info.annotation if regular_field else field_info.return_type
-        default_value = get_default_value(field_info, regular_field)
-        description = field_info.description if field_info.description else "-"
-        deprecated = field_info.deprecated if field_info.deprecated else None
-        read_only = "rw" if regular_field else "ro"
-        type_name = get_type_name(field_type)
-
-        env_entry = ""
-        if not extra_config:
-            if regular_field:
-                env_entry = f"| `{prefix}{config_name}` "
-            else:
-                env_entry = "| "
-        if deprecated:
-            if isinstance(deprecated, bool):
-                description = "Deprecated!"
-            else:
-                description = deprecated
-        table += f"| {field_name} {env_entry}| `{type_name}` | `{read_only}` | `{default_value}` | {description} |\n"
-
-        # inner_types: dict[type[PydanticBaseModel], tuple[str, list[str]]] = dict()
-        inner_types: dict[Any, tuple[str, list[str]]] = dict()
-
-        def extract_nested_models(subtype: Any, subprefix: str, parent_types: list[str]):
-            """Extract nested models."""
-            if subtype in inner_types.keys():
-                return
-            nested_types = resolve_nested_types(subtype, [])
-            for nested_type, nested_parent_types in nested_types:
-                # Nested type may be of type class, enum, typing.Any
-                if isinstance(nested_type, type) and issubclass(nested_type, PydanticBaseModel):
-                    # Nested type is a subclass of PydanticBaseModel
-                    new_parent_types = parent_types + nested_parent_types
-                    if "list" in parent_types:
-                        new_prefix = ""
-                    else:
-                        new_prefix = f"{subprefix}"
-                    inner_types.setdefault(nested_type, (new_prefix, new_parent_types))
-
-                    # Handle normal fields
-                    for nested_field_name, nested_field_info in nested_type.model_fields.items():
-                        nested_field_type = nested_field_info.annotation
-                        if new_prefix:
-                            new_prefix += f"{nested_field_name.upper()}__"
-                        extract_nested_models(
-                            nested_field_type,
-                            new_prefix,
-                            new_parent_types + [nested_field_name],
-                        )
-
-                    # Do not extract computed fields
-
-        extract_nested_models(field_type, f"{prefix}{config_name}__", toplevel_keys + [field_name])
-
-    for new_type, info in inner_types.items():
-        if new_type not in documented_types:
-            undocumented_types.setdefault(new_type, (info[0], info[1]))
-
-    if toplevel:
-        table += ":::\n\n" # Add an empty line after the table
-
-        has_examples_list = toplevel_keys[-1] == "list"
-        instance_list = create_model_from_examples(config, has_examples_list)
-        if instance_list:
-            ins_dict_list = []
-            ins_out_dict_list = []
-            for ins in instance_list:
-                # Transform to JSON (and manually to dict) to use custom serializers and then merge with parent keys
-                ins_json = ins.model_dump_json(include_computed_fields=False)
-                ins_dict_list.append(json.loads(ins_json))
-
-                ins_out_json = ins.model_dump_json(include_computed_fields=True)
-                ins_out_dict_list.append(json.loads(ins_out_json))
-
-            same_output = ins_out_dict_list == ins_dict_list
-            same_output_str = "/Output" if same_output else ""
-
-            table += f"#{heading_level} Example Input{same_output_str}\n\n"
-            table += "```{eval-rst}\n"
-            table += ".. code-block:: json\n\n"
-            if has_examples_list:
-                input_dict = build_nested_structure(toplevel_keys[:-1], ins_dict_list)
-                if not extra_config:
-                    global_config_dict[toplevel_keys[0]] = ins_dict_list
-            else:
-                input_dict = build_nested_structure(toplevel_keys, ins_dict_list[0])
-                if not extra_config:
-                    global_config_dict[toplevel_keys[0]] = ins_dict_list[0]
-            table += textwrap.indent(json.dumps(input_dict, indent=4), " ")
-            table += "\n"
-            table += "```\n\n"
-
-            if not same_output:
-                table += f"#{heading_level} Example Output\n\n"
-                table += "```{eval-rst}\n"
-                table += ".. code-block:: json\n\n"
-                if has_examples_list:
-                    output_dict = build_nested_structure(toplevel_keys[:-1], ins_out_dict_list)
-                else:
-                    output_dict = build_nested_structure(toplevel_keys, ins_out_dict_list[0])
-                table += textwrap.indent(json.dumps(output_dict, indent=4), " ")
-                table += "\n"
-                table += "```\n\n"
-
-        while undocumented_types:
-            extra_config_type, extra_info = undocumented_types.popitem()
-            documented_types.add(extra_config_type)
-            table += generate_config_table_md(
-                extra_config_type, extra_info[1], extra_info[0], True, True
-            )
-
+    if not configs:
+        return ""
+    table = f"## {title}\n\n"
+    table += ":::{table} " + f"{title}\n:widths: 10 10 5 5 30\n:align: left\n\n"
+    table += "| Name | Type | Read-Only | Default | Description |\n"
+    table += "| ---- | ---- | --------- | ------- | ----------- |\n"
+    for name, config in sorted(configs.items()):
+        type_name = config["type"]
+        if type_name.startswith("typing."):
+            type_name = type_name[len("typing.") :]
+        table += f"| `{config['name']}` | `{type_name}` | `{config['read-only']}` | `{config['default']}` | {config['description']} |\n"
+    table += ":::\n\n" # Add an empty line after the table
     return table


-def generate_config_md(config_eos: ConfigEOS) -> str:
+def generate_config_md() -> str:
     """Generate configuration specification in Markdown with extra tables for prefixed values.

     Returns:
         str: The Markdown representation of the configuration spec.
     """
-    # Fix file path for general settings to not show local/test file path
-    GeneralSettings._config_file_path = Path(
-        "/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
-    )
-    GeneralSettings._config_folder_path = config_eos.general.config_file_path.parent
+    configs = {}
+    config_keys = config_eos.config_keys
+    config_keys_read_only = config_eos.config_keys_read_only
+    for config_key in config_keys:
+        config = {}
+        config["name"] = config_key
+        config["value"] = getattr(config_eos, config_key)
+
+        if config_key in config_keys_read_only:
+            config["read-only"] = "ro"
+            computed_field_info = config_eos.__pydantic_decorators__.computed_fields[
+                config_key
+            ].info
+            config["default"] = "N/A"
+            config["description"] = computed_field_info.description
+            config["type"] = str(computed_field_info.return_type)
+        else:
+            config["read-only"] = "rw"
+            field_info = config_eos.model_fields[config_key]
+            config["default"] = field_info.default
+            config["description"] = field_info.description
+            config["type"] = str(field_info.annotation)
+
+        configs[config_key] = config
+
+    # Generate markdown for the main table
     markdown = "# Configuration Table\n\n"

-    # Generate tables for each top level config
-    for field_name, field_info in config_eos.__class__.model_fields.items():
-        field_type = field_info.annotation
-        markdown += generate_config_table_md(
-            field_type, [field_name], f"EOS_{field_name.upper()}__", True
-        )
+    # Generate table for general configuration names
+    general_configs = {k: v for k, v in configs.items() if k in GENERAL_CONFIGS}
+    for k in general_configs.keys():
+        del configs[k] # Remove general configs from the main configs dictionary
+    markdown += generate_config_table_md(general_configs, "General Configuration Values")

-    # Full config
-    markdown += "## Full example Config\n\n"
-    markdown += "```{eval-rst}\n"
-    markdown += ".. code-block:: json\n\n"
-    # Test for valid config first
-    config_eos.merge_settings_from_dict(global_config_dict)
-    markdown += textwrap.indent(json.dumps(global_config_dict, indent=4), " ")
-    markdown += "\n"
-    markdown += "```\n\n"
-
-    # Assure there is no double \n at end of file
+    non_prefixed_configs = {k: v for k, v in configs.items()}
+
+    # Generate tables for each prefix (sorted by value) and remove prefixed configs from the main dictionary
+    sorted_prefixes = sorted(CONFIG_PREFIXES.items(), key=lambda item: item[1])
+    for prefix, title in sorted_prefixes:
+        prefixed_configs = {k: v for k, v in configs.items() if k.startswith(prefix)}
+        for k in prefixed_configs.keys():
+            del non_prefixed_configs[k]
+        markdown += generate_config_table_md(prefixed_configs, title)
+
+    # Generate markdown for the remaining non-prefixed configs if any
+    if non_prefixed_configs:
+        markdown += generate_config_table_md(non_prefixed_configs, "Other Configuration Values")
+
+    # Assure the is no double \n at end of file
     markdown = markdown.rstrip("\n")
     markdown += "\n"

-    # Assure log path does not leak to documentation
-    markdown = re.sub(
-        r'(?<=["\'])/[^"\']*/output/eos\.log(?=["\'])',
-        '/home/user/.local/share/net.akkudoktoreos.net/output/eos.log',
-        markdown
-    )
-
-    # Assure timezone name does not leak to documentation
-    tz_name = to_datetime().timezone_name
-    markdown = re.sub(re.escape(tz_name), "Europe/Berlin", markdown, flags=re.IGNORECASE)
-    # Also replace UTC, as GitHub CI always is on UTC
-    markdown = re.sub(re.escape("UTC"), "Europe/Berlin", markdown, flags=re.IGNORECASE)
-
-
     return markdown

@@ -388,15 +145,12 @@ def main():
     )

     args = parser.parse_args()
-    config_eos = get_config()

     try:
-        config_md = generate_config_md(config_eos)
-        if os.name == "nt":
-            config_md = config_md.replace("\\\\", "/")
+        config_md = generate_config_md()
         if args.output_file:
             # Write to file
-            with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
+            with open(args.output_file, "w", encoding="utf8") as f:
                 f.write(config_md)
         else:
             # Write to std output
@@ -404,8 +158,7 @@ def main():

     except Exception as e:
         print(f"Error during Configuration Specification generation: {e}", file=sys.stderr)
-        # keep throwing error to debug potential problems (e.g. invalid examples)
-        raise e
+        sys.exit(1)


 if __name__ == "__main__":
@@ -16,7 +16,6 @@ Example:

 import argparse
 import json
-import os
 import sys

 from fastapi.openapi.utils import get_openapi
@@ -38,14 +37,6 @@ def generate_openapi() -> dict:
         routes=app.routes,
     )

-    # Fix file path for general settings to not show local/test file path
-    general = openapi_spec["components"]["schemas"]["ConfigEOS"]["properties"]["general"]["default"]
-    general["config_file_path"] = "/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
-    general["config_folder_path"] = "/home/user/.config/net.akkudoktoreos.net"
-    # Fix file path for logging settings to not show local/test file path
-    logging = openapi_spec["components"]["schemas"]["ConfigEOS"]["properties"]["logging"]["default"]
-    logging["file_path"] = "/home/user/.local/share/net.akkudoktoreos.net/output/eos.log"
-
     return openapi_spec


@@ -63,7 +54,7 @@ def main():
     openapi_spec_str = json.dumps(openapi_spec, indent=2)
     if args.output_file:
         # Write to file
-        with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
+        with open(args.output_file, "w", encoding="utf8") as f:
             f.write(openapi_spec_str)
     else:
         # Write to std output
@@ -3,7 +3,6 @@

 import argparse
 import json
-import os
 import sys

 import git
@@ -285,11 +284,9 @@ def main():

     try:
         openapi_md = generate_openapi_md()
-        if os.name == "nt":
-            openapi_md = openapi_md.replace("127.0.0.1", "127.0.0.1")
         if args.output_file:
             # Write to file
-            with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
+            with open(args.output_file, "w", encoding="utf8") as f:
                 f.write(openapi_md)
         else:
             # Write to std output
@@ -1 +0,0 @@
-# Placeholder for gitlint user rules (see https://jorisroovers.com/gitlint/latest/rules/user_defined_rules/).
@@ -1,7 +1,6 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import asyncio
|
|
||||||
import cProfile
|
import cProfile
|
||||||
import json
|
import json
|
||||||
import pstats
|
import pstats
|
||||||
@@ -10,120 +9,68 @@ import time
|
|||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from loguru import logger
|
|
||||||
|
|
||||||
from akkudoktoreos.config.config import get_config
|
from akkudoktoreos.config.config import get_config
|
||||||
from akkudoktoreos.core.ems import get_ems
|
from akkudoktoreos.core.ems import get_ems
|
||||||
from akkudoktoreos.core.emsettings import EnergyManagementMode
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.optimization.genetic.geneticparams import (
|
from akkudoktoreos.optimization.genetic import (
|
||||||
GeneticOptimizationParameters,
|
OptimizationParameters,
|
||||||
|
optimization_problem,
|
||||||
)
|
)
|
||||||
from akkudoktoreos.prediction.prediction import get_prediction
|
from akkudoktoreos.prediction.prediction import get_prediction
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime
|
|
||||||
|
|
||||||
config_eos = get_config()
|
get_logger(__name__, logging_level="DEBUG")
|
||||||
prediction_eos = get_prediction()
|
|
||||||
ems_eos = get_ems()
|
|
||||||
|
|
||||||
|
|
||||||
def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
def prepare_optimization_real_parameters() -> OptimizationParameters:
|
||||||
"""Prepare and return optimization parameters with real world data.
|
"""Prepare and return optimization parameters with real world data.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
GeneticOptimizationParameters: Configured optimization parameters
|
OptimizationParameters: Configured optimization parameters
|
||||||
"""
|
"""
|
||||||
# Make a config
|
# Make a config
|
||||||
settings = {
|
settings = {
|
||||||
"general": {
|
# -- General --
|
||||||
"latitude": 52.52,
|
"prediction_hours": 48,
|
||||||
"longitude": 13.405,
|
"prediction_historic_hours": 24,
|
||||||
},
|
"latitude": 52.52,
|
||||||
"prediction": {
|
"longitude": 13.405,
|
||||||
"hours": 48,
|
# -- Predictions --
|
||||||
"historic_hours": 24,
|
|
||||||
},
|
|
||||||
"optimization": {
|
|
||||||
"horizon_hours": 24,
|
|
||||||
"interval": 3600,
|
|
||||||
"genetic": {
|
|
||||||
"individuals": 300,
|
|
||||||
"generations": 400,
|
|
||||||
"seed": None,
|
|
||||||
"penalties": {
|
|
||||||
"ev_soc_miss": 10,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
# PV Forecast
|
# PV Forecast
|
||||||
"pvforecast": {
|
"pvforecast_provider": "PVForecastAkkudoktor",
|
||||||
"provider": "PVForecastAkkudoktor",
|
"pvforecast0_peakpower": 5.0,
|
||||||
"planes": [
|
"pvforecast0_surface_azimuth": -10,
|
||||||
{
|
"pvforecast0_surface_tilt": 7,
|
||||||
"peakpower": 5.0,
|
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||||
"surface_azimuth": -10,
|
"pvforecast0_inverter_paco": 10000,
|
||||||
"surface_tilt": 7,
|
"pvforecast1_peakpower": 4.8,
|
||||||
"userhorizon": [20, 27, 22, 20],
|
"pvforecast1_surface_azimuth": -90,
|
||||||
"inverter_paco": 10000,
|
"pvforecast1_surface_tilt": 7,
|
||||||
},
|
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||||
{
|
"pvforecast1_inverter_paco": 10000,
|
||||||
"peakpower": 4.8,
|
"pvforecast2_peakpower": 1.4,
|
||||||
"surface_azimuth": -90,
|
"pvforecast2_surface_azimuth": -40,
|
||||||
"surface_tilt": 7,
|
"pvforecast2_surface_tilt": 60,
|
||||||
"userhorizon": [30, 30, 30, 50],
|
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
||||||
"inverter_paco": 10000,
|
"pvforecast2_inverter_paco": 2000,
|
||||||
},
|
"pvforecast3_peakpower": 1.6,
|
||||||
{
|
"pvforecast3_surface_azimuth": 5,
|
||||||
"peakpower": 1.4,
|
"pvforecast3_surface_tilt": 45,
|
||||||
"surface_azimuth": -40,
|
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
||||||
"surface_tilt": 60,
|
"pvforecast3_inverter_paco": 1400,
|
||||||
"userhorizon": [60, 30, 0, 30],
|
"pvforecast4_peakpower": None,
|
||||||
"inverter_paco": 2000,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"peakpower": 1.6,
|
|
||||||
"surface_azimuth": 5,
|
|
||||||
"surface_tilt": 45,
|
|
||||||
"userhorizon": [45, 25, 30, 60],
|
|
||||||
"inverter_paco": 1400,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
# Weather Forecast
|
# Weather Forecast
|
||||||
"weather": {
|
"weather_provider": "ClearOutside",
|
||||||
"provider": "ClearOutside",
|
|
||||||
},
|
|
||||||
# Electricity Price Forecast
|
# Electricity Price Forecast
|
||||||
"elecprice": {
|
"elecprice_provider": "ElecPriceAkkudoktor",
|
||||||
"provider": "ElecPriceAkkudoktor",
|
|
||||||
},
|
|
||||||
# Load Forecast
|
# Load Forecast
|
||||||
"load": {
|
"load_provider": "LoadAkkudoktor",
|
||||||
"provider": "LoadAkkudoktor",
|
"loadakkudoktor_year_energy": 5000, # Energy consumption per year in kWh
|
||||||
"provider_settings": {
|
|
||||||
"LoadAkkudoktor": {
|
|
||||||
"loadakkudoktor_year_energy_kwh": 5000, # Energy consumption per year in kWh
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
# -- Simulations --
|
# -- Simulations --
|
||||||
# Assure we have charge rates for the EV
|
|
||||||
"devices": {
|
|
||||||
"max_electric_vehicles": 1,
|
|
||||||
"electric_vehicles": [
|
|
||||||
{
|
|
||||||
"charge_rates": [
|
|
||||||
0.0,
|
|
||||||
6.0 / 16.0,
|
|
||||||
8.0 / 16.0,
|
|
||||||
10.0 / 16.0,
|
|
||||||
12.0 / 16.0,
|
|
||||||
14.0 / 16.0,
|
|
||||||
1.0,
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
config_eos = get_config()
|
||||||
|
prediction_eos = get_prediction()
|
||||||
|
ems_eos = get_ems()
|
||||||
|
|
||||||
# Update/ set configuration
|
# Update/ set configuration
|
||||||
config_eos.merge_settings_from_dict(settings)
|
config_eos.merge_settings_from_dict(settings)
|
||||||
@@ -131,14 +78,14 @@ def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
|||||||
# Get current prediction data for optimization run
|
# Get current prediction data for optimization run
|
||||||
ems_eos.set_start_datetime()
|
ems_eos.set_start_datetime()
|
||||||
print(
|
print(
|
||||||
f"Real data prediction from {prediction_eos.ems_start_datetime} to {prediction_eos.end_datetime}"
|
f"Real data prediction from {prediction_eos.start_datetime} to {prediction_eos.end_datetime}"
|
||||||
)
|
)
|
||||||
prediction_eos.update_data()
|
prediction_eos.update_data()
|
||||||
|
|
||||||
# PV Forecast (in W)
|
# PV Forecast (in W)
|
||||||
pv_forecast = prediction_eos.key_to_array(
|
pv_forecast = prediction_eos.key_to_array(
|
||||||
key="pvforecast_ac_power",
|
key="pvforecast_ac_power",
|
||||||
start_datetime=prediction_eos.ems_start_datetime,
|
start_datetime=prediction_eos.start_datetime,
|
||||||
end_datetime=prediction_eos.end_datetime,
|
end_datetime=prediction_eos.end_datetime,
|
||||||
)
|
)
|
||||||
print(f"pv_forecast: {pv_forecast}")
|
print(f"pv_forecast: {pv_forecast}")
|
||||||
@@ -146,7 +93,7 @@ def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
|||||||
# Temperature Forecast (in degree C)
|
# Temperature Forecast (in degree C)
|
||||||
temperature_forecast = prediction_eos.key_to_array(
|
temperature_forecast = prediction_eos.key_to_array(
|
||||||
key="weather_temp_air",
|
key="weather_temp_air",
|
||||||
start_datetime=prediction_eos.ems_start_datetime,
|
start_datetime=prediction_eos.start_datetime,
|
||||||
end_datetime=prediction_eos.end_datetime,
|
end_datetime=prediction_eos.end_datetime,
|
||||||
)
|
)
|
||||||
print(f"temperature_forecast: {temperature_forecast}")
|
print(f"temperature_forecast: {temperature_forecast}")
|
||||||
@@ -154,7 +101,7 @@ def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
|||||||
# Electricity Price (in Euro per Wh)
|
# Electricity Price (in Euro per Wh)
|
||||||
strompreis_euro_pro_wh = prediction_eos.key_to_array(
|
strompreis_euro_pro_wh = prediction_eos.key_to_array(
|
||||||
key="elecprice_marketprice_wh",
|
key="elecprice_marketprice_wh",
|
||||||
start_datetime=prediction_eos.ems_start_datetime,
|
start_datetime=prediction_eos.start_datetime,
|
||||||
end_datetime=prediction_eos.end_datetime,
|
end_datetime=prediction_eos.end_datetime,
|
||||||
)
|
)
|
||||||
print(f"strompreis_euro_pro_wh: {strompreis_euro_pro_wh}")
|
print(f"strompreis_euro_pro_wh: {strompreis_euro_pro_wh}")
|
||||||
@@ -162,7 +109,7 @@ def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
|||||||
# Overall System Load (in W)
|
# Overall System Load (in W)
|
||||||
gesamtlast = prediction_eos.key_to_array(
|
gesamtlast = prediction_eos.key_to_array(
|
||||||
key="load_mean",
|
key="load_mean",
|
||||||
start_datetime=prediction_eos.ems_start_datetime,
|
start_datetime=prediction_eos.start_datetime,
|
||||||
end_datetime=prediction_eos.end_datetime,
|
end_datetime=prediction_eos.end_datetime,
|
||||||
)
|
)
|
||||||
print(f"gesamtlast: {gesamtlast}")
|
print(f"gesamtlast: {gesamtlast}")
|
||||||
@@ -172,7 +119,7 @@ def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
|||||||
print(f"start_solution: {start_solution}")
|
print(f"start_solution: {start_solution}")
|
||||||
|
|
||||||
# Define parameters for the optimization problem
|
# Define parameters for the optimization problem
|
||||||
return GeneticOptimizationParameters(
|
return OptimizationParameters(
|
||||||
**{
|
**{
|
||||||
"ems": {
|
"ems": {
|
||||||
"preis_euro_pro_wh_akku": 0e-05,
|
"preis_euro_pro_wh_akku": 0e-05,
|
||||||
@@ -182,73 +129,32 @@ def prepare_optimization_real_parameters() -> GeneticOptimizationParameters:
|
|||||||
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
||||||
},
|
},
|
||||||
"pv_akku": {
|
"pv_akku": {
|
||||||
"device_id": "battery 1",
|
|
||||||
"capacity_wh": 26400,
|
"capacity_wh": 26400,
|
||||||
"initial_soc_percentage": 15,
|
"initial_soc_percentage": 15,
|
||||||
"min_soc_percentage": 15,
|
"min_soc_percentage": 15,
|
||||||
},
|
},
|
||||||
"inverter": {
|
|
||||||
"device_id": "inverter 1",
|
|
||||||
"max_power_wh": 10000,
|
|
||||||
"battery_id": "battery 1",
|
|
||||||
},
|
|
||||||
"eauto": {
|
"eauto": {
|
||||||
"device_id": "electric vehicle 1",
|
|
||||||
"min_soc_percentage": 50,
|
"min_soc_percentage": 50,
|
||||||
"capacity_wh": 60000,
|
"capacity_wh": 60000,
|
||||||
"charging_efficiency": 0.95,
|
"charging_efficiency": 0.95,
|
||||||
"max_charge_power_w": 11040,
|
"max_charge_power_w": 11040,
|
||||||
"initial_soc_percentage": 5,
|
"initial_soc_percentage": 5,
|
||||||
},
|
},
|
||||||
|
"inverter": {
|
||||||
|
"max_power_wh": 10000,
|
||||||
|
},
|
||||||
"temperature_forecast": temperature_forecast,
|
"temperature_forecast": temperature_forecast,
|
||||||
"start_solution": start_solution,
|
"start_solution": start_solution,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def prepare_optimization_parameters() -> GeneticOptimizationParameters:
|
def prepare_optimization_parameters() -> OptimizationParameters:
|
||||||
"""Prepare and return optimization parameters with predefined data.
|
"""Prepare and return optimization parameters with predefined data.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
GeneticOptimizationParameters: Configured optimization parameters
|
OptimizationParameters: Configured optimization parameters
|
||||||
"""
|
"""
|
||||||
# Initialize the optimization problem using the default configuration
|
|
||||||
config_eos.merge_settings_from_dict(
|
|
||||||
{
|
|
||||||
"prediction": {"hours": 48},
|
|
||||||
"optimization": {
|
|
||||||
"horizon_hours": 48,
|
|
||||||
"interval": 3600,
|
|
||||||
"genetic": {
|
|
||||||
"individuals": 300,
|
|
||||||
"generations": 400,
|
|
||||||
"seed": None,
|
|
||||||
"penalties": {
|
|
||||||
"ev_soc_miss": 10,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
# Assure we have charge rates for the EV
|
|
||||||
"devices": {
|
|
||||||
"max_electric_vehicles": 1,
|
|
||||||
"electric_vehicles": [
|
|
||||||
{
|
|
||||||
"device_id": "Default EV",
|
|
||||||
"charge_rates": [
|
|
||||||
0.0,
|
|
||||||
6.0 / 16.0,
|
|
||||||
8.0 / 16.0,
|
|
||||||
10.0 / 16.0,
|
|
||||||
12.0 / 16.0,
|
|
||||||
14.0 / 16.0,
|
|
||||||
1.0,
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# PV Forecast (in W)
|
# PV Forecast (in W)
|
||||||
pv_forecast = np.zeros(48)
|
pv_forecast = np.zeros(48)
|
||||||
pv_forecast[12] = 5000
|
pv_forecast[12] = 5000
|
@@ -367,7 +273,7 @@ def prepare_optimization_parameters() -> GeneticOptimizationParameters:
     start_solution = None
 
     # Define parameters for the optimization problem
-    return GeneticOptimizationParameters(
+    return OptimizationParameters(
         **{
             "ems": {
                 "preis_euro_pro_wh_akku": 0e-05,
@@ -377,24 +283,20 @@ def prepare_optimization_parameters() -> GeneticOptimizationParameters:
                 "strompreis_euro_pro_wh": strompreis_euro_pro_wh,
             },
             "pv_akku": {
-                "device_id": "battery 1",
                 "capacity_wh": 26400,
                 "initial_soc_percentage": 15,
                 "min_soc_percentage": 15,
             },
-            "inverter": {
-                "device_id": "inverter 1",
-                "max_power_wh": 10000,
-                "battery_id": "battery 1",
-            },
             "eauto": {
-                "device_id": "electric vehicle 1",
                 "min_soc_percentage": 50,
                 "capacity_wh": 60000,
                 "charging_efficiency": 0.95,
                 "max_charge_power_w": 11040,
                 "initial_soc_percentage": 5,
             },
+            "inverter": {
+                "max_power_wh": 10000,
+            },
             "temperature_forecast": temperature_forecast,
             "start_solution": start_solution,
         }
@@ -416,33 +318,25 @@ def run_optimization(
     # Prepare parameters
     if parameters_file:
         with open(parameters_file, "r") as f:
-            parameters = GeneticOptimizationParameters(**json.load(f))
+            parameters = OptimizationParameters(**json.load(f))
     elif real_world:
         parameters = prepare_optimization_real_parameters()
     else:
         parameters = prepare_optimization_parameters()
-    logger.info("Optimization Parameters:")
-    logger.info(parameters.model_dump_json(indent=4))
 
-    if start_hour is None:
-        start_datetime = None
-    else:
-        start_datetime = to_datetime().set(hour=start_hour)
+    if verbose:
+        print("\nOptimization Parameters:")
+        print(parameters.model_dump_json(indent=4))
 
-    asyncio.run(
-        ems_eos.run(
-            start_datetime=start_datetime,
-            mode=EnergyManagementMode.OPTIMIZATION,
-            genetic_parameters=parameters,
-            genetic_individuals=ngen,
-            genetic_seed=seed,
-        )
-    )
+    # Initialize the optimization problem using the default configuration
+    config_eos = get_config()
+    config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 48})
+    opt_class = optimization_problem(verbose=verbose, fixed_seed=seed)
 
-    solution = ems_eos.genetic_solution()
-    if solution is None:
-        return None
-    return solution.model_dump_json()
+    # Perform the optimisation based on the provided parameters and start hour
+    result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour, ngen=ngen)
+    return result.model_dump_json()
 
 
 def main():
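The two sides of this file drive the optimizer through different entry points: one hands `GeneticOptimizationParameters` to the energy management system and reads back a genetic solution, the other instantiates `optimization_problem` directly and calls `optimierung_ems`. A hedged sketch of the first call path, assembled from the `-` lines above, with the collaborators passed in rather than imported (their module locations are not visible in this diff):

```python
# Sketch of the genetic entry point, assembled from the "-" lines above.
# ems_eos, mode and start_datetime are passed in because their import paths
# are not shown in this diff; the keyword arguments are taken verbatim.
import asyncio
from typing import Any, Optional


def run_genetic(
    ems_eos: Any,
    mode: Any,
    parameters: Any,
    ngen: int,
    seed: Optional[int],
    start_datetime: Any = None,
) -> Optional[str]:
    asyncio.run(
        ems_eos.run(
            start_datetime=start_datetime,
            mode=mode,
            genetic_parameters=parameters,
            genetic_individuals=ngen,
            genetic_seed=seed,
        )
    )
    solution = ems_eos.genetic_solution()
    return None if solution is None else solution.model_dump_json()
```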
@@ -16,47 +16,32 @@ prediction_eos = get_prediction()
 def config_pvforecast() -> dict:
     """Configure settings for PV forecast."""
     settings = {
-        "general": {
-            "latitude": 52.52,
-            "longitude": 13.405,
-        },
-        "prediction": {
-            "hours": 48,
-            "historic_hours": 24,
-        },
-        "pvforecast": {
-            "provider": "PVForecastAkkudoktor",
-            "planes": [
-                {
-                    "peakpower": 5.0,
-                    "surface_azimuth": -10,
-                    "surface_tilt": 7,
-                    "userhorizon": [20, 27, 22, 20],
-                    "inverter_paco": 10000,
-                },
-                {
-                    "peakpower": 4.8,
-                    "surface_azimuth": -90,
-                    "surface_tilt": 7,
-                    "userhorizon": [30, 30, 30, 50],
-                    "inverter_paco": 10000,
-                },
-                {
-                    "peakpower": 1.4,
-                    "surface_azimuth": -40,
-                    "surface_tilt": 60,
-                    "userhorizon": [60, 30, 0, 30],
-                    "inverter_paco": 2000,
-                },
-                {
-                    "peakpower": 1.6,
-                    "surface_azimuth": 5,
-                    "surface_tilt": 45,
-                    "userhorizon": [45, 25, 30, 60],
-                    "inverter_paco": 1400,
-                },
-            ],
-        },
+        "prediction_hours": 48,
+        "prediction_historic_hours": 24,
+        "latitude": 52.52,
+        "longitude": 13.405,
+        "pvforecast_provider": "PVForecastAkkudoktor",
+        "pvforecast0_peakpower": 5.0,
+        "pvforecast0_surface_azimuth": -10,
+        "pvforecast0_surface_tilt": 7,
+        "pvforecast0_userhorizon": [20, 27, 22, 20],
+        "pvforecast0_inverter_paco": 10000,
+        "pvforecast1_peakpower": 4.8,
+        "pvforecast1_surface_azimuth": -90,
+        "pvforecast1_surface_tilt": 7,
+        "pvforecast1_userhorizon": [30, 30, 30, 50],
+        "pvforecast1_inverter_paco": 10000,
+        "pvforecast2_peakpower": 1.4,
+        "pvforecast2_surface_azimuth": -40,
+        "pvforecast2_surface_tilt": 60,
+        "pvforecast2_userhorizon": [60, 30, 0, 30],
+        "pvforecast2_inverter_paco": 2000,
+        "pvforecast3_peakpower": 1.6,
+        "pvforecast3_surface_azimuth": 5,
+        "pvforecast3_surface_tilt": 45,
+        "pvforecast3_userhorizon": [45, 25, 30, 60],
+        "pvforecast3_inverter_paco": 1400,
+        "pvforecast4_peakpower": None,
     }
     return settings
 
@@ -64,15 +49,10 @@ def config_pvforecast() -> dict:
 def config_weather() -> dict:
     """Configure settings for weather forecast."""
     settings = {
-        "general": {
-            "latitude": 52.52,
-            "longitude": 13.405,
-        },
-        "prediction": {
-            "hours": 48,
-            "historic_hours": 24,
-        },
-        "weather": dict(),
+        "prediction_hours": 48,
+        "prediction_historic_hours": 24,
+        "latitude": 52.52,
+        "longitude": 13.405,
     }
     return settings
 
@@ -80,31 +60,10 @@ def config_weather() -> dict:
 def config_elecprice() -> dict:
     """Configure settings for electricity price forecast."""
     settings = {
-        "general": {
-            "latitude": 52.52,
-            "longitude": 13.405,
-        },
-        "prediction": {
-            "hours": 48,
-            "historic_hours": 24,
-        },
-        "elecprice": dict(),
-    }
-    return settings
-
-
-def config_feedintarifffixed() -> dict:
-    """Configure settings for feed in tariff forecast."""
-    settings = {
-        "general": {
-            "latitude": 52.52,
-            "longitude": 13.405,
-        },
-        "prediction": {
-            "hours": 48,
-            "historic_hours": 24,
-        },
-        "feedintariff": dict(),
+        "prediction_hours": 48,
+        "prediction_historic_hours": 24,
+        "latitude": 52.52,
+        "longitude": 13.405,
     }
     return settings
 
@@ -112,14 +71,10 @@ def config_feedintarifffixed() -> dict:
 def config_load() -> dict:
     """Configure settings for load forecast."""
    settings = {
-        "general": {
-            "latitude": 52.52,
-            "longitude": 13.405,
-        },
-        "prediction": {
-            "hours": 48,
-            "historic_hours": 24,
-        },
+        "prediction_hours": 48,
+        "prediction_historic_hours": 24,
+        "latitude": 52.52,
+        "longitude": 13.405,
    }
     return settings
 
@@ -137,44 +92,30 @@ def run_prediction(provider_id: str, verbose: bool = False) -> str:
     # Initialize the oprediction
     config_eos = get_config()
     prediction_eos = get_prediction()
+    if verbose:
+        print(f"\nProvider ID: {provider_id}")
     if provider_id in ("PVForecastAkkudoktor",):
         settings = config_pvforecast()
-        forecast = "pvforecast"
+        settings["pvforecast_provider"] = provider_id
     elif provider_id in ("BrightSky", "ClearOutside"):
         settings = config_weather()
-        forecast = "weather"
+        settings["weather_provider"] = provider_id
     elif provider_id in ("ElecPriceAkkudoktor",):
         settings = config_elecprice()
-        forecast = "elecprice"
-    elif provider_id in ("FeedInTariffFixed",):
-        settings = config_feedintarifffixed()
-        forecast = "feedintariff"
+        settings["elecprice_provider"] = provider_id
     elif provider_id in ("LoadAkkudoktor",):
-        settings = config_load()
-        forecast = "loadforecast"
-        settings["load"]["LoadAkkudoktor"]["loadakkudoktor_year_energy_wh"] = 1000
+        settings = config_elecprice()
+        settings["loadakkudoktor_year_energy"] = 1000
+        settings["load_provider"] = provider_id
     else:
         raise ValueError(f"Unknown provider '{provider_id}'.")
-    settings[forecast]["provider"] = provider_id
     config_eos.merge_settings_from_dict(settings)
 
-    provider = prediction_eos.provider_by_id(provider_id)
-
     prediction_eos.update_data()
 
     # Return result of prediction
+    provider = prediction_eos.provider_by_id(provider_id)
     if verbose:
-        print(f"\nProvider ID: {provider.provider_id()}")
-        print("----------")
-        print("\nSettings\n----------")
-        print(settings)
-        print("\nProvider\n----------")
-        print(f"elecprice.provider: {config_eos.elecprice.provider}")
-        print(f"feedintariff.provider: {config_eos.feedintariff.provider}")
-        print(f"load.provider: {config_eos.load.provider}")
-        print(f"pvforecast.provider: {config_eos.pvforecast.provider}")
-        print(f"weather.provider: {config_eos.weather.provider}")
-        print(f"enabled: {provider.enabled()}")
     for key in provider.record_keys:
         print(f"\n{key}\n----------")
         print(f"Array: {provider.key_to_array(key)}")
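On the removed side, provider selection is uniform: every `config_*` helper returns a nested dict and `run_prediction` only fills in `settings[forecast]["provider"]`. A hedged end-to-end sketch of that flow for the weather provider; `config_eos` and `prediction_eos` come from `get_config()`/`get_prediction()` as in this script, and the dict literal is illustrative:

```python
# Hedged usage sketch of the nested provider configuration shown above.
config_eos = get_config()
prediction_eos = get_prediction()

settings = {
    "general": {"latitude": 52.52, "longitude": 13.405},
    "prediction": {"hours": 48, "historic_hours": 24},
    "weather": {"provider": "BrightSky"},
}
config_eos.merge_settings_from_dict(settings)
prediction_eos.update_data()

provider = prediction_eos.provider_by_id("BrightSky")
for key in provider.record_keys:
    print(f"{key}: {provider.key_to_array(key)}")
```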
@@ -9,42 +9,37 @@ Key features:
 - Managing directory setups for the application
 """
 
-import json
 import os
 import shutil
 from pathlib import Path
-from typing import Any, ClassVar, Optional, Type
+from typing import Any, ClassVar, List, Optional
 
-import pydantic_settings
-from loguru import logger
 from platformdirs import user_config_dir, user_data_dir
-from pydantic import Field, computed_field, field_validator
+from pydantic import Field, ValidationError, computed_field
 
 # settings
 from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.config.configmigrate import migrate_config_data, migrate_config_file
-from akkudoktoreos.core.cachesettings import CacheCommonSettings
 from akkudoktoreos.core.coreabc import SingletonMixin
-from akkudoktoreos.core.decorators import classproperty
-from akkudoktoreos.core.emsettings import (
-    EnergyManagementCommonSettings,
-)
+from akkudoktoreos.core.logging import get_logger
 from akkudoktoreos.core.logsettings import LoggingCommonSettings
-from akkudoktoreos.core.pydantic import PydanticModelNestedValueMixin, merge_models
-from akkudoktoreos.core.version import __version__
 from akkudoktoreos.devices.devices import DevicesCommonSettings
 from akkudoktoreos.measurement.measurement import MeasurementCommonSettings
 from akkudoktoreos.optimization.optimization import OptimizationCommonSettings
 from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
-from akkudoktoreos.prediction.feedintariff import FeedInTariffCommonSettings
+from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
 from akkudoktoreos.prediction.load import LoadCommonSettings
+from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
+from akkudoktoreos.prediction.loadimport import LoadImportCommonSettings
 from akkudoktoreos.prediction.prediction import PredictionCommonSettings
 from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
+from akkudoktoreos.prediction.pvforecastimport import PVForecastImportCommonSettings
 from akkudoktoreos.prediction.weather import WeatherCommonSettings
+from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
 from akkudoktoreos.server.server import ServerCommonSettings
-from akkudoktoreos.utils.datetimeutil import to_datetime, to_timezone
 from akkudoktoreos.utils.utils import UtilsCommonSettings
 
+logger = get_logger(__name__)
 
 
 def get_absolute_path(
     basepath: Optional[Path | str], subpath: Optional[Path | str]
@@ -64,192 +59,61 @@ def get_absolute_path(
     return None
 
 
-class GeneralSettings(SettingsBaseModel):
-    """Settings for common configuration.
+class ConfigCommonSettings(SettingsBaseModel):
+    """Settings for common configuration."""
 
-    General configuration to set directories of cache and output files and system location (latitude
-    and longitude).
-    Validators ensure each parameter is within a specified range. A computed property, `timezone`,
-    determines the time zone based on latitude and longitude.
-
-    Attributes:
-        latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
-        longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
-
-    Properties:
-        timezone (Optional[str]): Computed time zone string based on the specified latitude
-            and longitude.
-    """
-
-    _config_folder_path: ClassVar[Optional[Path]] = None
-    _config_file_path: ClassVar[Optional[Path]] = None
-
-    version: str = Field(
-        default=__version__,
-        json_schema_extra={
-            "description": "Configuration file version. Used to check compatibility."
-        },
-    )
-
     data_folder_path: Optional[Path] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Path to EOS data directory.",
-            "examples": [None, "/home/eos/data"],
-        },
+        default=None, description="Path to EOS data directory."
     )
 
     data_output_subpath: Optional[Path] = Field(
-        default="output",
-        json_schema_extra={"description": "Sub-path for the EOS output data directory."},
+        "output", description="Sub-path for the EOS output data directory."
     )
 
-    latitude: Optional[float] = Field(
-        default=52.52,
-        ge=-90.0,
-        le=90.0,
-        json_schema_extra={
-            "description": "Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)"
-        },
-    )
-    longitude: Optional[float] = Field(
-        default=13.405,
-        ge=-180.0,
-        le=180.0,
-        json_schema_extra={"description": "Longitude in decimal degrees, within -180 to 180 (°)"},
+    data_cache_subpath: Optional[Path] = Field(
+        "cache", description="Sub-path for the EOS cache data directory."
     )
 
     # Computed fields
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def timezone(self) -> Optional[str]:
-        """Compute timezone based on latitude and longitude."""
-        if self.latitude and self.longitude:
-            return to_timezone(location=(self.latitude, self.longitude), as_string=True)
-        return None
-
     @computed_field  # type: ignore[prop-decorator]
     @property
     def data_output_path(self) -> Optional[Path]:
         """Compute data_output_path based on data_folder_path."""
         return get_absolute_path(self.data_folder_path, self.data_output_subpath)
 
+    # Computed fields
     @computed_field  # type: ignore[prop-decorator]
     @property
-    def config_folder_path(self) -> Optional[Path]:
-        """Path to EOS configuration directory."""
-        return self._config_folder_path
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def config_file_path(self) -> Optional[Path]:
-        """Path to EOS configuration file."""
-        return self._config_file_path
-
-    compatible_versions: ClassVar[list[str]] = [__version__]
-
-    @field_validator("version")
-    @classmethod
-    def check_version(cls, v: str) -> str:
-        if v not in cls.compatible_versions:
-            error = (
-                f"Incompatible configuration version '{v}'. "
-                f"Expected one of: {', '.join(cls.compatible_versions)}."
-            )
-            logger.error(error)
-            raise ValueError(error)
-        return v
+    def data_cache_path(self) -> Optional[Path]:
+        """Compute data_cache_path based on data_folder_path."""
+        return get_absolute_path(self.data_folder_path, self.data_cache_subpath)
 
 
-class SettingsEOS(pydantic_settings.BaseSettings, PydanticModelNestedValueMixin):
-    """Settings for all EOS.
+class SettingsEOS(
+    ConfigCommonSettings,
+    LoggingCommonSettings,
+    DevicesCommonSettings,
+    MeasurementCommonSettings,
+    OptimizationCommonSettings,
+    PredictionCommonSettings,
+    ElecPriceCommonSettings,
+    ElecPriceImportCommonSettings,
+    LoadCommonSettings,
+    LoadAkkudoktorCommonSettings,
+    LoadImportCommonSettings,
+    PVForecastCommonSettings,
+    PVForecastImportCommonSettings,
+    WeatherCommonSettings,
+    WeatherImportCommonSettings,
+    ServerCommonSettings,
+    UtilsCommonSettings,
+):
+    """Settings for all EOS."""
 
-    Only used to update the configuration with specific settings.
-    """
-
-    general: Optional[GeneralSettings] = Field(
-        default=None, json_schema_extra={"description": "General Settings"}
-    )
-    cache: Optional[CacheCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Cache Settings"}
-    )
-    ems: Optional[EnergyManagementCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Energy Management Settings"}
-    )
-    logging: Optional[LoggingCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Logging Settings"}
-    )
-    devices: Optional[DevicesCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Devices Settings"}
-    )
-    measurement: Optional[MeasurementCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Measurement Settings"}
-    )
-    optimization: Optional[OptimizationCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Optimization Settings"}
-    )
-    prediction: Optional[PredictionCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Prediction Settings"}
-    )
-    elecprice: Optional[ElecPriceCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Electricity Price Settings"}
-    )
-    feedintariff: Optional[FeedInTariffCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Feed In Tariff Settings"}
-    )
-    load: Optional[LoadCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Load Settings"}
-    )
-    pvforecast: Optional[PVForecastCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "PV Forecast Settings"}
-    )
-    weather: Optional[WeatherCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Weather Settings"}
-    )
-    server: Optional[ServerCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Server Settings"}
-    )
-    utils: Optional[UtilsCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "Utilities Settings"}
-    )
-
-    model_config = pydantic_settings.SettingsConfigDict(
-        env_nested_delimiter="__",
-        nested_model_default_partial_update=True,
-        env_prefix="EOS_",
-        ignored_types=(classproperty,),
-    )
-
-
-class SettingsEOSDefaults(SettingsEOS):
-    """Settings for all of EOS with defaults.
-
-    Used by ConfigEOS instance to make all fields available.
-    """
-
-    general: GeneralSettings = GeneralSettings()
-    cache: CacheCommonSettings = CacheCommonSettings()
-    ems: EnergyManagementCommonSettings = EnergyManagementCommonSettings()
-    logging: LoggingCommonSettings = LoggingCommonSettings()
-    devices: DevicesCommonSettings = DevicesCommonSettings()
-    measurement: MeasurementCommonSettings = MeasurementCommonSettings()
-    optimization: OptimizationCommonSettings = OptimizationCommonSettings()
-    prediction: PredictionCommonSettings = PredictionCommonSettings()
-    elecprice: ElecPriceCommonSettings = ElecPriceCommonSettings()
-    feedintariff: FeedInTariffCommonSettings = FeedInTariffCommonSettings()
-    load: LoadCommonSettings = LoadCommonSettings()
-    pvforecast: PVForecastCommonSettings = PVForecastCommonSettings()
-    weather: WeatherCommonSettings = WeatherCommonSettings()
-    server: ServerCommonSettings = ServerCommonSettings()
-    utils: UtilsCommonSettings = UtilsCommonSettings()
-
-    def __hash__(self) -> int:
-        # Just for usage in configmigrate, finally overwritten when used by ConfigEOS.
-        # This is mutable, so pydantic does not set a hash.
-        return id(self)
-
-
-class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
+    pass
+
+
+class ConfigEOS(SingletonMixin, SettingsEOS):
     """Singleton configuration handler for the EOS application.
 
     ConfigEOS extends `SettingsEOS` with support for default configuration paths and automatic
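The structural difference between the two branches is visible here: the removed side composes `SettingsEOS` from named sub-model fields with `env_prefix="EOS_"` and `env_nested_delimiter="__"`, so nested options map onto environment variables mechanically, while the added side flattens everything into one mixin chain. A hedged, self-contained sketch of the nested env mapping; `DemoSettings` is ours, only the prefix/delimiter mechanics mirror `SettingsEOS`:

```python
# Hedged sketch: how EOS_-prefixed environment variables reach nested settings
# fields under env_prefix="EOS_" and env_nested_delimiter="__".
import os

import pydantic_settings
from pydantic import BaseModel


class Prediction(BaseModel):
    hours: int = 24


class DemoSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_prefix="EOS_", env_nested_delimiter="__"
    )
    prediction: Prediction = Prediction()


os.environ["EOS_PREDICTION__HOURS"] = "48"
print(DemoSettings().prediction.hours)  # 48
```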
@@ -279,6 +143,8 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
     in one part of the application reflects across all references to this class.
 
     Attributes:
+        _settings (ClassVar[SettingsEOS]): Holds application-wide settings.
+        _file_settings (ClassVar[SettingsEOS]): Stores configuration loaded from file.
         config_folder_path (Optional[Path]): Path to the configuration directory.
         config_file_path (Optional[Path]): Path to the configuration file.
 
@@ -289,7 +155,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
     To initialize and access configuration attributes (only one instance is created):
     ```python
     config_eos = ConfigEOS()  # Always returns the same instance
-    print(config_eos.prediction.hours)  # Access a setting from the loaded configuration
+    print(config_eos.prediction_hours)  # Access a setting from the loaded configuration
     ```
 
     """
@@ -301,150 +167,111 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
     ENCODING: ClassVar[str] = "UTF-8"
     CONFIG_FILE_NAME: ClassVar[str] = "EOS.config.json"
 
-    def __hash__(self) -> int:
-        # ConfigEOS is a singleton
-        return hash("config_eos")
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, ConfigEOS):
-            return False
-        # ConfigEOS is a singleton
-        return True
+    _settings: ClassVar[Optional[SettingsEOS]] = None
+    _file_settings: ClassVar[Optional[SettingsEOS]] = None
+
+    _config_folder_path: Optional[Path] = None
+    _config_file_path: Optional[Path] = None
 
-    @classmethod
-    def settings_customise_sources(
-        cls,
-        settings_cls: Type[pydantic_settings.BaseSettings],
-        init_settings: pydantic_settings.PydanticBaseSettingsSource,
-        env_settings: pydantic_settings.PydanticBaseSettingsSource,
-        dotenv_settings: pydantic_settings.PydanticBaseSettingsSource,
-        file_secret_settings: pydantic_settings.PydanticBaseSettingsSource,
-    ) -> tuple[pydantic_settings.PydanticBaseSettingsSource, ...]:
-        """Customizes the order and handling of settings sources for a pydantic_settings.BaseSettings subclass.
-
-        This method determines the sources for application configuration settings, including
-        environment variables, dotenv files and JSON configuration files.
-        It ensures that a default configuration file exists and creates one if necessary.
-
-        Args:
-            settings_cls (Type[pydantic_settings.BaseSettings]): The Pydantic BaseSettings class for
-                which sources are customized.
-            init_settings (pydantic_settings.PydanticBaseSettingsSource): The initial settings source, typically passed at runtime.
-            env_settings (pydantic_settings.PydanticBaseSettingsSource): Settings sourced from environment variables.
-            dotenv_settings (pydantic_settings.PydanticBaseSettingsSource): Settings sourced from a dotenv file.
-            file_secret_settings (pydantic_settings.PydanticBaseSettingsSource): Unused (needed for parent class interface).
-
-        Returns:
-            tuple[pydantic_settings.PydanticBaseSettingsSource, ...]: A tuple of settings sources in the order they should be applied.
-
-        Behavior:
-            1. Checks for the existence of a JSON configuration file in the expected location.
-            2. If the configuration file does not exist, creates the directory (if needed) and attempts to copy a
-               default configuration file to the location. If the copy fails, uses the default configuration file directly.
-            3. Creates a `pydantic_settings.JsonConfigSettingsSource` for both the configuration file and the default configuration file.
-            4. Updates class attributes `GeneralSettings._config_folder_path` and
-               `GeneralSettings._config_file_path` to reflect the determined paths.
-            5. Returns a tuple containing all provided and newly created settings sources in the desired order.
-
-        Notes:
-            - This method logs a warning if the default configuration file cannot be copied.
-            - It ensures that a fallback to the default configuration file is always possible.
-        """
-        # Ensure we know and have the config folder path and the config file
-        config_file, exists = cls._get_config_file_path()
-        config_dir = config_file.parent
-        if not exists:
-            config_dir.mkdir(parents=True, exist_ok=True)
-            try:
-                shutil.copy2(cls.config_default_file_path, config_file)
-            except Exception as exc:
-                logger.warning(f"Could not copy default config: {exc}. Using default config...")
-                config_file = cls.config_default_file_path
-                config_dir = config_file.parent
-        # Remember config_dir and config file
-        GeneralSettings._config_folder_path = config_dir
-        GeneralSettings._config_file_path = config_file
-
-        # All the settings sources in priority sequence
-        setting_sources = [
-            init_settings,
-            env_settings,
-            dotenv_settings,
-        ]
-
-        # Apend file settings to sources
-        file_settings: Optional[pydantic_settings.JsonConfigSettingsSource] = None
-        try:
-            backup_file = config_file.with_suffix(f".{to_datetime(as_string='YYYYMMDDHHmmss')}")
-            if migrate_config_file(config_file, backup_file):
-                # If the config file does have the correct version add it as settings source
-                file_settings = pydantic_settings.JsonConfigSettingsSource(
-                    settings_cls, json_file=config_file
-                )
-                setting_sources.append(file_settings)
-        except Exception as ex:
-            logger.error(
-                f"Error reading config file '{config_file}' (falling back to default config): {ex}"
-            )
-
-        # Append default settings to sources
-        default_settings = pydantic_settings.JsonConfigSettingsSource(
-            settings_cls, json_file=cls.config_default_file_path
-        )
-        setting_sources.append(default_settings)
-
-        return tuple(setting_sources)
-
-    @classproperty
-    def config_default_file_path(cls) -> Path:
+    # Computed fields
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def config_folder_path(self) -> Optional[Path]:
+        """Path to EOS configuration directory."""
+        return self._config_folder_path
+
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def config_file_path(self) -> Optional[Path]:
+        """Path to EOS configuration file."""
+        return self._config_file_path
+
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def config_default_file_path(self) -> Path:
         """Compute the default config file path."""
-        return cls.package_root_path.joinpath("data/default.config.json")
+        return self.package_root_path.joinpath("data/default.config.json")
 
-    @classproperty
-    def package_root_path(cls) -> Path:
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def package_root_path(self) -> Path:
         """Compute the package root path."""
         return Path(__file__).parent.parent.resolve()
 
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
+    # Computed fields
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def config_keys(self) -> List[str]:
+        """Returns the keys of all fields in the configuration."""
+        key_list = []
+        key_list.extend(list(self.model_fields.keys()))
+        key_list.extend(list(self.__pydantic_decorators__.computed_fields.keys()))
+        return key_list
+
+    # Computed fields
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def config_keys_read_only(self) -> List[str]:
+        """Returns the keys of all read only fields in the configuration."""
+        key_list = []
+        key_list.extend(list(self.__pydantic_decorators__.computed_fields.keys()))
+        return key_list
+
+    def __init__(self) -> None:
         """Initializes the singleton ConfigEOS instance.
 
         Configuration data is loaded from a configuration file or a default one is created if none
         exists.
         """
-        logger.debug("Config init with parameters {} {}", args, kwargs)
-        # Check for singleton guard
-        if hasattr(self, "_initialized"):
-            return
-        self._setup(self, *args, **kwargs)
-
-    def _setup(self, *args: Any, **kwargs: Any) -> None:
-        """Re-initialize global settings."""
-        logger.debug("Config setup with parameters {} {}", args, kwargs)
-        # Assure settings base knows the singleton EOS configuration
-        SettingsBaseModel.config = self
-        # (Re-)load settings - call base class init
-        SettingsEOSDefaults.__init__(self, *args, **kwargs)
-        # Init config file and data folder pathes
-        self._create_initial_config_file()
-        self._update_data_folder_path()
-        self._initialized = True
-        logger.debug("Config setup:\n{}", self)
+        super().__init__()
+        self.from_config_file()
+        self.update()
+
+    @property
+    def settings(self) -> Optional[SettingsEOS]:
+        """Returns global settings for EOS.
+
+        Settings generally provide configuration for EOS and are typically set only once.
+
+        Returns:
+            SettingsEOS: The settings for EOS or None.
+        """
+        return ConfigEOS._settings
+
+    @classmethod
+    def _merge_and_update_settings(cls, settings: SettingsEOS) -> None:
+        """Merge new and available settings.
+
+        Args:
+            settings (SettingsEOS): The new settings to apply.
+        """
+        for key in SettingsEOS.model_fields:
+            if value := getattr(settings, key, None):
+                setattr(cls._settings, key, value)
 
-    def merge_settings(self, settings: SettingsEOS) -> None:
+    def merge_settings(self, settings: SettingsEOS, force: Optional[bool] = None) -> None:
         """Merges the provided settings into the global settings for EOS, with optional overwrite.
 
         Args:
             settings (SettingsEOS): The settings to apply globally.
+            force (Optional[bool]): If True, overwrites the existing settings completely.
+                If False, the new settings are merged to the existing ones with priority for
+                the new ones. Defaults to False.
 
         Raises:
-            ValueError: If the `settings` is not a `SettingsEOS` instance.
+            ValueError: If settings are already set and `force` is not True or
+                if the `settings` is not a `SettingsEOS` instance.
         """
         if not isinstance(settings, SettingsEOS):
-            error_msg = f"Settings must be an instance of SettingsEOS: '{settings}'."
-            logger.error(error_msg)
-            raise ValueError(error_msg)
+            raise ValueError(f"Settings must be an instance of SettingsEOS: '{settings}'.")
 
-        self.merge_settings_from_dict(settings.model_dump(exclude_none=True, exclude_unset=True))
+        if ConfigEOS._settings is None or force:
+            ConfigEOS._settings = settings
+        else:
+            self._merge_and_update_settings(settings)
+
+        # Update configuration after merging
+        self.update()
 
     def merge_settings_from_dict(self, data: dict) -> None:
         """Merges the provided dictionary data into the current instance.
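The deleted `settings_customise_sources` override is the standard pydantic-settings hook for reordering configuration sources; earlier sources in the returned tuple take priority, which is what lets runtime settings shadow the JSON file. A self-contained, hedged sketch of the same pattern with generic field names (not the EOS models):

```python
# Hedged, generic sketch of the settings_customise_sources pattern used above:
# init args win over env vars, which win over a JSON config file.
import pydantic_settings


class DemoSettings(pydantic_settings.BaseSettings):
    port: int = 8000

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls,
        init_settings,
        env_settings,
        dotenv_settings,
        file_secret_settings,
    ):
        json_source = pydantic_settings.JsonConfigSettingsSource(
            settings_cls, json_file="demo.config.json"  # missing file yields no values
        )
        # Earlier sources take priority over later ones.
        return (init_settings, env_settings, dotenv_settings, json_source)
```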
@@ -462,170 +289,141 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
 
         Example:
             >>> config = get_config()
-            >>> new_data = {"prediction": {"hours": 24}, "server": {"port": 8000}}
+            >>> new_data = {"prediction_hours": 24, "server_eos_port": 8000}
             >>> config.merge_settings_from_dict(new_data)
         """
-        self._setup(**merge_models(self, data))
+        # Create new settings instance with reset optional fields and merged data
+        settings = SettingsEOS.from_dict(data)
+        self.merge_settings(settings)
 
     def reset_settings(self) -> None:
-        """Reset all changed settings to environment/config file defaults.
+        """Reset all available settings.
 
         This functions basically deletes the settings provided before.
         """
-        self._setup()
-
-    def revert_settings(self, backup_id: str) -> None:
-        """Revert application settings to a stored backup.
-
-        This method restores configuration values from a backup file identified
-        by `backup_id`. The backup is expected to exist alongside the main
-        configuration file, using the main config file's path but with the given
-        suffix. Any settings previously applied will be overwritten.
-
-        Args:
-            backup_id (str): The suffix used to locate the backup configuration
-                file. Example: ``".bak"`` or ``".backup"``.
-
-        Returns:
-            None: The method does not return a value.
-
-        Raises:
-            ValueError: If the backup file cannot be found at the constructed path.
-            json.JSONDecodeError: If the backup file exists but contains invalid JSON.
-            TypeError: If the unpacked backup data fails to match the signature
-                required by ``self._setup()``.
-            OSError: If reading the backup file fails due to I/O issues.
-        """
-        backup_file_path = self.general.config_file_path.with_suffix(f".{backup_id}")
-        if not backup_file_path.exists():
-            error_msg = f"Configuration backup `{backup_id}` not found."
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-
-        with backup_file_path.open("r", encoding="utf-8") as f:
-            backup_data: dict[str, Any] = json.load(f)
-        backup_settings = migrate_config_data(backup_data)
-
-        self._setup(**backup_settings.model_dump(exclude_none=True, exclude_unset=True))
-
-    def list_backups(self) -> dict[str, dict[str, Any]]:
-        """List available configuration backup files and extract metadata.
-
-        Backup files are identified by sharing the same stem as the main config
-        file but having a different suffix. Each backup file is assumed to contain
-        a JSON object.
-
-        The returned dictionary uses `backup_id` (suffix) as keys. The value for
-        each key is a dictionary including:
-        - ``storage_time``: The file modification timestamp in ISO-8601 format.
-        - ``version``: Version information found in the backup file
-          (defaults to ``"unknown"``).
-
-        Returns:
-            dict[str, dict[str, Any]]: Mapping of backup identifiers to metadata.
-
-        Raises:
-            OSError: If directory scanning or file reading fails.
-            json.JSONDecodeError: If a backup file cannot be parsed as JSON.
-        """
-        result: dict[str, dict[str, Any]] = {}
-
-        base_path: Path = self.general.config_file_path
-        parent = base_path.parent
-        stem = base_path.stem
-
-        # Iterate files next to config file
-        for file in parent.iterdir():
-            if file.is_file() and file.stem == stem and file != base_path:
-                backup_id = file.suffix[1:]
-
-                # Read version from file
-                with file.open("r", encoding="utf-8") as f:
-                    data: dict[str, Any] = json.load(f)
-
-                # Extract version safely
-                version = data.get("general", {}).get("version", "unknown")
-
-                # Read file modification time (OS-independent)
-                ts = file.stat().st_mtime
-                storage_time = to_datetime(ts, as_string=True)
-                result[backup_id] = {
-                    "date_time": storage_time,
-                    "version": version,
-                }
-
-        return result
-
-    def _create_initial_config_file(self) -> None:
-        if self.general.config_file_path and not self.general.config_file_path.exists():
-            self.general.config_file_path.parent.mkdir(parents=True, exist_ok=True)
-            try:
-                with self.general.config_file_path.open("w", encoding="utf-8", newline="\n") as f:
-                    f.write(self.model_dump_json(indent=4))
-            except Exception as e:
-                logger.error(
-                    f"Could not write configuration file '{self.general.config_file_path}': {e}"
-                )
+        ConfigEOS._settings = None
 
     def _update_data_folder_path(self) -> None:
         """Updates path to the data directory."""
         # From Settings
-        if data_dir := self.general.data_folder_path:
+        if self.settings and (data_dir := self.settings.data_folder_path):
             try:
                 data_dir.mkdir(parents=True, exist_ok=True)
-                self.general.data_folder_path = data_dir
+                self.data_folder_path = data_dir
                 return
-            except Exception as e:
-                logger.warning(f"Could not setup data dir: {e}")
+            except:
+                pass
         # From EOS_DIR env
-        if env_dir := os.getenv(self.EOS_DIR):
+        env_dir = os.getenv(self.EOS_DIR)
+        if env_dir is not None:
             try:
                 data_dir = Path(env_dir).resolve()
                 data_dir.mkdir(parents=True, exist_ok=True)
-                self.general.data_folder_path = data_dir
+                self.data_folder_path = data_dir
                 return
-            except Exception as e:
-                logger.warning(f"Could not setup data dir: {e}")
+            except:
+                pass
+        # From configuration file
+        if self._file_settings and (data_dir := self._file_settings.data_folder_path):
+            try:
+                data_dir.mkdir(parents=True, exist_ok=True)
+                self.data_folder_path = data_dir
+                return
+            except:
+                pass
         # From platform specific default path
         try:
             data_dir = Path(user_data_dir(self.APP_NAME, self.APP_AUTHOR))
             if data_dir is not None:
                 data_dir.mkdir(parents=True, exist_ok=True)
-                self.general.data_folder_path = data_dir
+                self.data_folder_path = data_dir
                 return
-        except Exception as e:
-            logger.warning(f"Could not setup data dir: {e}")
+        except:
+            pass
         # Current working directory
         data_dir = Path.cwd()
-        self.general.data_folder_path = data_dir
+        self.data_folder_path = data_dir
 
-    @classmethod
-    def _get_config_file_path(cls) -> tuple[Path, bool]:
-        """Find a valid configuration file or return the desired path for a new config file.
-
-        Searches:
-        1. environment variable directory
-        2. user configuration directory
-        3. current working directory
+    def _get_config_file_path(self) -> tuple[Path, bool]:
+        """Finds the a valid configuration file or returns the desired path for a new config file.
 
         Returns:
-            tuple[Path, bool]: The path to the configuration file and if there is already a config file there
+            tuple[Path, bool]: The path to the configuration directory and if there is already a config file there
         """
         config_dirs = []
-        env_base_dir = os.getenv(cls.EOS_DIR)
-        env_config_dir = os.getenv(cls.EOS_CONFIG_DIR)
+        env_base_dir = os.getenv(self.EOS_DIR)
+        env_config_dir = os.getenv(self.EOS_CONFIG_DIR)
         env_dir = get_absolute_path(env_base_dir, env_config_dir)
-        logger.debug(f"Environment config dir: '{env_dir}'")
+        logger.debug(f"Envionment config dir: '{env_dir}'")
         if env_dir is not None:
             config_dirs.append(env_dir.resolve())
-        config_dirs.append(Path(user_config_dir(cls.APP_NAME, cls.APP_AUTHOR)))
+        config_dirs.append(Path(user_config_dir(self.APP_NAME)))
         config_dirs.append(Path.cwd())
         for cdir in config_dirs:
-            cfile = cdir.joinpath(cls.CONFIG_FILE_NAME)
+            cfile = cdir.joinpath(self.CONFIG_FILE_NAME)
             if cfile.exists():
                 logger.debug(f"Found config file: '{cfile}'")
                 return cfile, True
-        return config_dirs[0].joinpath(cls.CONFIG_FILE_NAME), False
+        return config_dirs[0].joinpath(self.CONFIG_FILE_NAME), False
+
+    def settings_from_config_file(self) -> tuple[SettingsEOS, Path]:
+        """Load settings from the configuration file.
+
+        If the config file does not exist, it will be created.
+
+        Returns:
+            tuple of settings and path
+                settings (SettingsEOS): The settings defined by the EOS configuration file.
+                path (pathlib.Path): The path of the configuration file.
+
+        Raises:
+            ValueError: If the configuration file is invalid or incomplete.
+        """
+        config_file, exists = self._get_config_file_path()
+        config_dir = config_file.parent
+
+        # Create config directory and copy default config if file does not exist
+        if not exists:
+            config_dir.mkdir(parents=True, exist_ok=True)
+            try:
+                shutil.copy2(self.config_default_file_path, config_file)
+            except Exception as exc:
+                logger.warning(f"Could not copy default config: {exc}. Using default config...")
+                config_file = self.config_default_file_path
+                config_dir = config_file.parent
+
+        # Load and validate the configuration file
+        with config_file.open("r", encoding=self.ENCODING) as f_in:
+            try:
+                json_str = f_in.read()
+                settings = SettingsEOS.model_validate_json(json_str)
+            except ValidationError as exc:
+                raise ValueError(f"Configuration '{config_file}' is incomplete or not valid: {exc}")
+
+        return settings, config_file
+
+    def from_config_file(self) -> tuple[SettingsEOS, Path]:
+        """Load the configuration file settings for EOS.
+
+        Returns:
+            tuple of settings and path
+                settings (SettingsEOS): The settings defined by the EOS configuration file.
+                path (pathlib.Path): The path of the configuration file.
+
+        Raises:
+            ValueError: If the configuration file is invalid or incomplete.
+        """
+        # Load settings from config file
+        ConfigEOS._file_settings, config_file = self.settings_from_config_file()
+
+        # Update configuration in memory
+        self.update()
+
+        # Everything worked, remember the values
+        self._config_folder_path = config_file.parent
+        self._config_file_path = config_file
+
+        return ConfigEOS._file_settings, config_file
 
     def to_config_file(self) -> None:
         """Saves the current configuration to the configuration file.
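Both doctest variants above update the running configuration from a plain dict; only the key layout differs between the two branches. A hedged round-trip sketch using the nested layout from the `-` side (`get_config` as in this module, values illustrative):

```python
# Hedged sketch: update the running EOS configuration and persist it.
config = get_config()
config.merge_settings_from_dict(
    {
        "general": {"latitude": 52.52, "longitude": 13.405},
        "prediction": {"hours": 24},
    }
)
config.to_config_file()  # writes EOS.config.json at config_file_path
```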
@@ -635,24 +433,77 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
         Raises:
             ValueError: If the configuration file path is not specified or can not be written to.
         """
-        if not self.general.config_file_path:
+        if not self.config_file_path:
             raise ValueError("Configuration file path unknown.")
-        with self.general.config_file_path.open("w", encoding="utf-8", newline="\n") as f_out:
-            json_str = super().model_dump_json(indent=4)
-            f_out.write(json_str)
+        with self.config_file_path.open("w", encoding=self.ENCODING) as f_out:
+            try:
+                json_str = super().to_json()
+                # Write to file
+                f_out.write(json_str)
+                # Also remember as actual settings
+                ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
+            except ValidationError as exc:
+                raise ValueError(f"Could not update '{self.config_file_path}': {exc}")
+
+    def _config_value(self, key: str) -> Any:
+        """Retrieves the configuration value for a specific key, following a priority order.
+
+        Values are fetched in the following order:
+        1. Settings.
+        2. Environment variables.
+        3. EOS configuration file.
+        4. Current configuration.
+        5. Field default constants.
+
+        Args:
+            key (str): The configuration key to retrieve.
+
+        Returns:
+            Any: The configuration value, or None if not found.
+        """
+        # Settings
+        if ConfigEOS._settings:
+            if (value := getattr(self.settings, key, None)) is not None:
+                return value
+
+        # Environment variables
+        if (value := os.getenv(key)) is not None:
+            try:
+                return float(value)
+            except ValueError:
+                return value
+
+        # EOS configuration file.
+        if self._file_settings:
+            if (value := getattr(self._file_settings, key, None)) is not None:
+                return value
+
+        # Current configuration - key is valid as called by update().
+        if (value := getattr(self, key, None)) is not None:
+            return value
+
+        # Field default constants
+        if (value := ConfigEOS.model_fields[key].default) is not None:
+            return value
+
+        logger.debug(f"Value for configuration key '{key}' not found or is {value}")
+        return None
 
     def update(self) -> None:
         """Updates all configuration fields.
 
         This method updates all configuration fields using the following order for value retrieval:
-        1. Current settings.
+        1. Settings.
         2. Environment variables.
         3. EOS configuration file.
-        4. Field default constants.
+        4. Current configuration.
+        5. Field default constants.
 
         The first non None value in priority order is taken.
         """
-        self._setup(**self.model_dump())
+        self._update_data_folder_path()
+        for key in self.model_fields:
+            setattr(self, key, self._config_value(key))
 
 
 def get_config() -> ConfigEOS:
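`_config_value` above implements a first-hit-wins lookup across five sources. A generic, hedged sketch of the same pattern, reduced to plain callables:

```python
# Hedged, generic sketch of the first-non-None priority lookup implemented
# by _config_value above (plain callables instead of the EOS sources).
import os
from typing import Any, Callable, Optional


def lookup(key: str, sources: list[Callable[[str], Optional[Any]]]) -> Optional[Any]:
    """Return the first non-None value any source yields for `key`."""
    for source in sources:
        value = source(key)
        if value is not None:
            return value
    return None


settings = {"prediction_hours": 48}
defaults = {"prediction_hours": 24, "latitude": 52.52}

value = lookup("prediction_hours", [settings.get, os.environ.get, defaults.get])
print(value)  # 48 - the settings layer wins over env vars and defaults
```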
@@ -1,12 +1,13 @@
 """Abstract and base classes for configuration."""
 
-from typing import Any, ClassVar
-
 from akkudoktoreos.core.pydantic import PydanticBaseModel
 
 
 class SettingsBaseModel(PydanticBaseModel):
-    """Base model class for all settings configurations."""
-
-    # EOS configuration - set by ConfigEOS
-    config: ClassVar[Any] = None
+    """Base model class for all settings configurations.
+
+    Note:
+        Settings property names shall be disjunctive to all existing settings' property names.
+    """
+
+    pass
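On the removed side every settings model carries a `config` ClassVar that the configuration singleton points back at itself (`SettingsBaseModel.config = self` during `ConfigEOS._setup`). A small, hedged sketch of that back-reference pattern in isolation:

```python
# Hedged sketch of the ClassVar back-reference pattern from the "-" side:
# a base model gets a class-level handle to the one configuration object.
from typing import Any, ClassVar

from pydantic import BaseModel


class SettingsBase(BaseModel):
    # Set once by the configuration singleton; None until then.
    config: ClassVar[Any] = None


class Config:
    def __init__(self) -> None:
        SettingsBase.config = self  # every settings model can now reach the singleton


cfg = Config()
assert SettingsBase.config is cfg
```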
@@ -1,252 +0,0 @@
-"""Migrate config file to actual version."""
-
-import json
-import shutil
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Set, Tuple, Union
-
-from loguru import logger
-
-from akkudoktoreos.core.version import __version__
-
-if TYPE_CHECKING:
-    # There are circular dependencies - only import here for type checking
-    from akkudoktoreos.config.config import SettingsEOSDefaults
-
-# -----------------------------
-# Global migration map constant
-# -----------------------------
-# key: old JSON path, value: either
-#   - str (new model path)
-#   - tuple[str, Callable[[Any], Any]] (new path + transform)
-#   - None (drop)
-MIGRATION_MAP: Dict[str, Union[str, Tuple[str, Callable[[Any], Any]], None]] = {
-    # 0.1.0 -> 0.2.0
-    "devices/batteries/0/initial_soc_percentage": None,
-    "devices/electric_vehicles/0/initial_soc_percentage": None,
-    "elecprice/provider_settings/import_file_path": "elecprice/provider_settings/ElecPriceImport/import_file_path",
-    "elecprice/provider_settings/import_json": "elecprice/provider_settings/ElecPriceImport/import_json",
-    "load/provider_settings/import_file_path": "load/provider_settings/LoadImport/import_file_path",
-    "load/provider_settings/import_json": "load/provider_settings/LoadImport/import_json",
-    "load/provider_settings/loadakkudoktor_year_energy": "load/provider_settings/LoadAkkudoktor/loadakkudoktor_year_energy_kwh",
-    "load/provider_settings/load_vrm_idsite": "load/provider_settings/LoadVrm/load_vrm_idsite",
-    "load/provider_settings/load_vrm_token": "load/provider_settings/LoadVrm/load_vrm_token",
-    "logging/level": "logging/console_level",
-    "logging/root_level": None,
-    "measurement/load0_name": "measurement/load_emr_keys/0",
-    "measurement/load1_name": "measurement/load_emr_keys/1",
-    "measurement/load2_name": "measurement/load_emr_keys/2",
-    "measurement/load3_name": "measurement/load_emr_keys/3",
-    "measurement/load4_name": "measurement/load_emr_keys/4",
-    "optimization/ev_available_charge_rates_percent": (
-        "devices/electric_vehicles/0/charge_rates",
-        lambda v: [x / 100 for x in v],
-    ),
-    "optimization/hours": "optimization/horizon_hours",
-    "optimization/penalty": ("optimization/genetic/penalties/ev_soc_miss", lambda v: float(v)),
-    "pvforecast/provider_settings/import_file_path": "pvforecast/provider_settings/PVForecastImport/import_file_path",
-    "pvforecast/provider_settings/import_json": "pvforecast/provider_settings/PVForecastImport/import_json",
-    "pvforecast/provider_settings/load_vrm_idsite": "pvforecast/provider_settings/PVForecastVrm/load_vrm_idsite",
-    "pvforecast/provider_settings/load_vrm_token": "pvforecast/provider_settings/PVForecastVrm/load_vrm_token",
-    "weather/provider_settings/import_file_path": "weather/provider_settings/WeatherImport/import_file_path",
-    "weather/provider_settings/import_json": "weather/provider_settings/WeatherImport/import_json",
-}
||||||
|
|
||||||
# -----------------------------
|
|
||||||
# Global migration stats
|
|
||||||
# -----------------------------
|
|
||||||
migrated_source_paths: Set[str] = set()
|
|
||||||
mapped_count: int = 0
|
|
||||||
auto_count: int = 0
|
|
||||||
skipped_paths: List[str] = []
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_config_data(config_data: Dict[str, Any]) -> "SettingsEOSDefaults":
|
|
||||||
"""Migrate configuration data to the current version settings.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
SettingsEOSDefaults: The migrated settings.
|
|
||||||
"""
|
|
||||||
global migrated_source_paths, mapped_count, auto_count, skipped_paths
|
|
||||||
|
|
||||||
# Reset globals at the start of each migration
|
|
||||||
migrated_source_paths = set()
|
|
||||||
mapped_count = 0
|
|
||||||
auto_count = 0
|
|
||||||
skipped_paths = []
|
|
||||||
|
|
||||||
from akkudoktoreos.config.config import SettingsEOSDefaults
|
|
||||||
|
|
||||||
new_config = SettingsEOSDefaults()
|
|
||||||
|
|
||||||
# 1) Apply explicit migration map
|
|
||||||
for old_path, mapping in MIGRATION_MAP.items():
|
|
||||||
new_path = None
|
|
||||||
transform = None
|
|
||||||
if mapping is None:
|
|
||||||
migrated_source_paths.add(old_path.strip("/"))
|
|
||||||
logger.debug(f"🗑️ Migration map: dropping '{old_path}'")
|
|
||||||
continue
|
|
||||||
if isinstance(mapping, tuple):
|
|
||||||
new_path, transform = mapping
|
|
||||||
else:
|
|
||||||
new_path = mapping
|
|
||||||
|
|
||||||
old_value = _get_json_nested_value(config_data, old_path)
|
|
||||||
if old_value is None:
|
|
||||||
migrated_source_paths.add(old_path.strip("/"))
|
|
||||||
mapped_count += 1
|
|
||||||
logger.debug(f"✅ Migrated mapped '{old_path}' → 'None'")
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
if transform:
|
|
||||||
old_value = transform(old_value)
|
|
||||||
new_config.set_nested_value(new_path, old_value)
|
|
||||||
migrated_source_paths.add(old_path.strip("/"))
|
|
||||||
mapped_count += 1
|
|
||||||
logger.debug(f"✅ Migrated mapped '{old_path}' → '{new_path}' = {old_value!r}")
|
|
||||||
except Exception as e:
|
|
||||||
logger.opt(exception=True).warning(
|
|
||||||
f"Failed mapped migration '{old_path}' -> '{new_path}': {e}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 2) Automatic migration for remaining fields
|
|
||||||
auto_count += _migrate_matching_fields(
|
|
||||||
config_data, new_config, migrated_source_paths, skipped_paths
|
|
||||||
)
|
|
||||||
|
|
||||||
# 3) Ensure version
|
|
||||||
try:
|
|
||||||
new_config.set_nested_value("general/version", __version__)
|
|
||||||
except Exception as e:
|
|
||||||
logger.warning(f"Could not set version on new configuration model: {e}")
|
|
||||||
|
|
||||||
# 4) Log final migration summary
|
|
||||||
logger.info(
|
|
||||||
f"Migration summary: "
|
|
||||||
f"mapped fields: {mapped_count}, automatically migrated: {auto_count}, skipped: {len(skipped_paths)}"
|
|
||||||
)
|
|
||||||
if skipped_paths:
|
|
||||||
logger.debug(f"Skipped paths: {', '.join(skipped_paths)}")
|
|
||||||
|
|
||||||
logger.success(f"Configuration successfully migrated to version {__version__}.")
|
|
||||||
return new_config
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_config_file(config_file: Path, backup_file: Path) -> bool:
|
|
||||||
"""Migrate configuration file to the current version.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if up-to-date or successfully migrated, False on failure.
|
|
||||||
"""
|
|
||||||
global migrated_source_paths, mapped_count, auto_count, skipped_paths
|
|
||||||
|
|
||||||
# Reset globals at the start of each migration
|
|
||||||
migrated_source_paths = set()
|
|
||||||
mapped_count = 0
|
|
||||||
auto_count = 0
|
|
||||||
skipped_paths = []
|
|
||||||
|
|
||||||
try:
|
|
||||||
with config_file.open("r", encoding="utf-8") as f:
|
|
||||||
config_data: Dict[str, Any] = json.load(f)
|
|
||||||
except (FileNotFoundError, json.JSONDecodeError) as e:
|
|
||||||
logger.error(f"Failed to read configuration file '{config_file}': {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
match config_data:
|
|
||||||
case {"general": {"version": v}} if v == __version__:
|
|
||||||
logger.debug(f"Configuration file '{config_file}' is up to date (v{v}).")
|
|
||||||
return True
|
|
||||||
case _:
|
|
||||||
logger.info(
|
|
||||||
f"Configuration file '{config_file}' is missing current version info. "
|
|
||||||
f"Starting migration to v{__version__}..."
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Backup existing file - we already know it is existing
|
|
||||||
try:
|
|
||||||
config_file.replace(backup_file)
|
|
||||||
logger.info(f"Backed up old configuration to '{backup_file}'.")
|
|
||||||
except Exception as e_replace:
|
|
||||||
try:
|
|
||||||
shutil.copy(config_file, backup_file)
|
|
||||||
logger.info(
|
|
||||||
f"Could not replace; copied old configuration to '{backup_file}' instead."
|
|
||||||
)
|
|
||||||
except Exception as e_copy:
|
|
||||||
logger.warning(
|
|
||||||
f"Failed to backup existing config (replace: {e_replace}; copy: {e_copy}). Continuing without backup."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Migrate config data
|
|
||||||
new_config = migrate_config_data(config_data)
|
|
||||||
|
|
||||||
# Write migrated configuration
|
|
||||||
try:
|
|
||||||
with config_file.open("w", encoding="utf-8", newline=None) as f_out:
|
|
||||||
json_str = new_config.model_dump_json(indent=4)
|
|
||||||
f_out.write(json_str)
|
|
||||||
except Exception as e_write:
|
|
||||||
logger.error(f"Failed to write migrated configuration to '{config_file}': {e_write}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.exception(f"Unexpected error during migration: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _get_json_nested_value(data: dict, path: str) -> Any:
|
|
||||||
"""Retrieve a nested value from a JSON-like dict using '/'-separated path."""
|
|
||||||
current: Any = data
|
|
||||||
for part in path.strip("/").split("/"):
|
|
||||||
if isinstance(current, list):
|
|
||||||
try:
|
|
||||||
part_idx = int(part)
|
|
||||||
current = current[part_idx]
|
|
||||||
except (ValueError, IndexError):
|
|
||||||
return None
|
|
||||||
elif isinstance(current, dict):
|
|
||||||
if part not in current:
|
|
||||||
return None
|
|
||||||
current = current[part]
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
return current
|
|
||||||
|
|
||||||
|
|
||||||
def _migrate_matching_fields(
|
|
||||||
source: Dict[str, Any],
|
|
||||||
target_model: Any,
|
|
||||||
migrated_source_paths: Set[str],
|
|
||||||
skipped_paths: List[str],
|
|
||||||
prefix: str = "",
|
|
||||||
) -> int:
|
|
||||||
"""Recursively copy matching keys from source dict into target_model using set_nested_value.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
int: number of fields successfully auto-migrated
|
|
||||||
"""
|
|
||||||
count: int = 0
|
|
||||||
for key, value in source.items():
|
|
||||||
full_path = f"{prefix}/{key}".strip("/")
|
|
||||||
|
|
||||||
if full_path in migrated_source_paths:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if isinstance(value, dict):
|
|
||||||
count += _migrate_matching_fields(
|
|
||||||
value, target_model, migrated_source_paths, skipped_paths, full_path
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
target_model.set_nested_value(full_path, value)
|
|
||||||
count += 1
|
|
||||||
except Exception:
|
|
||||||
skipped_paths.append(full_path)
|
|
||||||
continue
|
|
||||||
return count
|
|
||||||
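Note on the deleted module above: `MIGRATION_MAP` resolved '/'-separated paths in the old JSON and optionally transformed values on the way to the new model; on the target branch this migration step simply does not exist. A self-contained sketch of that mechanism, decoupled from the EOS classes (`MIGRATIONS`, `get_nested`, and the sample data are hypothetical):

```python
from typing import Any, Callable, Dict, Optional, Tuple, Union

# Hypothetical stand-alone version of the MIGRATION_MAP idea:
# old '/'-separated path -> new path, (new path, transform), or None to drop.
MigrationEntry = Union[str, Tuple[str, Callable[[Any], Any]], None]

MIGRATIONS: Dict[str, MigrationEntry] = {
    "optimization/hours": "optimization/horizon_hours",
    "optimization/ev_available_charge_rates_percent": (
        "devices/electric_vehicles/0/charge_rates",
        lambda v: [x / 100 for x in v],
    ),
    "logging/root_level": None,  # intentionally dropped
}


def get_nested(data: Any, path: str) -> Optional[Any]:
    """Walk a '/'-separated path through nested dicts/lists, like _get_json_nested_value."""
    for part in path.strip("/").split("/"):
        if isinstance(data, list):
            try:
                data = data[int(part)]
            except (ValueError, IndexError):
                return None
        elif isinstance(data, dict) and part in data:
            data = data[part]
        else:
            return None
    return data


old = {"optimization": {"hours": 48, "ev_available_charge_rates_percent": [50, 100]}}
for old_path, entry in MIGRATIONS.items():
    if entry is None:
        continue  # value is dropped, not carried over
    new_path, transform = entry if isinstance(entry, tuple) else (entry, lambda v: v)
    value = get_nested(old, old_path)
    if value is not None:
        print(f"{old_path} -> {new_path}: {transform(value)}")
```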
@@ -1,34 +0,0 @@
-"""Settings for caching.
-
-Kept in an extra module to avoid cyclic dependencies on package import.
-"""
-
-from pathlib import Path
-from typing import Optional
-
-from pydantic import Field
-
-from akkudoktoreos.config.configabc import SettingsBaseModel
-
-
-class CacheCommonSettings(SettingsBaseModel):
-    """Cache Configuration."""
-
-    subpath: Optional[Path] = Field(
-        default="cache",
-        json_schema_extra={"description": "Sub-path for the EOS cache data directory."},
-    )
-
-    cleanup_interval: float = Field(
-        default=5 * 60,
-        json_schema_extra={"description": "Intervall in seconds for EOS file cache cleanup."},
-    )
-
-    # Do not make this a pydantic computed field. The pydantic model must be fully initialized
-    # to have access to config.general, which may not be the case if it is a computed field.
-    def path(self) -> Optional[Path]:
-        """Compute cache path based on general.data_folder_path."""
-        data_cache_path = self.config.general.data_folder_path
-        if data_cache_path is None or self.subpath is None:
-            return None
-        return data_cache_path.joinpath(self.subpath)
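The deleted `CacheCommonSettings.path()` only joined the general data folder with the cache subpath; for illustration (paths hypothetical):

```python
from pathlib import Path

# Hypothetical values; CacheCommonSettings.path() composed them the same way.
data_folder_path = Path("/opt/eos/data")
subpath = Path("cache")
print(data_folder_path.joinpath(subpath))  # /opt/eos/data/cache
```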
@@ -13,14 +13,17 @@ Classes:
 import threading
 from typing import Any, ClassVar, Dict, Optional, Type

-from loguru import logger
+from pendulum import DateTime
+from pydantic import computed_field

-from akkudoktoreos.core.decorators import classproperty
-from akkudoktoreos.utils.datetimeutil import DateTime
+from akkudoktoreos.core.logging import get_logger
+
+logger = get_logger(__name__)

 config_eos: Any = None
 measurement_eos: Any = None
 prediction_eos: Any = None
+devices_eos: Any = None
 ems_eos: Any = None


@@ -46,9 +49,9 @@ class ConfigMixin:
         ```
     """

-    @classproperty
-    def config(cls) -> Any:
-        """Convenience class method/ attribute to retrieve the EOS configuration data.
+    @property
+    def config(self) -> Any:
+        """Convenience method/ attribute to retrieve the EOS configuration data.

         Returns:
             ConfigEOS: The configuration.
@@ -86,9 +89,9 @@ class MeasurementMixin:
         ```
     """

-    @classproperty
-    def measurement(cls) -> Any:
-        """Convenience class method/ attribute to retrieve the EOS measurement data.
+    @property
+    def measurement(self) -> Any:
+        """Convenience method/ attribute to retrieve the EOS measurement data.

         Returns:
             Measurement: The measurement.
@@ -126,9 +129,9 @@ class PredictionMixin:
         ```
     """

-    @classproperty
-    def prediction(cls) -> Any:
-        """Convenience class method/ attribute to retrieve the EOS prediction data.
+    @property
+    def prediction(self) -> Any:
+        """Convenience method/ attribute to retrieve the EOS prediction data.

         Returns:
             Prediction: The prediction.
@@ -143,6 +146,46 @@ class PredictionMixin:
         return prediction_eos


+class DevicesMixin:
+    """Mixin class for managing EOS devices simulation data.
+
+    This class serves as a foundational component for EOS-related classes requiring access
+    to global devices simulation data. It provides a `devices` property that dynamically retrieves
+    the devices instance, ensuring up-to-date access to devices simulation results.
+
+    Usage:
+        Subclass this base class to gain access to the `devices` attribute, which retrieves the
+        global devices instance lazily to avoid import-time circular dependencies.
+
+    Attributes:
+        devices (Devices): Property to access the global EOS devices simulation data.
+
+    Example:
+        ```python
+        class MyOptimizationClass(DevicesMixin):
+            def analyze_mydevicesimulation(self):
+                device_simulation_data = self.devices.mydevicesresult
+                # Perform analysis
+        ```
+    """
+
+    @property
+    def devices(self) -> Any:
+        """Convenience method/ attribute to retrieve the EOS devices simulation data.
+
+        Returns:
+            Devices: The devices simulation.
+        """
+        # avoid circular dependency at import time
+        global devices_eos
+        if devices_eos is None:
+            from akkudoktoreos.devices.devices import get_devices
+
+            devices_eos = get_devices()
+
+        return devices_eos
+
+
 class EnergyManagementSystemMixin:
     """Mixin class for managing EOS energy management system.

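Note on the added `DevicesMixin`: it repeats the lazy module-global pattern used by the other mixins, deferring the import until first access to break circular imports. A stripped-down, runnable sketch of the pattern with hypothetical names:

```python
from typing import Any

_devices: Any = None  # module-level cache, mirroring devices_eos above


def get_devices() -> Any:
    """Hypothetical factory standing in for akkudoktoreos.devices.devices.get_devices."""
    return {"battery1": {"soc": 0.8}}


class DevicesMixinSketch:
    @property
    def devices(self) -> Any:
        # First access triggers the (deferred) factory call; later accesses
        # reuse the module-level global, so all mixin users share one instance.
        global _devices
        if _devices is None:
            _devices = get_devices()
        return _devices


class MyOptimizationClass(DevicesMixinSketch):
    def analyze(self) -> Any:
        return self.devices["battery1"]["soc"]


print(MyOptimizationClass().analyze())  # 0.8
```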
@@ -167,9 +210,9 @@ class EnergyManagementSystemMixin:
         ```
     """

-    @classproperty
-    def ems(cls) -> Any:
-        """Convenience class method/ attribute to retrieve the EOS energy management system.
+    @property
+    def ems(self) -> Any:
+        """Convenience method/ attribute to retrieve the EOS energy management system.

         Returns:
             EnergyManagementSystem: The energy management system.
@@ -191,21 +234,16 @@ class StartMixin(EnergyManagementSystemMixin):
     - `start_datetime`: The starting datetime of the current or latest energy management.
     """

-    @classproperty
-    def ems_start_datetime(cls) -> Optional[DateTime]:
-        """Convenience class method/ attribute to retrieve the start datetime of the current or latest energy management.
+    # Computed field for start_datetime
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def start_datetime(self) -> Optional[DateTime]:
+        """Returns the start datetime of the current or latest energy management.

         Returns:
             DateTime: The starting datetime of the current or latest energy management, or None.
         """
-        # avoid circular dependency at import time
-        global ems_eos
-        if ems_eos is None:
-            from akkudoktoreos.core.ems import get_ems
-
-            ems_eos = get_ems()
-
-        return ems_eos.start_datetime
+        return self.ems.start_datetime


 class SingletonMixin:
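Note on the hunk above: the branch swaps the `classproperty`-based `ems_start_datetime` for a pydantic `computed_field` property, so the derived value also shows up in `model_dump()` and the schema. A minimal sketch of that construct (model and field names hypothetical):

```python
from typing import Optional

from pydantic import BaseModel, computed_field


class Window(BaseModel):
    """Hypothetical model showing the @computed_field + @property stacking used above."""

    start_hour: Optional[int] = None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def start_label(self) -> Optional[str]:
        # Derived, read-only value that serializes alongside regular fields.
        return None if self.start_hour is None else f"{self.start_hour:02d}:00"


print(Window(start_hour=7).model_dump())  # {'start_hour': 7, 'start_label': '07:00'}
```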
@@ -227,14 +265,6 @@ class SingletonMixin:
         class MySingletonModel(SingletonMixin, PydanticBaseModel):
             name: str

-            # implement __init__ to avoid re-initialization of parent classes:
-            def __init__(self, *args: Any, **kwargs: Any) -> None:
-                if hasattr(self, "_initialized"):
-                    return
-                # Your initialisation here
-                ...
-                super().__init__(*args, **kwargs)
-
         instance1 = MySingletonModel(name="Instance 1")
         instance2 = MySingletonModel(name="Instance 2")

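The removed docstring lines documented the `_initialized` guard that subclasses used so a second construction would not re-run `__init__` on the shared instance. A self-contained sketch of that guard, not EOS's actual `SingletonMixin` implementation:

```python
from typing import Any


class SingletonMixin:
    """Minimal sketch of a singleton mixin with an _initialized guard."""

    _instance = None

    def __new__(cls, *args: Any, **kwargs: Any):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance


class MySingleton(SingletonMixin):
    def __init__(self, name: str) -> None:
        # __init__ runs on every construction, so guard against re-initialization.
        if hasattr(self, "_initialized"):
            return
        self._initialized = True
        self.name = name


a = MySingleton(name="Instance 1")
b = MySingleton(name="Instance 2")
print(a is b, a.name)  # True Instance 1
```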
@@ -14,23 +14,13 @@ from abc import abstractmethod
 from collections.abc import MutableMapping, MutableSequence
 from itertools import chain
 from pathlib import Path
-from typing import (
-    Any,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
-    overload,
-)
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union, overload

 import numpy as np
 import pandas as pd
 import pendulum
-from loguru import logger
 from numpydantic import NDArray, Shape
+from pendulum import DateTime, Duration
 from pydantic import (
     AwareDatetime,
     ConfigDict,
@@ -38,22 +28,18 @@ from pydantic import (
     ValidationError,
     computed_field,
     field_validator,
-    model_validator,
 )

 from akkudoktoreos.core.coreabc import ConfigMixin, SingletonMixin, StartMixin
+from akkudoktoreos.core.logging import get_logger
 from akkudoktoreos.core.pydantic import (
     PydanticBaseModel,
     PydanticDateTimeData,
     PydanticDateTimeDataFrame,
 )
-from akkudoktoreos.utils.datetimeutil import (
-    DateTime,
-    Duration,
-    compare_datetimes,
-    to_datetime,
-    to_duration,
-)
+from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
+
+logger = get_logger(__name__)


 class DataBase(ConfigMixin, StartMixin, PydanticBaseModel):
@@ -71,11 +57,6 @@ class DataRecord(DataBase, MutableMapping):
     Fields can be accessed and mutated both using dictionary-style access (`record['field_name']`)
     and attribute-style access (`record.field_name`).

-    The data record also provides configured field like data. Configuration has to be done by the
-    derived class. Configuration is a list of key strings, which is usually taken from the EOS
-    configuration. The internal field for these data `configured_data` is mostly hidden from
-    dictionary-style and attribute-style access.
-
     Attributes:
         date_time (Optional[DateTime]): Aware datetime indicating when the data record applies.

@@ -84,48 +65,11 @@ class DataRecord(DataBase, MutableMapping):
     - Supports non-standard data types like `datetime`.
     """

-    date_time: Optional[DateTime] = Field(
-        default=None, json_schema_extra={"description": "DateTime"}
-    )
-
-    configured_data: dict[str, Any] = Field(
-        default_factory=dict,
-        json_schema_extra={
-            "description": "Configured field like data",
-            "examples": [{"load0_mr": 40421}],
-        },
-    )
+    date_time: Optional[DateTime] = Field(default=None, description="DateTime")

     # Pydantic v2 model configuration
     model_config = ConfigDict(arbitrary_types_allowed=True, populate_by_name=True)

-    @model_validator(mode="before")
-    @classmethod
-    def init_configured_field_like_data(cls, data: Any) -> Any:
-        """Extracts configured data keys from the input and assigns them to `configured_data`.
-
-        This validator is called before the model is initialized. It filters out any keys from the input
-        dictionary that are listed in the configured data keys, and moves them into
-        the `configured_data` field of the model. This enables flexible, key-driven population of
-        dynamic data while keeping the model schema clean.
-
-        Args:
-            data (Any): The raw input data used to initialize the model.
-
-        Returns:
-            Any: The modified input data dictionary, with configured keys moved to `configured_data`.
-        """
-        if not isinstance(data, dict):
-            return data
-
-        configured_keys: Union[list[str], set] = cls.configured_data_keys() or set()
-        extracted = {k: data.pop(k) for k in list(data.keys()) if k in configured_keys}
-
-        if extracted:
-            data.setdefault("configured_data", {}).update(extracted)
-
-        return data
-
     @field_validator("date_time", mode="before")
     @classmethod
     def transform_to_datetime(cls, value: Any) -> Optional[DateTime]:
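The removed `init_configured_field_like_data` validator pulled configured keys out of the raw input before field validation. The same pydantic pattern in isolation (`Record` and its key set are hypothetical):

```python
from typing import Any, ClassVar

from pydantic import BaseModel, Field, model_validator


class Record(BaseModel):
    """Hypothetical record that folds selected input keys into an internal dict."""

    value: float = 0.0
    extras: dict[str, Any] = Field(default_factory=dict)

    ALLOWED_EXTRAS: ClassVar[set[str]] = {"load0_mr", "load1_mr"}

    @model_validator(mode="before")
    @classmethod
    def collect_extras(cls, data: Any) -> Any:
        # Runs before field validation: move known extra keys into `extras`,
        # the same shape as the removed init_configured_field_like_data().
        if not isinstance(data, dict):
            return data
        moved = {k: data.pop(k) for k in list(data) if k in cls.ALLOWED_EXTRAS}
        if moved:
            data.setdefault("extras", {}).update(moved)
        return data


print(Record(value=1.5, load0_mr=40421).extras)  # {'load0_mr': 40421}
```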
@@ -135,39 +79,18 @@ class DataRecord(DataBase, MutableMapping):
             return None
         return to_datetime(value)

-    @classmethod
-    def configured_data_keys(cls) -> Optional[list[str]]:
-        """Return the keys for the configured field like data.
-
-        Can be overwritten by derived classes to define specific field like data. Usually provided
-        by configuration data.
-        """
-        return None
-
     @classmethod
     def record_keys(cls) -> List[str]:
         """Returns the keys of all fields in the data record."""
         key_list = []
         key_list.extend(list(cls.model_fields.keys()))
         key_list.extend(list(cls.__pydantic_decorators__.computed_fields.keys()))
-        # Add also keys that may be added by configuration
-        key_list.remove("configured_data")
-        configured_keys = cls.configured_data_keys()
-        if configured_keys is not None:
-            key_list.extend(configured_keys)
         return key_list

     @classmethod
     def record_keys_writable(cls) -> List[str]:
         """Returns the keys of all fields in the data record that are writable."""
-        keys_writable = []
-        keys_writable.extend(list(cls.model_fields.keys()))
-        # Add also keys that may be added by configuration
-        keys_writable.remove("configured_data")
-        configured_keys = cls.configured_data_keys()
-        if configured_keys is not None:
-            keys_writable.extend(configured_keys)
-        return keys_writable
+        return list(cls.model_fields.keys())

     def _validate_key_writable(self, key: str) -> None:
         """Verify that a specified key exists and is writable in the current record keys.
@@ -183,40 +106,6 @@ class DataRecord(DataBase, MutableMapping):
             f"Key '{key}' is not in writable record keys: {self.record_keys_writable()}"
         )

-    def __dir__(self) -> list[str]:
-        """Extend the default `dir()` output to include configured field like data keys.
-
-        This enables editor auto-completion and interactive introspection, while hiding the internal
-        `configured_data` dictionary.
-
-        This ensures the configured field like data values appear like native fields,
-        in line with the base model's attribute behavior.
-        """
-        base = super().__dir__()
-        keys = set(base)
-        # Expose configured data keys as attributes
-        configured_keys = self.configured_data_keys()
-        if configured_keys is not None:
-            keys.update(configured_keys)
-        # Explicitly hide the 'configured_data' internal dict
-        keys.discard("configured_data")
-        return sorted(keys)
-
-    def __eq__(self, other: Any) -> bool:
-        """Ensure equality comparison includes the contents of the `configured_data` dict.
-
-        Contents of the `configured_data` dict are in addition to the base model fields.
-        """
-        if not isinstance(other, self.__class__):
-            return NotImplemented
-        # Compare all fields except `configured_data`
-        if self.model_dump(exclude={"configured_data"}) != other.model_dump(
-            exclude={"configured_data"}
-        ):
-            return False
-        # Compare `configured_data` explicitly
-        return self.configured_data == other.configured_data
-
     def __getitem__(self, key: str) -> Any:
         """Retrieve the value of a field by key name.

@@ -229,11 +118,9 @@ class DataRecord(DataBase, MutableMapping):
         Raises:
             KeyError: If the specified key does not exist.
         """
-        try:
-            # Let getattr do the work
-            return self.__getattr__(key)
-        except:
-            raise KeyError(f"'{key}' not found in the record fields.")
+        if key in self.model_fields:
+            return getattr(self, key)
+        raise KeyError(f"'{key}' not found in the record fields.")

     def __setitem__(self, key: str, value: Any) -> None:
         """Set the value of a field by key name.
@@ -245,10 +132,9 @@ class DataRecord(DataBase, MutableMapping):
         Raises:
             KeyError: If the specified key does not exist in the fields.
         """
-        try:
-            # Let setattr do the work
-            self.__setattr__(key, value)
-        except:
-            raise KeyError(f"'{key}' is not a recognized field.")
+        if key in self.model_fields:
+            setattr(self, key, value)
+        else:
+            raise KeyError(f"'{key}' is not a recognized field.")

     def __delitem__(self, key: str) -> None:
@@ -260,9 +146,9 @@ class DataRecord(DataBase, MutableMapping):
         Raises:
             KeyError: If the specified key does not exist in the fields.
         """
-        try:
-            self.__delattr__(key)
-        except:
-            raise KeyError(f"'{key}' is not a recognized field.")
+        if key in self.model_fields:
+            setattr(self, key, None)  # Optional: set to None instead of deleting
+        else:
+            raise KeyError(f"'{key}' is not a recognized field.")

     def __iter__(self) -> Iterator[str]:
@@ -271,7 +157,7 @@ class DataRecord(DataBase, MutableMapping):
         Returns:
             Iterator[str]: An iterator over field names.
         """
-        return iter(self.record_keys_writable())
+        return iter(self.model_fields)

     def __len__(self) -> int:
         """Return the number of fields in the data record.
@@ -279,7 +165,7 @@ class DataRecord(DataBase, MutableMapping):
         Returns:
             int: The number of defined fields.
         """
-        return len(self.record_keys_writable())
+        return len(self.model_fields)

     def __repr__(self) -> str:
         """Provide a string representation of the data record.
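With the simplifications above, the mapping protocol of a record delegates directly to `model_fields`. A runnable sketch of the resulting dict-style behavior on a hypothetical record class:

```python
from collections.abc import MutableMapping
from typing import Any, Iterator, Optional

from pydantic import BaseModel


class MiniRecord(BaseModel, MutableMapping):
    """Hypothetical record with the simplified model_fields-backed mapping protocol."""

    temperature: Optional[float] = None

    def __getitem__(self, key: str) -> Any:
        if key in self.model_fields:
            return getattr(self, key)
        raise KeyError(f"'{key}' not found in the record fields.")

    def __setitem__(self, key: str, value: Any) -> None:
        if key in self.model_fields:
            setattr(self, key, value)
        else:
            raise KeyError(f"'{key}' is not a recognized field.")

    def __delitem__(self, key: str) -> None:
        if key in self.model_fields:
            setattr(self, key, None)  # set to None instead of deleting
        else:
            raise KeyError(f"'{key}' is not a recognized field.")

    def __iter__(self) -> Iterator[str]:
        return iter(self.model_fields)

    def __len__(self) -> int:
        return len(self.model_fields)


rec = MiniRecord()
rec["temperature"] = 21.5  # dict-style and attribute-style access are equivalent
print(rec.temperature, len(rec), list(rec))  # 21.5 1 ['temperature']
```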
@@ -287,7 +173,7 @@ class DataRecord(DataBase, MutableMapping):
         Returns:
             str: A string representation showing field names and their values.
         """
-        field_values = {field: getattr(self, field) for field in self.__class__.model_fields}
+        field_values = {field: getattr(self, field) for field in self.model_fields}
         return f"{self.__class__.__name__}({field_values})"

     def __getattr__(self, key: str) -> Any:
@@ -302,13 +188,8 @@ class DataRecord(DataBase, MutableMapping):
         Raises:
             AttributeError: If the field does not exist.
         """
-        if key in self.__class__.model_fields:
+        if key in self.model_fields:
             return getattr(self, key)
-        if key in self.configured_data.keys():
-            return self.configured_data[key]
-        configured_keys = self.configured_data_keys()
-        if configured_keys is not None and key in configured_keys:
-            return None
         raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{key}'")

     def __setattr__(self, key: str, value: Any) -> None:
@@ -321,14 +202,10 @@ class DataRecord(DataBase, MutableMapping):
         Raises:
             AttributeError: If the attribute/field does not exist.
         """
-        if key in self.__class__.model_fields:
+        if key in self.model_fields:
             super().__setattr__(key, value)
-            return
-        configured_keys = self.configured_data_keys()
-        if configured_keys is not None and key in configured_keys:
-            self.configured_data[key] = value
-            return
-        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{key}'")
+        else:
+            raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{key}'")

     def __delattr__(self, key: str) -> None:
         """Delete an attribute by setting it to None if it exists as a field.
@@ -339,21 +216,10 @@ class DataRecord(DataBase, MutableMapping):
         Raises:
             AttributeError: If the attribute/field does not exist.
         """
-        if key in self.__class__.model_fields:
-            data: Optional[dict]
-            if key == "configured_data":
-                data = dict()
-            else:
-                data = None
-            setattr(self, key, data)
-            return
-        if key in self.configured_data:
-            del self.configured_data[key]
-            return
-        configured_keys = self.configured_data_keys()
-        if configured_keys is not None and key in configured_keys:
-            return
-        super().__delattr__(key)
+        if key in self.model_fields:
+            setattr(self, key, None)  # Optional: set to None instead of deleting
+        else:
+            super().__delattr__(key)

     @classmethod
     def key_from_description(cls, description: str, threshold: float = 0.8) -> Optional[str]:
@@ -372,11 +238,10 @@ class DataRecord(DataBase, MutableMapping):
             return None

         # Get all descriptions from the fields
-        descriptions: dict[str, str] = {}
-        for field_name in cls.model_fields.keys():
-            desc = cls.field_description(field_name)
-            if desc:
-                descriptions[field_name] = desc
+        descriptions = {
+            field_name: field_info.description
+            for field_name, field_info in cls.model_fields.items()
+        }

         # Use difflib to get close matches
         matches = difflib.get_close_matches(
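`key_from_description` (context around this hunk) fuzzy-matches a description against the field descriptions via `difflib`. The core of that lookup in isolation (descriptions hypothetical):

```python
import difflib

# Hypothetical field descriptions, as collected from model_fields above.
descriptions = {
    "date_time": "DateTime",
    "temperature": "Outside temperature in Celsius",
    "humidity": "Relative humidity in percent",
}

query = "outside temperature in celsius"
matches = difflib.get_close_matches(query, descriptions.values(), n=1, cutoff=0.8)
if matches:
    key = next(k for k, v in descriptions.items() if v == matches[0])
    print(key)  # temperature
```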
@@ -434,7 +299,8 @@ class DataSequence(DataBase, MutableSequence):
     Usage:
         # Example of creating, adding, and using DataSequence
         class DerivedSequence(DataSquence):
-            records: List[DerivedDataRecord] = Field(default_factory=list, json_schema_extra={ "description": "List of data records" })
+            records: List[DerivedDataRecord] = Field(default_factory=list,
+                                                     description="List of data records")

         seq = DerivedSequence()
         seq.insert(DerivedDataRecord(date_time=datetime.now(), temperature=72))
@@ -449,9 +315,7 @@ class DataSequence(DataBase, MutableSequence):
     """

     # To be overloaded by derived classes.
-    records: List[DataRecord] = Field(
-        default_factory=list, json_schema_extra={"description": "List of data records"}
-    )
+    records: List[DataRecord] = Field(default_factory=list, description="List of data records")

     # Derived fields (computed)
     @computed_field  # type: ignore[prop-decorator]
@@ -490,7 +354,10 @@ class DataSequence(DataBase, MutableSequence):
     @property
     def record_keys(self) -> List[str]:
         """Returns the keys of all fields in the data records."""
-        return self.record_class().record_keys()
+        key_list = []
+        key_list.extend(list(self.record_class().model_fields.keys()))
+        key_list.extend(list(self.record_class().__pydantic_decorators__.computed_fields.keys()))
+        return key_list

     @computed_field  # type: ignore[prop-decorator]
     @property
@@ -504,7 +371,7 @@ class DataSequence(DataBase, MutableSequence):
         Returns:
             List[str]: A list of field keys that are writable in the data records.
         """
-        return self.record_class().record_keys_writable()
+        return list(self.record_class().model_fields.keys())

     @classmethod
     def record_class(cls) -> Type:
@@ -842,38 +709,6 @@ class DataSequence(DataBase, MutableSequence):

         return filtered_data

-    def key_to_value(self, key: str, target_datetime: DateTime) -> Optional[float]:
-        """Returns the value corresponding to the specified key that is nearest to the given datetime.
-
-        Args:
-            key (str): The key of the attribute in DataRecord to extract.
-            target_datetime (datetime): The datetime to search nearest to.
-
-        Returns:
-            Optional[float]: The value nearest to the given datetime, or None if no valid records are found.
-
-        Raises:
-            KeyError: If the specified key is not found in any of the DataRecords.
-        """
-        self._validate_key(key)
-
-        # Filter out records with None or NaN values for the key
-        valid_records = [
-            record
-            for record in self.records
-            if record.date_time is not None
-            and getattr(record, key, None) not in (None, float("nan"))
-        ]
-
-        if not valid_records:
-            return None
-
-        # Find the record with datetime nearest to target_datetime
-        target = to_datetime(target_datetime)
-        nearest_record = min(valid_records, key=lambda r: abs(r.date_time - target))
-
-        return getattr(nearest_record, key, None)
-
     def key_to_lists(
         self,
         key: str,
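The removed `key_to_value` boils down to picking the record whose timestamp is nearest to the target. Worth noting that its NaN filter, `not in (None, float("nan"))`, never actually catches NaN, since NaN compares unequal to itself. The core lookup in plain Python (sample records hypothetical):

```python
from datetime import datetime, timezone

# Hypothetical (timestamp, value) records, mirroring record.date_time / getattr(record, key).
records = [
    (datetime(2024, 11, 10, 0, tzinfo=timezone.utc), 20.5),
    (datetime(2024, 11, 10, 1, tzinfo=timezone.utc), 21.0),
    (datetime(2024, 11, 10, 2, tzinfo=timezone.utc), 22.1),
]

target = datetime(2024, 11, 10, 1, 20, tzinfo=timezone.utc)
valid = [(dt, v) for dt, v in records if v is not None]
nearest = min(valid, key=lambda rec: abs(rec[0] - target))
print(nearest[1])  # 21.0
```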
@@ -976,8 +811,7 @@ class DataSequence(DataBase, MutableSequence):
         dates, values = self.key_to_lists(
             key=key, start_datetime=start_datetime, end_datetime=end_datetime, dropna=dropna
         )
-        series = pd.Series(data=values, index=pd.DatetimeIndex(dates), name=key)
-        return series
+        return pd.Series(data=values, index=pd.DatetimeIndex(dates), name=key)

     def key_from_series(self, key: str, series: pd.Series) -> None:
         """Update the DataSequence from a Pandas Series.
@@ -1035,11 +869,6 @@ class DataSequence(DataBase, MutableSequence):
             KeyError: If the specified key is not found in any of the DataRecords.
         """
         self._validate_key(key)
-
-        # General check on fill_method
-        if fill_method not in ("ffill", "bfill", "linear", "none", None):
-            raise ValueError(f"Unsupported fill method: {fill_method}")
-
         # Ensure datetime objects are normalized
         start_datetime = to_datetime(start_datetime, to_maxtime=False) if start_datetime else None
         end_datetime = to_datetime(end_datetime, to_maxtime=False) if end_datetime else None
@@ -1052,7 +881,7 @@ class DataSequence(DataBase, MutableSequence):
         values_len = len(values)

         if values_len < 1:
-            # No values, assume at least one value set to None
+            # No values, assume at at least one value set to None
             if start_datetime is not None:
                 dates.append(start_datetime - interval)
             else:
@@ -1074,11 +903,6 @@ class DataSequence(DataBase, MutableSequence):
             # Truncate all values before latest value before start_datetime
             dates = dates[start_index - 1 :]
             values = values[start_index - 1 :]
-            # We have a start_datetime, align to start datetime
-            resample_origin = start_datetime
-        else:
-            # We do not have a start_datetime, align resample buckets to midnight of first day
-            resample_origin = "start_day"

         if end_datetime is not None:
             if compare_datetimes(dates[-1], end_datetime).lt:
@@ -1099,7 +923,7 @@ class DataSequence(DataBase, MutableSequence):
             if fill_method is None:
                 fill_method = "linear"
             # Resample the series to the specified interval
-            resampled = series.resample(interval, origin=resample_origin).first()
+            resampled = series.resample(interval, origin="start").first()
             if fill_method == "linear":
                 resampled = resampled.interpolate(method="linear")
             elif fill_method == "ffill":
|
|||||||
if fill_method is None:
|
if fill_method is None:
|
||||||
fill_method = "ffill"
|
fill_method = "ffill"
|
||||||
# Resample the series to the specified interval
|
# Resample the series to the specified interval
|
||||||
resampled = series.resample(interval, origin=resample_origin).first()
|
resampled = series.resample(interval, origin="start").first()
|
||||||
if fill_method == "ffill":
|
if fill_method == "ffill":
|
||||||
resampled = resampled.ffill()
|
resampled = resampled.ffill()
|
||||||
elif fill_method == "bfill":
|
elif fill_method == "bfill":
|
||||||
@@ -1121,64 +945,14 @@ class DataSequence(DataBase, MutableSequence):
|
|||||||
elif fill_method != "none":
|
elif fill_method != "none":
|
||||||
raise ValueError(f"Unsupported fill method for non-numeric data: {fill_method}")
|
raise ValueError(f"Unsupported fill method for non-numeric data: {fill_method}")
|
||||||
|
|
||||||
logger.debug(
|
|
||||||
"Resampled for '{}' with length {}: {}...{}",
|
|
||||||
key,
|
|
||||||
len(resampled),
|
|
||||||
resampled[:10],
|
|
||||||
resampled[-10:],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Convert the resampled series to a NumPy array
|
# Convert the resampled series to a NumPy array
|
||||||
if start_datetime is not None and len(resampled) > 0:
|
if start_datetime is not None and len(resampled) > 0:
|
||||||
resampled = resampled.truncate(before=start_datetime)
|
resampled = resampled.truncate(before=start_datetime)
|
||||||
if end_datetime is not None and len(resampled) > 0:
|
if end_datetime is not None and len(resampled) > 0:
|
||||||
resampled = resampled.truncate(after=end_datetime.subtract(seconds=1))
|
resampled = resampled.truncate(after=end_datetime.subtract(seconds=1))
|
||||||
array = resampled.values
|
array = resampled.values
|
||||||
logger.debug(
|
|
||||||
"Array for '{}' with length {}: {}...{}", key, len(array), array[:10], array[-10:]
|
|
||||||
)
|
|
||||||
|
|
||||||
return array
|
return array
|
||||||
|
|
||||||
def to_dataframe(
|
|
||||||
self,
|
|
||||||
start_datetime: Optional[DateTime] = None,
|
|
||||||
end_datetime: Optional[DateTime] = None,
|
|
||||||
) -> pd.DataFrame:
|
|
||||||
"""Converts the sequence of DataRecord instances into a Pandas DataFrame.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
start_datetime (Optional[datetime]): The lower bound for filtering (inclusive).
|
|
||||||
Defaults to the earliest possible datetime if None.
|
|
||||||
end_datetime (Optional[datetime]): The upper bound for filtering (exclusive).
|
|
||||||
Defaults to the latest possible datetime if None.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
pd.DataFrame: A DataFrame containing the filtered data from all records.
|
|
||||||
"""
|
|
||||||
if not self.records:
|
|
||||||
return pd.DataFrame() # Return empty DataFrame if no records exist
|
|
||||||
|
|
||||||
# Use filter_by_datetime to get filtered records
|
|
||||||
filtered_records = self.filter_by_datetime(start_datetime, end_datetime)
|
|
||||||
|
|
||||||
# Convert filtered records to a dictionary list
|
|
||||||
data = [record.model_dump() for record in filtered_records]
|
|
||||||
|
|
||||||
# Convert to DataFrame
|
|
||||||
df = pd.DataFrame(data)
|
|
||||||
if df.empty:
|
|
||||||
return df
|
|
||||||
|
|
||||||
# Ensure `date_time` column exists and use it for the index
|
|
||||||
if not "date_time" in df.columns:
|
|
||||||
error_msg = f"Cannot create dataframe: no `date_time` column in `{df}`."
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise TypeError(error_msg)
|
|
||||||
df.index = pd.DatetimeIndex(df["date_time"])
|
|
||||||
return df
|
|
||||||
|
|
||||||
def sort_by_datetime(self, reverse: bool = False) -> None:
|
def sort_by_datetime(self, reverse: bool = False) -> None:
|
||||||
"""Sort the DataRecords in the sequence by their date_time attribute.
|
"""Sort the DataRecords in the sequence by their date_time attribute.
|
||||||
|
|
||||||
@@ -1319,7 +1093,7 @@ class DataProvider(SingletonMixin, DataSequence):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
update_datetime: Optional[AwareDatetime] = Field(
|
update_datetime: Optional[AwareDatetime] = Field(
|
||||||
None, json_schema_extra={"description": "Latest update datetime for generic data"}
|
None, description="Latest update datetime for generic data"
|
||||||
)
|
)
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
@@ -1336,7 +1110,7 @@ class DataProvider(SingletonMixin, DataSequence):
|
|||||||
|
|
||||||
To be implemented by derived classes.
|
To be implemented by derived classes.
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError()
|
return self.provider_id() == self.config.abstract_provider
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
@@ -1347,11 +1121,6 @@ class DataProvider(SingletonMixin, DataSequence):
|
|||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
||||||
if hasattr(self, "_initialized"):
|
|
||||||
return
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
def update_data(
|
def update_data(
|
||||||
self,
|
self,
|
||||||
force_enable: Optional[bool] = False,
|
force_enable: Optional[bool] = False,
|
||||||
@@ -1386,7 +1155,7 @@ class DataImportMixin:
|
|||||||
the values. `ìnterval` may be used to define the fixed time interval between two values.
|
the values. `ìnterval` may be used to define the fixed time interval between two values.
|
||||||
|
|
||||||
On import `self.update_value(datetime, key, value)` is called which has to be provided.
|
On import `self.update_value(datetime, key, value)` is called which has to be provided.
|
||||||
Also `self.ems_start_datetime` may be necessary as a default in case `start_datetime`is not given.
|
Also `self.start_datetime` may be necessary as a default in case `start_datetime`is not given.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Attributes required but defined elsehere.
|
# Attributes required but defined elsehere.
|
||||||
@@ -1455,14 +1224,14 @@ class DataImportMixin:
|
|||||||
# We jump back by 1 hour
|
# We jump back by 1 hour
|
||||||
# Repeat the value(s) (reuse value index)
|
# Repeat the value(s) (reuse value index)
|
||||||
for i in range(interval_steps_per_hour):
|
for i in range(interval_steps_per_hour):
|
||||||
logger.debug(f"{i + 1}: Repeat at {next_time} with index {value_index}")
|
logger.debug(f"{i+1}: Repeat at {next_time} with index {value_index}")
|
||||||
timestamps_with_indices.append((next_time, value_index))
|
timestamps_with_indices.append((next_time, value_index))
|
||||||
next_time = next_time.add(seconds=interval.total_seconds())
|
next_time = next_time.add(seconds=interval.total_seconds())
|
||||||
else:
|
else:
|
||||||
# We jump forward by 1 hour
|
# We jump forward by 1 hour
|
||||||
# Drop the value(s)
|
# Drop the value(s)
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f"{i + 1}: Skip {interval_steps_per_hour} at {next_time} with index {value_index}"
|
f"{i+1}: Skip {interval_steps_per_hour} at {next_time} with index {value_index}"
|
||||||
)
|
)
|
||||||
value_index += interval_steps_per_hour
|
value_index += interval_steps_per_hour
|
||||||
|
|
||||||
@@ -1504,7 +1273,7 @@ class DataImportMixin:
|
|||||||
raise ValueError(f"Invalid start_datetime in import data: {e}")
|
raise ValueError(f"Invalid start_datetime in import data: {e}")
|
||||||
|
|
||||||
if start_datetime is None:
|
if start_datetime is None:
|
||||||
start_datetime = self.ems_start_datetime # type: ignore
|
start_datetime = self.start_datetime # type: ignore
|
||||||
|
|
||||||
if "interval" in import_data:
|
if "interval" in import_data:
|
||||||
try:
|
try:
|
||||||
@@ -1595,7 +1364,7 @@ class DataImportMixin:
|
|||||||
raise ValueError(f"Invalid datetime index in DataFrame: {e}")
|
raise ValueError(f"Invalid datetime index in DataFrame: {e}")
|
||||||
else:
|
else:
|
||||||
if start_datetime is None:
|
if start_datetime is None:
|
||||||
start_datetime = self.ems_start_datetime # type: ignore
|
start_datetime = self.start_datetime # type: ignore
|
||||||
has_datetime_index = False
|
has_datetime_index = False
|
||||||
|
|
||||||
# Filter columns based on key_prefix and record_keys_writable
|
# Filter columns based on key_prefix and record_keys_writable
|
||||||
@@ -1652,7 +1421,7 @@ class DataImportMixin:
|
|||||||
|
|
||||||
If start_datetime and or interval is given in the JSON dict it will be used. Otherwise
|
If start_datetime and or interval is given in the JSON dict it will be used. Otherwise
|
||||||
the given parameters are used. If None is given start_datetime defaults to
|
the given parameters are used. If None is given start_datetime defaults to
|
||||||
'self.ems_start_datetime' and interval defaults to 1 hour.
|
'self.start_datetime' and interval defaults to 1 hour.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
json_str (str): The JSON string containing the generic data.
|
json_str (str): The JSON string containing the generic data.
|
||||||
@@ -1670,11 +1439,11 @@ class DataImportMixin:
|
|||||||
{
|
{
|
||||||
"start_datetime": "2024-11-10 00:00:00"
|
"start_datetime": "2024-11-10 00:00:00"
|
||||||
"interval": "30 minutes"
|
"interval": "30 minutes"
|
||||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
"load_mean": [20.5, 21.0, 22.1],
|
||||||
"other_xyz: [10.5, 11.0, 12.1],
|
"other_xyz: [10.5, 11.0, 12.1],
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
and `key_prefix = "load"`, only the "loadforecast_power_w" key will be processed even though
|
and `key_prefix = "load"`, only the "load_mean" key will be processed even though
|
||||||
both keys are in the record.
|
both keys are in the record.
|
||||||
"""
|
"""
|
||||||
# Try pandas dataframe with orient="split"
|
# Try pandas dataframe with orient="split"
|
||||||
@@ -1691,7 +1460,7 @@ class DataImportMixin:
|
|||||||
error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
|
error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
|
||||||
logger.debug(f"PydanticDateTimeDataFrame import: {error_msg}")
|
logger.debug(f"PydanticDateTimeDataFrame import: {error_msg}")
|
||||||
|
|
||||||
# Try dictionary with special keys start_datetime and interval
|
# Try dictionary with special keys start_datetime and intervall
|
||||||
try:
|
try:
|
||||||
import_data = PydanticDateTimeData.model_validate_json(json_str)
|
import_data = PydanticDateTimeData.model_validate_json(json_str)
|
||||||
self.import_from_dict(import_data.to_dict())
|
self.import_from_dict(import_data.to_dict())
|
||||||
@@ -1727,7 +1496,7 @@ class DataImportMixin:
|
|||||||
|
|
||||||
If start_datetime and or interval is given in the JSON dict it will be used. Otherwise
|
If start_datetime and or interval is given in the JSON dict it will be used. Otherwise
|
||||||
the given parameters are used. If None is given start_datetime defaults to
|
the given parameters are used. If None is given start_datetime defaults to
|
||||||
'self.ems_start_datetime' and interval defaults to 1 hour.
|
'self.start_datetime' and interval defaults to 1 hour.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
import_file_path (Path): The path to the JSON file containing the generic data.
|
import_file_path (Path): The path to the JSON file containing the generic data.
|
||||||
@@ -1744,14 +1513,14 @@ class DataImportMixin:
|
|||||||
Given a JSON file with the following content:
|
Given a JSON file with the following content:
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
"load_mean": [20.5, 21.0, 22.1],
|
||||||
"other_xyz: [10.5, 11.0, 12.1],
|
"other_xyz: [10.5, 11.0, 12.1],
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
and `key_prefix = "load"`, only the "loadforecast_power_w" key will be processed even though
|
and `key_prefix = "load"`, only the "load_mean" key will be processed even though
|
||||||
both keys are in the record.
|
both keys are in the record.
|
||||||
"""
|
"""
|
||||||
with import_file_path.open("r", encoding="utf-8", newline=None) as import_file:
|
with import_file_path.open("r") as import_file:
|
||||||
import_str = import_file.read()
|
import_str = import_file.read()
|
||||||
self.import_from_json(
|
self.import_from_json(
|
||||||
import_str, key_prefix=key_prefix, start_datetime=start_datetime, interval=interval
|
import_str, key_prefix=key_prefix, start_datetime=start_datetime, interval=interval
|
||||||
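For reference, the dict shape that `import_from_json`/`import_from_file` accept, per the docstrings above: optional `start_datetime` and `interval` plus one list of values per record key. A hedged usage sketch (payload hypothetical; `provider` stands for any `DataImportMixin` instance):

```python
import json

# Hypothetical payload: optional "start_datetime"/"interval" plus one list of
# values per record key; keys not matching key_prefix are ignored on import.
payload = {
    "start_datetime": "2024-11-10 00:00:00",
    "interval": "30 minutes",
    "load_mean": [20.5, 21.0, 22.1],
}
json_str = json.dumps(payload)

# provider.import_from_json(json_str, key_prefix="load") would then record one
# "load_mean" value every 30 minutes starting at start_datetime.
print(json_str)
```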
@@ -1786,7 +1555,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
|||||||
|
|
||||||
# To be overloaded by derived classes.
|
# To be overloaded by derived classes.
|
||||||
providers: List[DataProvider] = Field(
|
providers: List[DataProvider] = Field(
|
||||||
default_factory=list, json_schema_extra={"description": "List of data providers"}
|
default_factory=list, description="List of data providers"
|
||||||
)
|
)
|
||||||
|
|
||||||
@field_validator("providers", mode="after")
|
@field_validator("providers", mode="after")
|
||||||
@@ -1826,11 +1595,6 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
|||||||
)
|
)
|
||||||
return list(key_set)
|
return list(key_set)
|
||||||
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
||||||
if hasattr(self, "_initialized"):
|
|
||||||
return
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __getitem__(self, key: str) -> pd.Series:
|
def __getitem__(self, key: str) -> pd.Series:
|
||||||
"""Retrieve a Pandas Series for a specified key from the data in each DataProvider.
|
"""Retrieve a Pandas Series for a specified key from the data in each DataProvider.
|
||||||
|
|
||||||
@@ -1938,12 +1702,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
             force_update (bool, optional): If True, forces the providers to update the data even if still cached.
         """
         for provider in self.providers:
-            try:
-                provider.update_data(force_enable=force_enable, force_update=force_update)
-            except Exception as ex:
-                error = f"Provider {provider.provider_id()} fails on update - enabled={provider.enabled()}, force_enable={force_enable}, force_update={force_update}: {ex}"
-                logger.error(error)
-                raise RuntimeError(error)
+            provider.update_data(force_enable=force_enable, force_update=force_update)
 
     def key_to_series(
         self,
@@ -2038,95 +1797,6 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
 
         return array
 
-    def keys_to_dataframe(
-        self,
-        keys: list[str],
-        start_datetime: Optional[DateTime] = None,
-        end_datetime: Optional[DateTime] = None,
-        interval: Optional[Any] = None,  # Duration assumed
-        fill_method: Optional[str] = None,
-    ) -> pd.DataFrame:
-        """Retrieve a dataframe indexed by fixed time intervals for specified keys from the data in each DataProvider.
-
-        Generates a pandas DataFrame using the NumPy arrays for each specified key, ensuring a common time index.
-
-        Args:
-            keys (list[str]): A list of field names to retrieve.
-            start_datetime (datetime, optional): Start date for filtering records (inclusive).
-            end_datetime (datetime, optional): End date for filtering records (exclusive).
-            interval (duration, optional): The fixed time interval. Defaults to 1 hour.
-            fill_method (str, optional): Method to handle missing values during resampling.
-                - 'linear': Linearly interpolate missing values (for numeric data only).
-                - 'ffill': Forward fill missing values.
-                - 'bfill': Backward fill missing values.
-                - 'none': Defaults to 'linear' for numeric values, otherwise 'ffill'.
-
-        Returns:
-            pd.DataFrame: A DataFrame where each column represents a key's array with a common time index.
-
-        Raises:
-            KeyError: If no valid data is found for any of the requested keys.
-            ValueError: If any retrieved array has a different time index than the first one.
-        """
-        # Ensure datetime objects are normalized
-        start_datetime = to_datetime(start_datetime, to_maxtime=False) if start_datetime else None
-        end_datetime = to_datetime(end_datetime, to_maxtime=False) if end_datetime else None
-        if interval is None:
-            interval = to_duration("1 hour")
-        if start_datetime is None:
-            # Take earliest datetime of all providers that are enabled
-            for provider in self.enabled_providers:
-                if start_datetime is None:
-                    start_datetime = provider.min_datetime
-                elif (
-                    provider.min_datetime
-                    and compare_datetimes(provider.min_datetime, start_datetime).lt
-                ):
-                    start_datetime = provider.min_datetime
-        if end_datetime is None:
-            # Take latest datetime of all providers that are enabled
-            for provider in self.enabled_providers:
-                if end_datetime is None:
-                    end_datetime = provider.max_datetime
-                elif (
-                    provider.max_datetime
-                    and compare_datetimes(provider.max_datetime, end_datetime).gt
-                ):
-                    end_datetime = provider.min_datetime
-            if end_datetime:
-                end_datetime.add(seconds=1)
-
-        # Create a DatetimeIndex based on start, end, and interval
-        if start_datetime is None or end_datetime is None:
-            raise ValueError(
-                f"Can not determine datetime range. Got '{start_datetime}'..'{end_datetime}'."
-            )
-        reference_index = pd.date_range(
-            start=start_datetime,
-            end=end_datetime,
-            freq=interval,
-            inclusive="left",
-        )
-
-        data = {}
-        for key in keys:
-            try:
-                array = self.key_to_array(key, start_datetime, end_datetime, interval, fill_method)
-
-                if len(array) != len(reference_index):
-                    raise ValueError(
-                        f"Array length mismatch for key '{key}' (expected {len(reference_index)}, got {len(array)})"
-                    )
-
-                data[key] = array
-            except KeyError as e:
-                raise KeyError(f"Failed to retrieve data for key '{key}': {e}")
-
-        if not data:
-            raise KeyError(f"No valid data found for the requested keys {keys}.")
-
-        return pd.DataFrame(data, index=reference_index)
-
     def provider_by_id(self, provider_id: str) -> DataProvider:
         """Retrieves a data provider by its unique identifier.
 
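
Editor's note: the removed `keys_to_dataframe()` above boils down to building one fixed-interval index and requiring every key's array to match it. An illustrative, self-contained sketch of that core idea (all data values are made up):

```python
import numpy as np
import pandas as pd

# One common DatetimeIndex for all keys; "left" keeps the end bound exclusive,
# matching the (inclusive, exclusive) semantics documented above.
reference_index = pd.date_range(
    start=pd.Timestamp("2025-01-01 00:00"),
    end=pd.Timestamp("2025-01-01 03:00"),
    freq="1h",
    inclusive="left",
)

data = {"load_w": np.array([100.0, 120.0, 90.0]), "pv_w": np.array([0.0, 50.0, 200.0])}
for key, array in data.items():
    if len(array) != len(reference_index):
        raise ValueError(f"Array length mismatch for key '{key}'")

df = pd.DataFrame(data, index=reference_index)
print(df)
```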
@@ -1,44 +0,0 @@
-from collections.abc import Callable
-from typing import Any, Optional
-
-
-class classproperty:
-    """A decorator to define a read-only property at the class level.
-
-    This class replaces the built-in `property` which is no longer available in
-    combination with @classmethod since Python 3.13 to allow a method to be
-    accessed as a property on the class itself, rather than an instance. This
-    is useful when you want a property-like syntax for methods that depend on
-    the class rather than any instance of the class.
-
-    Example:
-        class MyClass:
-            _value = 42
-
-            @classproperty
-            def value(cls):
-                return cls._value
-
-        print(MyClass.value)  # Outputs: 42
-
-    Methods:
-        __get__: Retrieves the value of the class property by calling the
-            decorated method on the class.
-
-    Parameters:
-        fget (Callable[[Any], Any]): A method that takes the class as an
-            argument and returns a value.
-
-    Raises:
-        RuntimeError: If `fget` is not defined when `__get__` is called.
-    """
-
-    def __init__(self, fget: Callable[[Any], Any]) -> None:
-        self.fget = fget
-
-    def __get__(self, _: Any, owner_cls: Optional[type[Any]] = None) -> Any:
-        if owner_cls is None:
-            return self
-        if self.fget is None:
-            raise RuntimeError("'fget' not defined when `__get__` is called")
-        return self.fget(owner_cls)
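
Editor's note: per the docstring above, stacking `@classmethod` and `@property` no longer works since Python 3.13, which is why the deleted descriptor existed. A runnable sketch of its use (the `Config` class here is hypothetical, and the descriptor is a condensed re-statement of the one deleted above):

```python
class classproperty:  # condensed version of the deleted descriptor
    def __init__(self, fget):
        self.fget = fget

    def __get__(self, _, owner_cls=None):
        return self.fget(owner_cls)


class Config:
    _instance_count = 3

    @classproperty
    def instance_count(cls):
        # Accessed on the class itself - no instance required.
        return cls._instance_count


assert Config.instance_count == 3
```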
File diff suppressed because it is too large
@@ -1,353 +1,443 @@
-import traceback
-from asyncio import Lock, get_running_loop
-from concurrent.futures import ThreadPoolExecutor
-from functools import partial
-from typing import ClassVar, Optional
+from typing import Any, ClassVar, Optional
 
-from loguru import logger
-from pydantic import computed_field
+import numpy as np
+from numpydantic import NDArray, Shape
+from pendulum import DateTime
+from pydantic import ConfigDict, Field, computed_field, field_validator, model_validator
+from typing_extensions import Self
 
-from akkudoktoreos.core.cache import CacheEnergyManagementStore
 from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin, SingletonMixin
-from akkudoktoreos.core.emplan import EnergyManagementPlan
-from akkudoktoreos.core.emsettings import EnergyManagementMode
-from akkudoktoreos.core.pydantic import PydanticBaseModel
-from akkudoktoreos.optimization.genetic.genetic import GeneticOptimization
-from akkudoktoreos.optimization.genetic.geneticparams import (
-    GeneticOptimizationParameters,
-)
-from akkudoktoreos.optimization.genetic.geneticsolution import GeneticSolution
-from akkudoktoreos.optimization.optimization import OptimizationSolution
-from akkudoktoreos.utils.datetimeutil import DateTime, compare_datetimes, to_datetime
+from akkudoktoreos.core.logging import get_logger
+from akkudoktoreos.core.pydantic import ParametersBaseModel, PydanticBaseModel
+from akkudoktoreos.devices.battery import Battery
+from akkudoktoreos.devices.generic import HomeAppliance
+from akkudoktoreos.devices.inverter import Inverter
+from akkudoktoreos.utils.datetimeutil import to_datetime
+from akkudoktoreos.utils.utils import NumpyEncoder
 
-# The executor to execute the CPU heavy energy management run
-executor = ThreadPoolExecutor(max_workers=1)
+logger = get_logger(__name__)
 
 
-class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBaseModel):
-    """Energy management."""
+class EnergieManagementSystemParameters(ParametersBaseModel):
+    pv_prognose_wh: list[float] = Field(
+        description="An array of floats representing the forecasted photovoltaic output in watts for different time intervals."
+    )
+    strompreis_euro_pro_wh: list[float] = Field(
+        description="An array of floats representing the electricity price in euros per watt-hour for different time intervals."
+    )
+    einspeiseverguetung_euro_pro_wh: list[float] | float = Field(
+        description="A float or array of floats representing the feed-in compensation in euros per watt-hour."
+    )
+    preis_euro_pro_wh_akku: float = Field(
+        description="A float representing the cost of battery energy per watt-hour."
+    )
+    gesamtlast: list[float] = Field(
+        description="An array of floats representing the total load (consumption) in watts for different time intervals."
+    )
+
+    @model_validator(mode="after")
+    def validate_list_length(self) -> Self:
+        pv_prognose_length = len(self.pv_prognose_wh)
+        if (
+            pv_prognose_length != len(self.strompreis_euro_pro_wh)
+            or pv_prognose_length != len(self.gesamtlast)
+            or (
+                isinstance(self.einspeiseverguetung_euro_pro_wh, list)
+                and pv_prognose_length != len(self.einspeiseverguetung_euro_pro_wh)
+            )
+        ):
+            raise ValueError("Input lists have different lengths")
+        return self
+
+
+class SimulationResult(ParametersBaseModel):
+    """This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""
+
+    Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
+    EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
+        description="The state of charge of the EV for each hour."
+    )
+    Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
+        description="The revenue from grid feed-in or other sources in euros per hour."
+    )
+    Gesamt_Verluste: float = Field(
+        description="The total losses in watt-hours over the entire period."
+    )
+    Gesamtbilanz_Euro: float = Field(
+        description="The total balance of revenues minus costs in euros."
+    )
+    Gesamteinnahmen_Euro: float = Field(description="The total revenues in euros.")
+    Gesamtkosten_Euro: float = Field(description="The total costs in euros.")
+    Home_appliance_wh_per_hour: list[Optional[float]] = Field(
+        description="The energy consumption of a household appliance in watt-hours per hour."
+    )
+    Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
+        description="The costs in euros per hour."
+    )
+    Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
+        description="The grid energy drawn in watt-hours per hour."
+    )
+    Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
+        description="The energy fed into the grid in watt-hours per hour."
+    )
+    Verluste_Pro_Stunde: list[Optional[float]] = Field(
+        description="The losses in watt-hours per hour."
+    )
+    akku_soc_pro_stunde: list[Optional[float]] = Field(
+        description="The state of charge of the battery (not the EV) in percentage per hour."
+    )
+    Electricity_price: list[Optional[float]] = Field(
+        description="Used Electricity Price, including predictions"
+    )
+
+    @field_validator(
+        "Last_Wh_pro_Stunde",
+        "Netzeinspeisung_Wh_pro_Stunde",
+        "akku_soc_pro_stunde",
+        "Netzbezug_Wh_pro_Stunde",
+        "Kosten_Euro_pro_Stunde",
+        "Einnahmen_Euro_pro_Stunde",
+        "EAuto_SoC_pro_Stunde",
+        "Verluste_Pro_Stunde",
+        "Home_appliance_wh_per_hour",
+        "Electricity_price",
+        mode="before",
+    )
+    def convert_numpy(cls, field: Any) -> Any:
+        return NumpyEncoder.convert_numpy(field)[0]
+
+
+class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBaseModel):
+    # Disable validation on assignment to speed up simulation runs.
+    model_config = ConfigDict(
+        validate_assignment=False,
+    )
 
     # Start datetime.
     _start_datetime: ClassVar[Optional[DateTime]] = None
 
-    # last run datetime. Used by energy management task
-    _last_run_datetime: ClassVar[Optional[DateTime]] = None
-
-    # energy management plan of latest energy management run with optimization
-    _plan: ClassVar[Optional[EnergyManagementPlan]] = None
-
-    # opimization solution of the latest energy management run
-    _optimization_solution: ClassVar[Optional[OptimizationSolution]] = None
-
-    # Solution of the genetic algorithm of latest energy management run with optimization
-    # For classic API
-    _genetic_solution: ClassVar[Optional[GeneticSolution]] = None
-
-    # energy management lock (for energy management run)
-    _run_lock: ClassVar[Lock] = Lock()
-
     @computed_field  # type: ignore[prop-decorator]
     @property
     def start_datetime(self) -> DateTime:
         """The starting datetime of the current or latest energy management."""
-        if EnergyManagement._start_datetime is None:
-            EnergyManagement.set_start_datetime()
-        return EnergyManagement._start_datetime
+        if EnergieManagementSystem._start_datetime is None:
+            EnergieManagementSystem.set_start_datetime()
+        return EnergieManagementSystem._start_datetime
 
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def last_run_datetime(self) -> Optional[DateTime]:
-        """The datetime the last energy management was run."""
-        return EnergyManagement._last_run_datetime
-
     @classmethod
     def set_start_datetime(cls, start_datetime: Optional[DateTime] = None) -> DateTime:
-        """Set the start datetime for the next energy management run.
-
-        If no datetime is provided, the current datetime is used.
-
-        The start datetime is always rounded down to the nearest hour
-        (i.e., setting minutes, seconds, and microseconds to zero).
-
-        Args:
-            start_datetime (Optional[DateTime]): The datetime to set as the start.
-                If None, the current datetime is used.
-
-        Returns:
-            DateTime: The adjusted start datetime.
-        """
         if start_datetime is None:
             start_datetime = to_datetime()
         cls._start_datetime = start_datetime.set(minute=0, second=0, microsecond=0)
         return cls._start_datetime
 
-    @classmethod
-    def plan(cls) -> Optional[EnergyManagementPlan]:
-        """Get the latest energy management plan.
-
-        Returns:
-            Optional[EnergyManagementPlan]: The latest energy management plan or None.
-        """
-        return cls._plan
-
-    @classmethod
-    def optimization_solution(cls) -> Optional[OptimizationSolution]:
-        """Get the latest optimization solution.
-
-        Returns:
-            Optional[OptimizationSolution]: The latest optimization solution.
-        """
-        return cls._optimization_solution
-
-    @classmethod
-    def genetic_solution(cls) -> Optional[GeneticSolution]:
-        """Get the latest solution of the genetic algorithm.
-
-        Returns:
-            Optional[GeneticSolution]: The latest solution of the genetic algorithm.
-        """
-        return cls._genetic_solution
-
-    @classmethod
-    def _run(
-        cls,
-        start_datetime: Optional[DateTime] = None,
-        mode: Optional[EnergyManagementMode] = None,
-        genetic_parameters: Optional[GeneticOptimizationParameters] = None,
-        genetic_individuals: Optional[int] = None,
-        genetic_seed: Optional[int] = None,
-        force_enable: Optional[bool] = False,
-        force_update: Optional[bool] = False,
-    ) -> None:
-        """Run the energy management.
-
-        This method initializes the energy management run by setting its
-        start datetime, updating predictions, and optionally starting
-        optimization depending on the selected mode or configuration.
-
-        Args:
-            start_datetime (DateTime, optional): The starting timestamp
-                of the energy management run. Defaults to the current datetime
-                if not provided.
-            mode (EnergyManagementMode, optional): The management mode to use. Must be one of:
-                - "OPTIMIZATION": Runs the optimization process.
-                - "PREDICTION": Updates the forecast without optimization.
-
-                Defaults to the mode defined in the current configuration.
-            genetic_parameters (GeneticOptimizationParameters, optional): The
-                parameter set for the genetic algorithm. If not provided, it will
-                be constructed based on the current configuration and predictions.
-            genetic_individuals (int, optional): The number of individuals for the
-                genetic algorithm. Defaults to the algorithm's internal default (400)
-                if not specified.
-            genetic_seed (int, optional): The seed for the genetic algorithm. Defaults
-                to the algorithm's internal random seed if not specified.
-            force_enable (bool, optional): If True, bypasses any disabled state
-                to force the update process. This is mostly applicable to
-                prediction providers.
-            force_update (bool, optional): If True, forces data to be refreshed
-                even if a cached version is still valid.
-
-        Returns:
-            None
-        """
-        # Ensure there is only one optimization/ energy management run at a time
-        if mode not in (None, "PREDICTION", "OPTIMIZATION"):
-            raise ValueError(f"Unknown energy management mode {mode}.")
-
-        logger.info("Starting energy management run.")
-
-        # Remember/ set the start datetime of this energy management run.
-        # None leads
-        cls.set_start_datetime(start_datetime)
-
-        # Throw away any memory cached results of the last energy management run.
-        CacheEnergyManagementStore().clear()
-
-        if mode is None:
-            mode = cls.config.ems.mode
-        if mode is None or mode == "PREDICTION":
-            # Update the predictions
-            cls.prediction.update_data(force_enable=force_enable, force_update=force_update)
-            logger.info("Energy management run done (predictions updated)")
-            return
-
-        # Prepare optimization parameters
-        # This also creates default configurations for missing values and updates the predictions
-        logger.info(
-            "Starting energy management prediction update and optimzation parameter preparation."
-        )
-        if genetic_parameters is None:
-            genetic_parameters = GeneticOptimizationParameters.prepare()
-
-        if not genetic_parameters:
-            logger.error(
-                "Energy management run canceled. Could not prepare optimisation parameters."
-            )
-            return
-
-        # Take values from config if not given
-        if genetic_individuals is None:
-            genetic_individuals = cls.config.optimization.genetic.individuals
-        if genetic_seed is None:
-            genetic_seed = cls.config.optimization.genetic.seed
-
-        if cls._start_datetime is None:  # Make mypy happy - already set by us
-            raise RuntimeError("Start datetime not set.")
-
-        logger.info("Starting energy management optimization.")
-        try:
-            optimization = GeneticOptimization(
-                verbose=bool(cls.config.server.verbose),
-                fixed_seed=genetic_seed,
-            )
-            solution = optimization.optimierung_ems(
-                start_hour=cls._start_datetime.hour,
-                parameters=genetic_parameters,
-                ngen=genetic_individuals,
-            )
-        except:
-            logger.exception("Energy management optimization failed.")
-            return
-
-        # Make genetic solution public
-        cls._genetic_solution = solution
-
-        # Make optimization solution public
-        cls._optimization_solution = solution.optimization_solution()
-
-        # Make plan public
-        cls._plan = solution.energy_management_plan()
-
-        logger.debug("Energy management genetic solution:\n{}", cls._genetic_solution)
-        logger.debug("Energy management optimization solution:\n{}", cls._optimization_solution)
-        logger.debug("Energy management plan:\n{}", cls._plan)
-        logger.info("Energy management run done (optimization updated)")
-
-    async def run(
+    def set_parameters(
         self,
-        start_datetime: Optional[DateTime] = None,
-        mode: Optional[EnergyManagementMode] = None,
-        genetic_parameters: Optional[GeneticOptimizationParameters] = None,
-        genetic_individuals: Optional[int] = None,
-        genetic_seed: Optional[int] = None,
+        parameters: EnergieManagementSystemParameters,
+        ev: Optional[Battery] = None,
+        home_appliance: Optional[HomeAppliance] = None,
+        inverter: Optional[Inverter] = None,
+    ) -> None:
+        self.load_energy_array = np.array(parameters.gesamtlast, float)
+        self.pv_prediction_wh = np.array(parameters.pv_prognose_wh, float)
+        self.elect_price_hourly = np.array(parameters.strompreis_euro_pro_wh, float)
+        self.elect_revenue_per_hour_arr = (
+            parameters.einspeiseverguetung_euro_pro_wh
+            if isinstance(parameters.einspeiseverguetung_euro_pro_wh, list)
+            else np.full(
+                len(self.load_energy_array), parameters.einspeiseverguetung_euro_pro_wh, float
+            )
+        )
+        if inverter:
+            self.battery = inverter.battery
+        else:
+            self.battery = None
+        self.ev = ev
+        self.home_appliance = home_appliance
+        self.inverter = inverter
+        self.ac_charge_hours = np.full(self.config.prediction_hours, 0.0)
+        self.dc_charge_hours = np.full(self.config.prediction_hours, 1.0)
+        self.ev_charge_hours = np.full(self.config.prediction_hours, 0.0)
+
+    def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
+        if self.battery:
+            self.battery.set_discharge_per_hour(ds)
+
+    def set_akku_ac_charge_hours(self, ds: np.ndarray) -> None:
+        self.ac_charge_hours = ds
+
+    def set_akku_dc_charge_hours(self, ds: np.ndarray) -> None:
+        self.dc_charge_hours = ds
+
+    def set_ev_charge_hours(self, ds: np.ndarray) -> None:
+        self.ev_charge_hours = ds
+
+    def set_home_appliance_start(self, ds: int, global_start_hour: int = 0) -> None:
+        if self.home_appliance:
+            self.home_appliance.set_starting_time(ds, global_start_hour=global_start_hour)
+
+    def reset(self) -> None:
+        if self.ev:
+            self.ev.reset()
+        if self.battery:
+            self.battery.reset()
+
+    def run(
+        self,
+        start_hour: Optional[int] = None,
         force_enable: Optional[bool] = False,
         force_update: Optional[bool] = False,
     ) -> None:
-        """Run the energy management.
+        """Run energy management.
 
-        This method initializes the energy management run by setting its
-        start datetime, updating predictions, and optionally starting
-        optimization depending on the selected mode or configuration.
+        Sets `start_datetime` to current hour, updates the configuration and the prediction, and
+        starts simulation at current hour.
 
         Args:
-            start_datetime (DateTime, optional): The starting timestamp
-                of the energy management run. Defaults to the current datetime
-                if not provided.
-            mode (EnergyManagementMode, optional): The management mode to use. Must be one of:
-                - "OPTIMIZATION": Runs the optimization process.
-                - "PREDICTION": Updates the forecast without optimization.
-
-                Defaults to the mode defined in the current configuration.
-            genetic_parameters (GeneticOptimizationParameters, optional): The
-                parameter set for the genetic algorithm. If not provided, it will
-                be constructed based on the current configuration and predictions.
-            genetic_individuals (int, optional): The number of individuals for the
-                genetic algorithm. Defaults to the algorithm's internal default (400)
-                if not specified.
-            genetic_seed (int, optional): The seed for the genetic algorithm. Defaults
-                to the algorithm's internal random seed if not specified.
-            force_enable (bool, optional): If True, bypasses any disabled state
-                to force the update process. This is mostly applicable to
-                prediction providers.
-            force_update (bool, optional): If True, forces data to be refreshed
-                even if a cached version is still valid.
-
-        Returns:
-            None
+            start_hour (int, optional): Hour to take as start time for the energy management. Defaults
+                to now.
+            force_enable (bool, optional): If True, forces to update even if disabled. This
+                is mostly relevant to prediction providers.
+            force_update (bool, optional): If True, forces to update the data even if still cached.
         """
-        async with self._run_lock:
-            loop = get_running_loop()
-            # Create a partial function with parameters "baked in"
-            func = partial(
-                EnergyManagement._run,
-                start_datetime=start_datetime,
-                mode=mode,
-                genetic_parameters=genetic_parameters,
-                genetic_individuals=genetic_individuals,
-                genetic_seed=genetic_seed,
-                force_enable=force_enable,
-                force_update=force_update,
-            )
-            # Run optimization in background thread to avoid blocking event loop
-            await loop.run_in_executor(executor, func)
+        self.set_start_hour(start_hour=start_hour)
+        self.config.update()
 
-    async def manage_energy(self) -> None:
-        """Repeating task for managing energy.
+        # Check for run definitions
+        if self.start_datetime is None:
+            error_msg = "Start datetime unknown."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+        if self.config.prediction_hours is None:
+            error_msg = "Prediction hours unknown."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+        if self.config.optimisation_hours is None:
+            error_msg = "Optimisation hours unknown."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
 
-        This task should be executed by the server regularly (e.g., every 10 seconds)
-        to ensure proper energy management. Configuration changes to the energy management interval
-        will only take effect if this task is executed.
+        self.prediction.update_data(force_enable=force_enable, force_update=force_update)
+        # TODO: Create optimisation problem that calls into devices.update_data() for simulations.
 
-        - Initializes and runs the energy management for the first time if it has never been run
-          before.
-        - If the energy management interval is not configured or invalid (NaN), the task will not
-          trigger any repeated energy management runs.
-        - Compares the current time with the last run time and runs the energy management if the
-          interval has elapsed.
-        - Logs any exceptions that occur during the initialization or execution of the energy
-          management.
+    def set_start_hour(self, start_hour: Optional[int] = None) -> None:
+        """Sets start datetime to given hour.
 
-        Note: The task maintains the interval even if some intervals are missed.
+        Args:
+            start_hour (int, optional): Hour to take as start time for the energy management. Defaults
+                to now.
         """
-        current_datetime = to_datetime()
-        interval = self.config.ems.interval  # interval maybe changed in between
+        if start_hour is None:
+            self.set_start_datetime()
+        else:
+            start_datetime = to_datetime().set(hour=start_hour, minute=0, second=0, microsecond=0)
+            self.set_start_datetime(start_datetime)
 
-        if EnergyManagement._last_run_datetime is None:
-            # Never run before
-            try:
-                # Remember energy run datetime.
-                EnergyManagement._last_run_datetime = current_datetime
-                # Try to run a first energy management. May fail due to config incomplete.
-                await self.run()
-            except Exception as e:
-                trace = "".join(traceback.TracebackException.from_exception(e).format())
-                message = f"EOS init: {e}\n{trace}"
-                logger.error(message)
-            return
+    def simulate_start_now(self) -> dict[str, Any]:
+        start_hour = to_datetime().now().hour
+        return self.simulate(start_hour)
 
-        if interval is None or interval == float("nan"):
-            # No Repetition
-            return
+    def simulate(self, start_hour: int) -> dict[str, Any]:
+        """Simulate energy usage and costs for the given start hour.
 
-        if (
-            compare_datetimes(current_datetime, EnergyManagement._last_run_datetime).time_diff
-            < interval
-        ):
-            # Wait for next run
-            return
+        akku_soc_pro_stunde begin of the hour, initial hour state!
+        last_wh_pro_stunde integral of last hour (end state)
+        """
+        # Check for simulation integrity
+        required_attrs = [
+            "load_energy_array",
+            "pv_prediction_wh",
+            "elect_price_hourly",
+            "ev_charge_hours",
+            "ac_charge_hours",
+            "dc_charge_hours",
+            "elect_revenue_per_hour_arr",
+        ]
+        missing_data = [
+            attr.replace("_", " ").title() for attr in required_attrs if getattr(self, attr) is None
+        ]
 
-        try:
-            await self.run()
-        except Exception as e:
-            trace = "".join(traceback.TracebackException.from_exception(e).format())
-            message = f"EOS run: {e}\n{trace}"
-            logger.error(message)
+        if missing_data:
+            logger.error("Mandatory data missing - %s", ", ".join(missing_data))
+            raise ValueError(f"Mandatory data missing: {', '.join(missing_data)}")
 
-        # Remember the energy management run - keep on interval even if we missed some intervals
-        while (
-            compare_datetimes(current_datetime, EnergyManagement._last_run_datetime).time_diff
-            >= interval
-        ):
-            EnergyManagement._last_run_datetime = EnergyManagement._last_run_datetime.add(
-                seconds=interval
-            )
+        # Pre-fetch data
+        load_energy_array = np.array(self.load_energy_array)
+        pv_prediction_wh = np.array(self.pv_prediction_wh)
+        elect_price_hourly = np.array(self.elect_price_hourly)
+        ev_charge_hours = np.array(self.ev_charge_hours)
+        ac_charge_hours = np.array(self.ac_charge_hours)
+        dc_charge_hours = np.array(self.dc_charge_hours)
+        elect_revenue_per_hour_arr = np.array(self.elect_revenue_per_hour_arr)
+
+        # Fetch objects
+        battery = self.battery
+        assert battery  # to please mypy
+        ev = self.ev
+        home_appliance = self.home_appliance
+        inverter = self.inverter
+
+        if not (len(load_energy_array) == len(pv_prediction_wh) == len(elect_price_hourly)):
+            error_msg = f"Array sizes do not match: Load Curve = {len(load_energy_array)}, PV Forecast = {len(pv_prediction_wh)}, Electricity Price = {len(elect_price_hourly)}"
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
+        end_hour = len(load_energy_array)
+        total_hours = end_hour - start_hour
+
+        # Pre-allocate arrays for the results, optimized for speed
+        loads_energy_per_hour = np.full((total_hours), np.nan)
+        feedin_energy_per_hour = np.full((total_hours), np.nan)
+        consumption_energy_per_hour = np.full((total_hours), np.nan)
+        costs_per_hour = np.full((total_hours), np.nan)
+        revenue_per_hour = np.full((total_hours), np.nan)
+        soc_per_hour = np.full((total_hours), np.nan)
+        soc_ev_per_hour = np.full((total_hours), np.nan)
+        losses_wh_per_hour = np.full((total_hours), np.nan)
+        home_appliance_wh_per_hour = np.full((total_hours), np.nan)
+        electricity_price_per_hour = np.full((total_hours), np.nan)
+
+        # Set initial state
+        soc_per_hour[0] = battery.current_soc_percentage()
+        if ev:
+            soc_ev_per_hour[0] = ev.current_soc_percentage()
+
+        for hour in range(start_hour, end_hour):
+            hour_idx = hour - start_hour
+
+            # save begin states
+            soc_per_hour[hour_idx] = battery.current_soc_percentage()
+
+            if ev:
+                soc_ev_per_hour[hour_idx] = ev.current_soc_percentage()
+
+            # Accumulate loads and PV generation
+            consumption = load_energy_array[hour]
+            losses_wh_per_hour[hour_idx] = 0.0
+
+            # Home appliances
+            if home_appliance:
+                ha_load = home_appliance.get_load_for_hour(hour)
+                consumption += ha_load
+                home_appliance_wh_per_hour[hour_idx] = ha_load
+
+            # E-Auto handling
+            if ev and ev_charge_hours[hour] > 0:
+                loaded_energy_ev, verluste_eauto = ev.charge_energy(
+                    None, hour, relative_power=ev_charge_hours[hour]
+                )
+                consumption += loaded_energy_ev
+                losses_wh_per_hour[hour_idx] += verluste_eauto
+
+            # Process inverter logic
+            energy_feedin_grid_actual = energy_consumption_grid_actual = losses = eigenverbrauch = (
+                0.0
+            )
+
+            hour_ac_charge = ac_charge_hours[hour]
+            hour_dc_charge = dc_charge_hours[hour]
+            hourly_electricity_price = elect_price_hourly[hour]
+            hourly_energy_revenue = elect_revenue_per_hour_arr[hour]
+
+            battery.set_charge_allowed_for_hour(hour_dc_charge, hour)
+
+            if inverter:
+                energy_produced = pv_prediction_wh[hour]
+                (
+                    energy_feedin_grid_actual,
+                    energy_consumption_grid_actual,
+                    losses,
+                    eigenverbrauch,
+                ) = inverter.process_energy(energy_produced, consumption, hour)
+
+            # AC PV Battery Charge
+            if hour_ac_charge > 0.0:
+                battery.set_charge_allowed_for_hour(1, hour)
+                battery_charged_energy_actual, battery_losses_actual = battery.charge_energy(
+                    None, hour, relative_power=hour_ac_charge
+                )
+
+                total_battery_energy = battery_charged_energy_actual + battery_losses_actual
+                consumption += total_battery_energy
+                energy_consumption_grid_actual += total_battery_energy
+                losses_wh_per_hour[hour_idx] += battery_losses_actual
+
+            # Update hourly arrays
+            feedin_energy_per_hour[hour_idx] = energy_feedin_grid_actual
+            consumption_energy_per_hour[hour_idx] = energy_consumption_grid_actual
+            losses_wh_per_hour[hour_idx] += losses
+            loads_energy_per_hour[hour_idx] = consumption
+            electricity_price_per_hour[hour_idx] = hourly_electricity_price
+
+            # Financial calculations
+            costs_per_hour[hour_idx] = energy_consumption_grid_actual * hourly_electricity_price
+            revenue_per_hour[hour_idx] = energy_feedin_grid_actual * hourly_energy_revenue
+
+        total_cost = np.nansum(costs_per_hour)
+        total_losses = np.nansum(losses_wh_per_hour)
+        total_revenue = np.nansum(revenue_per_hour)
+
+        # Prepare output dictionary
+        return {
+            "Last_Wh_pro_Stunde": loads_energy_per_hour,
+            "Netzeinspeisung_Wh_pro_Stunde": feedin_energy_per_hour,
+            "Netzbezug_Wh_pro_Stunde": consumption_energy_per_hour,
+            "Kosten_Euro_pro_Stunde": costs_per_hour,
+            "akku_soc_pro_stunde": soc_per_hour,
+            "Einnahmen_Euro_pro_Stunde": revenue_per_hour,
+            "Gesamtbilanz_Euro": total_cost - total_revenue,
+            "EAuto_SoC_pro_Stunde": soc_ev_per_hour,
+            "Gesamteinnahmen_Euro": total_revenue,
+            "Gesamtkosten_Euro": total_cost,
+            "Verluste_Pro_Stunde": losses_wh_per_hour,
+            "Gesamt_Verluste": total_losses,
+            "Home_appliance_wh_per_hour": home_appliance_wh_per_hour,
+            "Electricity_price": electricity_price_per_hour,
+        }
 
 
 # Initialize the Energy Management System, it is a singleton.
-ems = EnergyManagement()
+ems = EnergieManagementSystem()
 
 
-def get_ems() -> EnergyManagement:
+def get_ems() -> EnergieManagementSystem:
     """Gets the EOS Energy Management System."""
     return ems
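
Editor's note: the base side's `run()` above keeps the CPU-heavy optimization off the event loop via a single-worker executor. A minimal, self-contained sketch of that pattern, assuming nothing from the EOS package (`_run` here is a stand-in for `EnergyManagement._run`):

```python
import asyncio
from concurrent.futures import ThreadPoolExecutor
from functools import partial

# One worker thread so at most one energy management run executes at a time.
executor = ThreadPoolExecutor(max_workers=1)


def _run(mode: str, force_update: bool) -> str:
    # Stand-in for the CPU-heavy optimization work.
    return f"ran mode={mode}, force_update={force_update}"


async def run(mode: str = "OPTIMIZATION", force_update: bool = False) -> None:
    loop = asyncio.get_running_loop()
    # Bake the parameters in, then await the blocking call in the executor.
    func = partial(_run, mode=mode, force_update=force_update)
    result = await loop.run_in_executor(executor, func)
    print(result)


asyncio.run(run())
```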
@@ -1,46 +0,0 @@
-"""Settings for energy management.
-
-Kept in an extra module to avoid cyclic dependencies on package import.
-"""
-
-from enum import Enum
-from typing import Optional
-
-from pydantic import Field
-
-from akkudoktoreos.config.configabc import SettingsBaseModel
-
-
-class EnergyManagementMode(str, Enum):
-    """Energy management mode."""
-
-    PREDICTION = "PREDICTION"
-    OPTIMIZATION = "OPTIMIZATION"
-
-
-class EnergyManagementCommonSettings(SettingsBaseModel):
-    """Energy Management Configuration."""
-
-    startup_delay: float = Field(
-        default=5,
-        ge=1,
-        json_schema_extra={
-            "description": "Startup delay in seconds for EOS energy management runs."
-        },
-    )
-
-    interval: Optional[float] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Intervall in seconds between EOS energy management runs.",
-            "examples": ["300"],
-        },
-    )
-
-    mode: Optional[EnergyManagementMode] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Energy management mode [OPTIMIZATION | PREDICTION].",
-            "examples": ["OPTIMIZATION", "PREDICTION"],
-        },
-    )
@@ -1,3 +1,20 @@
 """Abstract and base classes for logging."""
 
-LOGGING_LEVELS: list[str] = ["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
+import logging
+
+
+def logging_str_to_level(level_str: str) -> int:
+    """Convert log level string to logging level."""
+    if level_str == "DEBUG":
+        level = logging.DEBUG
+    elif level_str == "INFO":
+        level = logging.INFO
+    elif level_str == "WARNING":
+        level = logging.WARNING
+    elif level_str == "CRITICAL":
+        level = logging.CRITICAL
+    elif level_str == "ERROR":
+        level = logging.ERROR
+    else:
+        raise ValueError(f"Unknown loggin level: {level_str}")
+    return level
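
Editor's note: worth knowing when reviewing the head side's helper above — the standard library already offers an equivalent lookup, shown in this tiny sketch:

```python
import logging

# logging.getLevelName() maps a known level name to its numeric value.
level = logging.getLevelName("WARNING")
assert level == logging.WARNING  # 30
```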
@@ -1,245 +1,91 @@
|
|||||||
"""Utility for configuring Loguru loggers."""
|
"""Utility functions for handling logging tasks.
|
||||||
|
|
||||||
|
Functions:
|
||||||
|
----------
|
||||||
|
- get_logger: Creates and configures a logger with console and optional rotating file logging.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
--------------
|
||||||
|
# Logger setup
|
||||||
|
>>> logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
|
||||||
|
>>> logger.info("Logging initialized.")
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
------
|
||||||
|
- The logger supports rotating log files to prevent excessive log file size.
|
||||||
|
"""
|
||||||
|
|
||||||
import json
|
|
||||||
import logging as pylogging
|
import logging as pylogging
|
||||||
import os
|
import os
|
||||||
import re
|
from logging.handlers import RotatingFileHandler
|
||||||
import sys
|
from typing import Optional
|
||||||
from pathlib import Path
|
|
||||||
from types import FrameType
|
|
||||||
from typing import Any, List, Optional
|
|
||||||
|
|
||||||
import pendulum
|
from akkudoktoreos.core.logabc import logging_str_to_level
|
||||||
from loguru import logger
|
|
||||||
|
|
||||||
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
|
||||||
|
|
||||||
|
|
||||||
class InterceptHandler(pylogging.Handler):
|
def get_logger(
|
||||||
"""A logging handler that redirects standard Python logging messages to Loguru.
|
name: str,
|
||||||
|
log_file: Optional[str] = None,
|
||||||
|
logging_level: Optional[str] = None,
|
||||||
|
max_bytes: int = 5000000,
|
||||||
|
backup_count: int = 5,
|
||||||
|
) -> pylogging.Logger:
|
||||||
|
"""Creates and configures a logger with a given name.
|
||||||
|
|
||||||
This handler ensures consistency between the `logging` module and Loguru by intercepting
|
The logger supports logging to both the console and an optional log file. File logging is
|
||||||
logs sent to the standard logging system and re-emitting them through Loguru with proper
|
handled by a rotating file handler to prevent excessive log file size.
|
||||||
formatting and context (including exception info and call depth).
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
loglevel_mapping (dict): Mapping from standard logging levels to Loguru level names.
|
|
||||||
"""
|
|
||||||
|
|
||||||
loglevel_mapping: dict[int, str] = {
|
|
||||||
50: "CRITICAL",
|
|
||||||
40: "ERROR",
|
|
||||||
30: "WARNING",
|
|
||||||
20: "INFO",
|
|
||||||
10: "DEBUG",
|
|
||||||
5: "TRACE",
|
|
||||||
0: "NOTSET",
|
|
||||||
}
|
|
||||||
|
|
||||||
def emit(self, record: pylogging.LogRecord) -> None:
|
|
||||||
"""Emits a logging record by forwarding it to Loguru with preserved metadata.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
record (logging.LogRecord): A record object containing log message and metadata.
|
|
||||||
"""
|
|
||||||
# Skip DEBUG logs from matplotlib - very noisy
|
|
||||||
if record.name.startswith("matplotlib") and record.levelno <= pylogging.DEBUG:
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
level = logger.level(record.levelname).name
|
|
||||||
except AttributeError:
|
|
||||||
level = self.loglevel_mapping.get(record.levelno, "INFO")
|
|
||||||
|
|
||||||
frame: Optional[FrameType] = pylogging.currentframe()
|
|
||||||
depth: int = 2
|
|
||||||
while frame and frame.f_code.co_filename == pylogging.__file__:
|
|
||||||
frame = frame.f_back
|
|
||||||
depth += 1
|
|
||||||
|
|
||||||
log = logger.bind(request_id="app")
|
|
||||||
log.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
|
|
||||||
|
|
||||||
|
|
||||||
console_handler_id = None
|
|
||||||
file_handler_id = None
|
|
||||||
|
|
||||||
|
|
||||||
def track_logging_config(config_eos: Any, path: str, old_value: Any, value: Any) -> None:
|
|
||||||
"""Track logging config changes."""
|
|
||||||
global console_handler_id, file_handler_id
|
|
||||||
|
|
||||||
if not path.startswith("logging"):
|
|
||||||
raise ValueError(f"Logging shall not track '{path}'")
|
|
||||||
|
|
||||||
if not config_eos.logging.console_level:
|
|
||||||
# No value given - check environment value - may also be None
|
|
||||||
config_eos.logging.console_level = os.getenv("EOS_LOGGING__LEVEL")
|
|
||||||
if not config_eos.logging.file_level:
|
|
||||||
# No value given - check environment value - may also be None
|
|
||||||
config_eos.logging.file_level = os.getenv("EOS_LOGGING__LEVEL")
|
|
||||||
|
|
||||||
# Remove handlers
|
|
||||||
if console_handler_id:
|
|
||||||
try:
|
|
||||||
logger.remove(console_handler_id)
|
|
||||||
except Exception as e:
|
|
||||||
logger.debug("Exception on logger.remove: {}", e, exc_info=True)
|
|
||||||
console_handler_id = None
|
|
||||||
if file_handler_id:
|
|
||||||
try:
|
|
||||||
logger.remove(file_handler_id)
|
|
||||||
except Exception as e:
|
|
||||||
logger.debug("Exception on logger.remove: {}", e, exc_info=True)
|
|
||||||
file_handler_id = None
|
|
||||||
|
|
||||||
# Create handlers with new configuration
|
|
||||||
# Always add console handler
|
|
||||||
if config_eos.logging.console_level not in LOGGING_LEVELS:
|
|
||||||
logger.error(
|
|
||||||
f"Invalid console log level '{config_eos.logging.console_level} - forced to INFO'."
|
|
||||||
)
|
|
||||||
config_eos.logging.console_level = "INFO"
|
|
||||||
|
|
||||||
console_handler_id = logger.add(
|
|
||||||
sys.stderr,
|
|
||||||
enqueue=True,
|
|
||||||
backtrace=True,
|
|
||||||
level=config_eos.logging.console_level,
|
|
||||||
# format=_console_format
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add file handler
|
|
||||||
if config_eos.logging.file_level and config_eos.logging.file_path:
|
|
||||||
if config_eos.logging.file_level not in LOGGING_LEVELS:
|
|
||||||
logger.error(
|
|
||||||
f"Invalid file log level '{config_eos.logging.console_level}' - forced to INFO."
|
|
||||||
)
|
|
||||||
config_eos.logging.file_level = "INFO"
|
|
||||||
|
|
||||||
file_handler_id = logger.add(
|
|
||||||
sink=config_eos.logging.file_path,
|
|
||||||
rotation="100 MB",
|
|
||||||
retention="3 days",
|
|
||||||
enqueue=True,
|
|
||||||
backtrace=True,
|
|
||||||
level=config_eos.logging.file_level,
|
|
||||||
serialize=True, # JSON dict formatting
|
|
||||||
# format=_file_format
|
|
||||||
)
|
|
||||||
|
|
||||||
# Redirect standard logging to Loguru
|
|
||||||
pylogging.basicConfig(handlers=[InterceptHandler()], level=0)
|
|
||||||
# Redirect uvicorn and fastapi logging to Loguru
|
|
||||||
pylogging.getLogger("uvicorn.access").handlers = [InterceptHandler()]
|
|
||||||
for pylogger_name in ["uvicorn", "uvicorn.error", "fastapi"]:
|
|
||||||
pylogger = pylogging.getLogger(pylogger_name)
|
|
||||||
pylogger.handlers = [InterceptHandler()]
|
|
||||||
pylogger.propagate = False
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f"Logger reconfigured - console: {config_eos.logging.console_level}, file: {config_eos.logging.file_level}."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def read_file_log(
|
|
||||||
log_path: Path,
|
|
||||||
limit: int = 100,
|
|
||||||
level: Optional[str] = None,
|
|
||||||
contains: Optional[str] = None,
|
|
||||||
regex: Optional[str] = None,
|
|
||||||
from_time: Optional[str] = None,
|
|
||||||
to_time: Optional[str] = None,
|
|
||||||
tail: bool = False,
|
|
||||||
) -> List[dict]:
|
|
||||||
"""Read and filter structured log entries from a JSON-formatted log file.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
log_path (Path): Path to the JSON-formatted log file.
|
name (str): The name of the logger, typically `__name__` from the calling module.
|
||||||
limit (int, optional): Maximum number of log entries to return. Defaults to 100.
|
log_file (Optional[str]): Path to the log file for file logging. If None, no file logging is done.
|
||||||
level (Optional[str], optional): Filter logs by log level (e.g., "INFO", "ERROR"). Defaults to None.
|
logging_level (Optional[str]): Logging level (e.g., "INFO", "DEBUG"). Defaults to "INFO".
|
||||||
contains (Optional[str], optional): Filter logs that contain this substring in their message. Case-insensitive. Defaults to None.
|
max_bytes (int): Maximum size in bytes for log file before rotation. Defaults to 5 MB.
|
||||||
regex (Optional[str], optional): Filter logs whose message matches this regular expression. Defaults to None.
|
backup_count (int): Number of backup log files to keep. Defaults to 5.
|
||||||
from_time (Optional[str], optional): ISO 8601 datetime string to filter logs not earlier than this time. Defaults to None.
|
|
||||||
to_time (Optional[str], optional): ISO 8601 datetime string to filter logs not later than this time. Defaults to None.
|
|
||||||
tail (bool, optional): If True, read the last lines of the file (like `tail -n`). Defaults to False.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
List[dict]: A list of filtered log entries as dictionaries.
|
logging.Logger: Configured logger instance.
|
||||||
|
|
||||||
Raises:
|
Example:
|
||||||
FileNotFoundError: If the log file does not exist.
|
logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
|
||||||
-        ValueError: If the datetime strings are invalid or improperly formatted.
-        Exception: For other unforeseen I/O or parsing errors.
+        logger.info("Application started")
     """
-    if not log_path.exists():
-        raise FileNotFoundError("Log file not found")
-
-    try:
-        from_dt = pendulum.parse(from_time) if from_time else None
-        to_dt = pendulum.parse(to_time) if to_time else None
-    except Exception as e:
-        raise ValueError(f"Invalid date/time format: {e}")
-
-    regex_pattern = re.compile(regex) if regex else None
-
-    def matches_filters(log: dict) -> bool:
-        if level and log.get("level", {}).get("name") != level.upper():
-            return False
-        if contains and contains.lower() not in log.get("message", "").lower():
-            return False
-        if regex_pattern and not regex_pattern.search(log.get("message", "")):
-            return False
-        if from_dt or to_dt:
-            try:
-                log_time = pendulum.parse(log["time"])
-            except Exception:
-                return False
-            if from_dt and log_time < from_dt:
-                return False
-            if to_dt and log_time > to_dt:
-                return False
-        return True
-
-    matched_logs = []
-    lines: list[str] = []
-
-    if tail:
-        with log_path.open("rb") as f:
-            f.seek(0, 2)
-            end = f.tell()
-            buffer = bytearray()
-            pointer = end
-
-            while pointer > 0 and len(lines) < limit * 5:
-                pointer -= 1
-                f.seek(pointer)
-                byte = f.read(1)
-                if byte == b"\n":
-                    if buffer:
-                        line = buffer[::-1].decode("utf-8", errors="ignore")
-                        lines.append(line)
-                        buffer.clear()
-                else:
-                    buffer.append(byte[0])
-            if buffer:
-                line = buffer[::-1].decode("utf-8", errors="ignore")
-                lines.append(line)
-        lines = lines[::-1]
-    else:
-        with log_path.open("r", encoding="utf-8", newline=None) as f_txt:
-            lines = f_txt.readlines()
-
-    for line in lines:
-        if not line.strip():
-            continue
-        try:
-            log = json.loads(line)
-        except json.JSONDecodeError:
-            continue
-        if matches_filters(log):
-            matched_logs.append(log)
-            if len(matched_logs) >= limit:
-                break
-
-    return matched_logs
+    # Create a logger with the specified name
+    logger = pylogging.getLogger(name)
+    logger.propagate = True
+    if logging_level is not None:
+        level = logging_str_to_level(logging_level)
+        logger.setLevel(level)
+
+    # The log message format
+    formatter = pylogging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+
+    # Prevent loggers from being added multiple times
+    # There may already be a logger from pytest
+    if not logger.handlers:
+        # Create a console handler with a standard output stream
+        console_handler = pylogging.StreamHandler()
+        if logging_level is not None:
+            console_handler.setLevel(level)
+        console_handler.setFormatter(formatter)
+
+        # Add the console handler to the logger
+        logger.addHandler(console_handler)
+
+    if log_file and len(logger.handlers) < 2: # We assume a console logger to be the first logger
+        # If a log file path is specified, create a rotating file handler
+        # Ensure the log directory exists
+        log_dir = os.path.dirname(log_file)
+        if log_dir and not os.path.exists(log_dir):
+            os.makedirs(log_dir)
+
+        # Create a rotating file handler
+        file_handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
+        if logging_level is not None:
+            file_handler.setLevel(level)
+        file_handler.setFormatter(formatter)
+
+        # Add the file handler to the logger
+        logger.addHandler(file_handler)
+
+    return logger
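For orientation, a self-contained sketch of the record-filtering logic on the removed side of this hunk (hedged: the function and parameter names below are illustrative stand-ins, not the module's actual API; the real code additionally supports pendulum time-range filters and reverse tail reading):

import json
import re

def filter_json_logs(path: str, level=None, contains=None, regex=None, limit=100) -> list[dict]:
    """Illustrative re-implementation of the matches_filters() logic above."""
    pattern = re.compile(regex) if regex else None
    matched: list[dict] = []
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            if not line.strip():
                continue
            try:
                log = json.loads(line)
            except json.JSONDecodeError:
                continue  # skip non-JSON lines
            if level and log.get("level", {}).get("name") != level.upper():
                continue
            if contains and contains.lower() not in log.get("message", "").lower():
                continue
            if pattern and not pattern.search(log.get("message", "")):
                continue
            matched.append(log)
            if len(matched) >= limit:
                break
    return matched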
@@ -3,60 +3,43 @@
 Kept in an extra module to avoid cyclic dependencies on package import.
 """
 
-from pathlib import Path
+import logging
+import os
 from typing import Optional
 
 from pydantic import Field, computed_field, field_validator
 
 from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.core.logabc import LOGGING_LEVELS
+from akkudoktoreos.core.logabc import logging_str_to_level
 
 
 class LoggingCommonSettings(SettingsBaseModel):
-    """Logging Configuration."""
+    """Common settings for logging."""
 
-    console_level: Optional[str] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Logging level when logging to console.",
-            "examples": LOGGING_LEVELS,
-        },
+    logging_level_default: Optional[str] = Field(
+        default=None, description="EOS default logging level."
     )
 
-    file_level: Optional[str] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Logging level when logging to file.",
-            "examples": LOGGING_LEVELS,
-        },
-    )
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def file_path(self) -> Optional[Path]:
-        """Computed log file path based on data output path."""
-        try:
-            path = SettingsBaseModel.config.general.data_output_path / "eos.log"
-        except:
-            # Config may not be fully set up
-            path = None
-        return path
-
     # Validators
-    @field_validator("console_level", "file_level", mode="after")
+    @field_validator("logging_level_default", mode="after")
     @classmethod
-    def validate_level(cls, value: Optional[str]) -> Optional[str]:
-        """Validate logging level string."""
+    def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
+        if isinstance(value, str) and value.upper() == "NONE":
+            value = None
+        if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
+            # Take default logging level from special environment variable
+            value = env_level
         if value is None:
-            # Nothing to set
             return None
-        if isinstance(value, str):
-            level = value.upper()
-            if level == "NONE":
-                return None
-            if level not in LOGGING_LEVELS:
-                raise ValueError(f"Logging level {value} not supported")
-            value = level
-        else:
-            raise TypeError(f"Invalid {type(value)} of logging level {value}")
+        level = logging_str_to_level(value)
+        logging.getLogger().setLevel(level)
         return value
+
+    # Computed fields
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def logging_level_root(self) -> str:
+        """Root logger logging level."""
+        level = logging.getLogger().getEffectiveLevel()
+        level_name = logging.getLevelName(level)
+        return level_name
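The removed validator only normalizes and checks the level string; the added one also applies the level to the root logger. A minimal sketch of the added behavior outside pydantic (assumption: logging_str_to_level resolves names such as "DEBUG" to stdlib levels, as its use above implies; the stand-in below relies on stdlib name resolution instead):

import logging
import os

def set_default_logging_level(value):
    # Mirrors the validator above: "NONE" means unset, EOS_LOGGING_LEVEL is a fallback.
    if isinstance(value, str) and value.upper() == "NONE":
        value = None
    if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
        value = env_level
    if value is None:
        return None
    level = logging.getLevelName(value.upper())  # stand-in for logging_str_to_level()
    logging.getLogger().setLevel(level)  # raises ValueError for unknown level names
    return value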
File diff suppressed because it is too large
@@ -1,5 +0,0 @@
-"""Version information for akkudoktoreos."""
-
-# For development add `+dev` to previous release
-# For release omit `+dev`.
-__version__ = "0.2.0+dev"
@@ -1,5 +1,113 @@
 {
-    "general": {
-        "version": "0.2.0+dev"
-    }
+    "config_file_path": null,
+    "config_folder_path": null,
+    "data_cache_path": null,
+    "data_cache_subpath": null,
+    "data_folder_path": null,
+    "data_output_path": null,
+    "data_output_subpath": null,
+    "elecprice_charges_kwh": 0.21,
+    "elecprice_provider": null,
+    "elecpriceimport_file_path": null,
+    "latitude": 52.5,
+    "load_import_file_path": null,
+    "load_name": null,
+    "load_provider": null,
+    "loadakkudoktor_year_energy": null,
+    "logging_level": "INFO",
+    "longitude": 13.4,
+    "optimization_ev_available_charge_rates_percent": null,
+    "optimization_hours": 48,
+    "optimization_penalty": null,
+    "prediction_historic_hours": 48,
+    "prediction_hours": 48,
+    "pvforecast0_albedo": null,
+    "pvforecast0_inverter_model": null,
+    "pvforecast0_inverter_paco": null,
+    "pvforecast0_loss": null,
+    "pvforecast0_module_model": null,
+    "pvforecast0_modules_per_string": null,
+    "pvforecast0_mountingplace": "free",
+    "pvforecast0_optimal_surface_tilt": false,
+    "pvforecast0_optimalangles": false,
+    "pvforecast0_peakpower": null,
+    "pvforecast0_pvtechchoice": "crystSi",
+    "pvforecast0_strings_per_inverter": null,
+    "pvforecast0_surface_azimuth": 180,
+    "pvforecast0_surface_tilt": 0,
+    "pvforecast0_trackingtype": 0,
+    "pvforecast0_userhorizon": null,
+    "pvforecast1_albedo": null,
+    "pvforecast1_inverter_model": null,
+    "pvforecast1_inverter_paco": null,
+    "pvforecast1_loss": 0,
+    "pvforecast1_module_model": null,
+    "pvforecast1_modules_per_string": null,
+    "pvforecast1_mountingplace": "free",
+    "pvforecast1_optimal_surface_tilt": false,
+    "pvforecast1_optimalangles": false,
+    "pvforecast1_peakpower": null,
+    "pvforecast1_pvtechchoice": "crystSi",
+    "pvforecast1_strings_per_inverter": null,
+    "pvforecast1_surface_azimuth": 180,
+    "pvforecast1_surface_tilt": 0,
+    "pvforecast1_trackingtype": 0,
+    "pvforecast1_userhorizon": null,
+    "pvforecast2_albedo": null,
+    "pvforecast2_inverter_model": null,
+    "pvforecast2_inverter_paco": null,
+    "pvforecast2_loss": 0,
+    "pvforecast2_module_model": null,
+    "pvforecast2_modules_per_string": null,
+    "pvforecast2_mountingplace": "free",
+    "pvforecast2_optimal_surface_tilt": false,
+    "pvforecast2_optimalangles": false,
+    "pvforecast2_peakpower": null,
+    "pvforecast2_pvtechchoice": "crystSi",
+    "pvforecast2_strings_per_inverter": null,
+    "pvforecast2_surface_azimuth": 180,
+    "pvforecast2_surface_tilt": 0,
+    "pvforecast2_trackingtype": 0,
+    "pvforecast2_userhorizon": null,
+    "pvforecast3_albedo": null,
+    "pvforecast3_inverter_model": null,
+    "pvforecast3_inverter_paco": null,
+    "pvforecast3_loss": 0,
+    "pvforecast3_module_model": null,
+    "pvforecast3_modules_per_string": null,
+    "pvforecast3_mountingplace": "free",
+    "pvforecast3_optimal_surface_tilt": false,
+    "pvforecast3_optimalangles": false,
+    "pvforecast3_peakpower": null,
+    "pvforecast3_pvtechchoice": "crystSi",
+    "pvforecast3_strings_per_inverter": null,
+    "pvforecast3_surface_azimuth": 180,
+    "pvforecast3_surface_tilt": 0,
+    "pvforecast3_trackingtype": 0,
+    "pvforecast3_userhorizon": null,
+    "pvforecast4_albedo": null,
+    "pvforecast4_inverter_model": null,
+    "pvforecast4_inverter_paco": null,
+    "pvforecast4_loss": 0,
+    "pvforecast4_module_model": null,
+    "pvforecast4_modules_per_string": null,
+    "pvforecast4_mountingplace": "free",
+    "pvforecast4_optimal_surface_tilt": false,
+    "pvforecast4_optimalangles": false,
+    "pvforecast4_peakpower": null,
+    "pvforecast4_pvtechchoice": "crystSi",
+    "pvforecast4_strings_per_inverter": null,
+    "pvforecast4_surface_azimuth": 180,
+    "pvforecast4_surface_tilt": 0,
+    "pvforecast4_trackingtype": 0,
+    "pvforecast4_userhorizon": null,
+    "pvforecast_provider": null,
+    "pvforecastimport_file_path": null,
+    "server_eos_startup_eosdash": true,
+    "server_eos_host": "0.0.0.0",
+    "server_eos_port": 8503,
+    "server_eosdash_host": "0.0.0.0",
+    "server_eosdash_port": 8504,
+    "weather_provider": null,
+    "weatherimport_file_path": null
 }
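The added configuration flattens the earlier nested layout into one flat JSON object. A minimal sketch of reading it (assumptions: plain json plus dict access; the file name is illustrative and EOS's own config loader is not shown here):

import json
from pathlib import Path

config = json.loads(Path("EOS.config.json").read_text())  # illustrative file name
print(config["latitude"], config["longitude"])  # 52.5 13.4
print(config["server_eos_port"], config["server_eosdash_port"])  # 8503 8504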
289 src/akkudoktoreos/devices/battery.py Normal file
@@ -0,0 +1,289 @@
+from typing import Any, Optional
+
+import numpy as np
+from pydantic import BaseModel, Field, field_validator
+
+from akkudoktoreos.core.logging import get_logger
+from akkudoktoreos.core.pydantic import ParametersBaseModel
+from akkudoktoreos.devices.devicesabc import DeviceBase
+from akkudoktoreos.utils.utils import NumpyEncoder
+
+logger = get_logger(__name__)
+
+
+def max_charging_power_field(description: Optional[str] = None) -> float:
+    if description is None:
+        description = "Maximum charging power in watts."
+    return Field(
+        default=5000,
+        gt=0,
+        description=description,
+    )
+
+
+def initial_soc_percentage_field(description: str) -> int:
+    return Field(default=0, ge=0, le=100, description=description)
+
+
+class BaseBatteryParameters(ParametersBaseModel):
+    """Base class for battery parameters with fields for capacity, efficiency, and state of charge."""
+
+    capacity_wh: int = Field(
+        gt=0, description="An integer representing the capacity of the battery in watt-hours."
+    )
+    charging_efficiency: float = Field(
+        default=0.88,
+        gt=0,
+        le=1,
+        description="A float representing the charging efficiency of the battery.",
+    )
+    discharging_efficiency: float = Field(
+        default=0.88,
+        gt=0,
+        le=1,
+        description="A float representing the discharge efficiency of the battery.",
+    )
+    max_charge_power_w: Optional[float] = max_charging_power_field()
+    initial_soc_percentage: int = initial_soc_percentage_field(
+        "An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)."
+    )
+    min_soc_percentage: int = Field(
+        default=0,
+        ge=0,
+        le=100,
+        description="An integer representing the minimum state of charge (SOC) of the battery in percentage.",
+    )
+    max_soc_percentage: int = Field(
+        default=100,
+        ge=0,
+        le=100,
+        description="An integer representing the maximum state of charge (SOC) of the battery in percentage.",
+    )
+
+
+class SolarPanelBatteryParameters(BaseBatteryParameters):
+    max_charge_power_w: Optional[float] = max_charging_power_field()
+
+
+class ElectricVehicleParameters(BaseBatteryParameters):
+    """Parameters specific to an electric vehicle (EV)."""
+
+    discharging_efficiency: float = 1.0
+    initial_soc_percentage: int = initial_soc_percentage_field(
+        "An integer representing the current state of charge (SOC) of the battery in percentage."
+    )
+
+
+class ElectricVehicleResult(BaseModel):
+    """Result class containing information related to the electric vehicle's charging and discharging behavior."""
+
+    charge_array: list[float] = Field(
+        description="Hourly charging status (0 for no charging, 1 for charging)."
+    )
+    discharge_array: list[int] = Field(
+        description="Hourly discharging status (0 for no discharging, 1 for discharging)."
+    )
+    discharging_efficiency: float = Field(description="The discharge efficiency as a float..")
+    hours: int = Field(description="Number of hours in the simulation.")
+    capacity_wh: int = Field(description="Capacity of the EV’s battery in watt-hours.")
+    charging_efficiency: float = Field(description="Charging efficiency as a float..")
+    max_charge_power_w: int = Field(description="Maximum charging power in watts.")
+    soc_wh: float = Field(
+        description="State of charge of the battery in watt-hours at the start of the simulation."
+    )
+    initial_soc_percentage: int = Field(
+        description="State of charge at the start of the simulation in percentage."
+    )
+
+    @field_validator("discharge_array", "charge_array", mode="before")
+    def convert_numpy(cls, field: Any) -> Any:
+        return NumpyEncoder.convert_numpy(field)[0]
+
+
+class Battery(DeviceBase):
+    """Represents a battery device with methods to simulate energy charging and discharging."""
+
+    def __init__(
+        self,
+        parameters: Optional[BaseBatteryParameters] = None,
+        hours: Optional[int] = 24,
+        provider_id: Optional[str] = None,
+    ):
+        # Initialize configuration and parameters
+        self.provider_id = provider_id
+        self.prefix = "<invalid>"
+        if self.provider_id == "GenericBattery":
+            self.prefix = "battery"
+        elif self.provider_id == "GenericBEV":
+            self.prefix = "bev"
+
+        self.parameters = parameters
+        if hours is None:
+            self.hours = self.total_hours  # TODO where does that come from?
+        else:
+            self.hours = hours
+
+        self.initialised = False
+
+        # Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
+        if self.parameters is not None:
+            self.setup()
+
+    def setup(self) -> None:
+        """Sets up the battery parameters based on configuration or provided parameters."""
+        if self.initialised:
+            return
+
+        if self.provider_id:
+            # Setup from configuration
+            self.capacity_wh = getattr(self.config, f"{self.prefix}_capacity")
+            self.initial_soc_percentage = getattr(self.config, f"{self.prefix}_initial_soc")
+            self.hours = self.total_hours  # TODO where does that come from?
+            self.charging_efficiency = getattr(self.config, f"{self.prefix}_charging_efficiency")
+            self.discharging_efficiency = getattr(
+                self.config, f"{self.prefix}_discharging_efficiency"
+            )
+            self.max_charge_power_w = getattr(self.config, f"{self.prefix}_max_charging_power")
+
+            if self.provider_id == "GenericBattery":
+                self.min_soc_percentage = getattr(
+                    self.config,
+                    f"{self.prefix}_soc_min",
+                )
+            else:
+                self.min_soc_percentage = 0
+
+            self.max_soc_percentage = getattr(
+                self.config,
+                f"{self.prefix}_soc_max",
+            )
+        elif self.parameters:
+            # Setup from parameters
+            self.capacity_wh = self.parameters.capacity_wh
+            self.initial_soc_percentage = self.parameters.initial_soc_percentage
+            self.charging_efficiency = self.parameters.charging_efficiency
+            self.discharging_efficiency = self.parameters.discharging_efficiency
+            self.max_charge_power_w = self.parameters.max_charge_power_w
+            # Only assign for storage battery
+            self.min_soc_percentage = (
+                self.parameters.min_soc_percentage
+                if isinstance(self.parameters, SolarPanelBatteryParameters)
+                else 0
+            )
+            self.max_soc_percentage = self.parameters.max_soc_percentage
+        else:
+            error_msg = "Parameters and provider ID are missing. Cannot instantiate."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
+        # Initialize state of charge
+        if self.max_charge_power_w is None:
+            self.max_charge_power_w = self.capacity_wh  # TODO this should not be equal capacity_wh
+        self.discharge_array = np.full(self.hours, 1)
+        self.charge_array = np.full(self.hours, 1)
+        self.soc_wh = (self.initial_soc_percentage / 100) * self.capacity_wh
+        self.min_soc_wh = (self.min_soc_percentage / 100) * self.capacity_wh
+        self.max_soc_wh = (self.max_soc_percentage / 100) * self.capacity_wh
+
+        self.initialised = True
+
+    def to_dict(self) -> dict[str, Any]:
+        """Converts the object to a dictionary representation."""
+        return {
+            "capacity_wh": self.capacity_wh,
+            "initial_soc_percentage": self.initial_soc_percentage,
+            "soc_wh": self.soc_wh,
+            "hours": self.hours,
+            "discharge_array": self.discharge_array,
+            "charge_array": self.charge_array,
+            "charging_efficiency": self.charging_efficiency,
+            "discharging_efficiency": self.discharging_efficiency,
+            "max_charge_power_w": self.max_charge_power_w,
+        }
+
+    def reset(self) -> None:
+        """Resets the battery state to its initial values."""
+        self.soc_wh = (self.initial_soc_percentage / 100) * self.capacity_wh
+        self.soc_wh = min(max(self.soc_wh, self.min_soc_wh), self.max_soc_wh)
+        self.discharge_array = np.full(self.hours, 1)
+        self.charge_array = np.full(self.hours, 1)
+
+    def set_discharge_per_hour(self, discharge_array: np.ndarray) -> None:
+        """Sets the discharge values for each hour."""
+        if len(discharge_array) != self.hours:
+            raise ValueError(f"Discharge array must have exactly {self.hours} elements.")
+        self.discharge_array = np.array(discharge_array)
+
+    def set_charge_per_hour(self, charge_array: np.ndarray) -> None:
+        """Sets the charge values for each hour."""
+        if len(charge_array) != self.hours:
+            raise ValueError(f"Charge array must have exactly {self.hours} elements.")
+        self.charge_array = np.array(charge_array)
+
+    def set_charge_allowed_for_hour(self, charge: float, hour: int) -> None:
+        """Sets the charge for a specific hour."""
+        if hour >= self.hours:
+            raise ValueError(f"Hour {hour} is out of range. Must be less than {self.hours}.")
+        self.charge_array[hour] = charge
+
+    def current_soc_percentage(self) -> float:
+        """Calculates the current state of charge in percentage."""
+        return (self.soc_wh / self.capacity_wh) * 100
+
+    def discharge_energy(self, wh: float, hour: int) -> tuple[float, float]:
+        """Discharges energy from the battery."""
+        if self.discharge_array[hour] == 0:
+            return 0.0, 0.0
+
+        max_possible_discharge_wh = (self.soc_wh - self.min_soc_wh) * self.discharging_efficiency
+        max_possible_discharge_wh = max(max_possible_discharge_wh, 0.0)
+
+        max_possible_discharge_wh = min(
+            max_possible_discharge_wh, self.max_charge_power_w
+        )  # TODO make a new cfg variable max_discharge_power_w
+
+        actual_discharge_wh = min(wh, max_possible_discharge_wh)
+        actual_withdrawal_wh = (
+            actual_discharge_wh / self.discharging_efficiency
+            if self.discharging_efficiency > 0
+            else 0.0
+        )
+
+        self.soc_wh -= actual_withdrawal_wh
+        self.soc_wh = max(self.soc_wh, self.min_soc_wh)
+
+        losses_wh = actual_withdrawal_wh - actual_discharge_wh
+        return actual_discharge_wh, losses_wh
+
+    def charge_energy(
+        self, wh: Optional[float], hour: int, relative_power: float = 0.0
+    ) -> tuple[float, float]:
+        """Charges energy into the battery."""
+        if hour is not None and self.charge_array[hour] == 0:
+            return 0.0, 0.0  # Charging not allowed in this hour
+
+        if relative_power > 0.0:
+            wh = self.max_charge_power_w * relative_power
+
+        wh = wh if wh is not None else self.max_charge_power_w
+
+        max_possible_charge_wh = (
+            (self.max_soc_wh - self.soc_wh) / self.charging_efficiency
+            if self.charging_efficiency > 0
+            else 0.0
+        )
+        max_possible_charge_wh = max(max_possible_charge_wh, 0.0)
+
+        effective_charge_wh = min(wh, max_possible_charge_wh)
+        charged_wh = effective_charge_wh * self.charging_efficiency
+
+        self.soc_wh += charged_wh
+        self.soc_wh = min(self.soc_wh, self.max_soc_wh)
+
+        losses_wh = effective_charge_wh - charged_wh
+        return charged_wh, losses_wh
+
+    def current_energy_content(self) -> float:
+        """Returns the current usable energy in the battery."""
+        usable_energy = (self.soc_wh - self.min_soc_wh) * self.discharging_efficiency
+        return max(usable_energy, 0.0)
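A minimal usage sketch for the added Battery simulation (assumption: when parameters are passed directly, no EOS configuration or provider_id is needed; the import path follows the file header above):

from akkudoktoreos.devices.battery import BaseBatteryParameters, Battery

params = BaseBatteryParameters(capacity_wh=10000, initial_soc_percentage=50)
battery = Battery(parameters=params, hours=24)

charged_wh, charge_losses_wh = battery.charge_energy(3000.0, hour=0)
delivered_wh, discharge_losses_wh = battery.discharge_energy(1500.0, hour=1)
print(battery.current_soc_percentage())  # SoC after one charge and one discharge

Both calls return an (energy, losses) tuple in watt-hours, with the charging and discharging efficiencies applied on the way in and out.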
@@ -1,441 +1,313 @@
-"""General configuration settings for simulated devices for optimization."""
+from typing import Any, ClassVar, Dict, Optional, Union
 
-import json
-import re
-from typing import Any, Optional, TextIO, cast
 
 import numpy as np
-from loguru import logger
 from numpydantic import NDArray, Shape
-from pydantic import Field, computed_field, field_validator, model_validator
+from pydantic import Field, computed_field
 
 from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.core.cache import CacheFileStore
-from akkudoktoreos.core.coreabc import ConfigMixin, SingletonMixin
-from akkudoktoreos.core.emplan import ResourceStatus
-from akkudoktoreos.core.pydantic import ConfigDict, PydanticBaseModel
-from akkudoktoreos.devices.devicesabc import DevicesBaseSettings
-from akkudoktoreos.utils.datetimeutil import DateTime, TimeWindowSequence, to_datetime
+from akkudoktoreos.core.coreabc import SingletonMixin
+from akkudoktoreos.core.logging import get_logger
+from akkudoktoreos.devices.battery import Battery
+from akkudoktoreos.devices.devicesabc import DevicesBase
+from akkudoktoreos.devices.generic import HomeAppliance
+from akkudoktoreos.devices.inverter import Inverter
+from akkudoktoreos.prediction.interpolator import SelfConsumptionProbabilityInterpolator
+from akkudoktoreos.utils.datetimeutil import to_duration
 
-# Default charge rates for battery
-BATTERY_DEFAULT_CHARGE_RATES = np.linspace(0.0, 1.0, 11)  # 0.0, 0.1, ..., 1.0
+logger = get_logger(__name__)
 
 
-class BatteriesCommonSettings(DevicesBaseSettings):
-    """Battery devices base settings."""
-
-    capacity_wh: int = Field(
-        default=8000, gt=0, json_schema_extra={"description": "Capacity [Wh].", "examples": [8000]}
-    )
-
-    charging_efficiency: float = Field(
-        default=0.88,
-        gt=0,
-        le=1,
-        json_schema_extra={
-            "description": "Charging efficiency [0.01 ... 1.00].",
-            "examples": [0.88],
-        },
-    )
-
-    discharging_efficiency: float = Field(
-        default=0.88,
-        gt=0,
-        le=1,
-        json_schema_extra={
-            "description": "Discharge efficiency [0.01 ... 1.00].",
-            "examples": [0.88],
-        },
-    )
-
-    levelized_cost_of_storage_kwh: float = Field(
-        default=0.0,
-        json_schema_extra={
-            "description": "Levelized cost of storage (LCOS), the average lifetime cost of delivering one kWh [€/kWh].",
-            "examples": [0.12],
-        },
-    )
-
-    max_charge_power_w: Optional[float] = Field(
-        default=5000,
-        gt=0,
-        json_schema_extra={"description": "Maximum charging power [W].", "examples": [5000]},
-    )
-
-    min_charge_power_w: Optional[float] = Field(
-        default=50,
-        gt=0,
-        json_schema_extra={"description": "Minimum charging power [W].", "examples": [50]},
-    )
-
-    charge_rates: Optional[NDArray[Shape["*"], float]] = Field(
-        default=BATTERY_DEFAULT_CHARGE_RATES,
-        json_schema_extra={
-            "description": (
-                "Charge rates as factor of maximum charging power [0.00 ... 1.00]. "
-                "None triggers fallback to default charge-rates."
-            ),
-            "examples": [[0.0, 0.25, 0.5, 0.75, 1.0], None],
-        },
-    )
-
-    min_soc_percentage: int = Field(
-        default=0,
-        ge=0,
-        le=100,
-        json_schema_extra={
-            "description": (
-                "Minimum state of charge (SOC) as percentage of capacity [%]. "
-                "This is the target SoC for charging"
-            ),
-            "examples": [10],
-        },
-    )
-
-    max_soc_percentage: int = Field(
-        default=100,
-        ge=0,
-        le=100,
-        json_schema_extra={
-            "description": "Maximum state of charge (SOC) as percentage of capacity [%].",
-            "examples": [100],
-        },
-    )
-
-    @field_validator("charge_rates", mode="before")
-    def validate_and_sort_charge_rates(cls, v: Any) -> NDArray[Shape["*"], float]:
-        # None means fallback to default values
-        if v is None:
-            return BATTERY_DEFAULT_CHARGE_RATES.copy()
-
-        # Convert to numpy array
-        if isinstance(v, str):
-            # Remove brackets and split by comma or whitespace
-            numbers = re.split(r"[,\s]+", v.strip("[]"))
-
-            # Filter out any empty strings and convert to floats
-            arr = np.array([float(x) for x in numbers if x])
-        else:
-            arr = np.array(v, dtype=float)
-
-        # Must not be empty
-        if arr.size == 0:
-            raise ValueError("charge_rates must contain at least one value.")
-
-        # Enforce bounds: 0.0 ≤ x ≤ 1.0
-        if (arr < 0.0).any() or (arr > 1.0).any():
-            raise ValueError("charge_rates must be within [0.0, 1.0].")
-
-        # Remove duplicates + sort
-        arr = np.unique(arr)
-        arr.sort()
-
-        return arr
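The removed charge_rates validator normalizes strings, sequences, and None. A standalone sketch of the same normalization (assumption: the function name here is illustrative; string parsing is omitted):

import numpy as np

def normalize_charge_rates(v):
    # None falls back to the defaults, matching BATTERY_DEFAULT_CHARGE_RATES above.
    if v is None:
        return np.linspace(0.0, 1.0, 11)
    arr = np.array(v, dtype=float)
    if arr.size == 0:
        raise ValueError("charge_rates must contain at least one value.")
    if (arr < 0.0).any() or (arr > 1.0).any():
        raise ValueError("charge_rates must be within [0.0, 1.0].")
    return np.unique(arr)  # np.unique already returns a sorted array

print(normalize_charge_rates([1.0, 0.5, 0.5, 0.0]))  # [0.  0.5 1. ]

Note that np.unique already sorts, so the extra arr.sort() in the validator above is redundant but harmless.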
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_key_soc_factor(self) -> str:
-        """Measurement key for the battery state of charge (SoC) as factor of total capacity [0.0 ... 1.0]."""
-        return f"{self.device_id}-soc-factor"
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_key_power_l1_w(self) -> str:
-        """Measurement key for the L1 power the battery is charged or discharged with [W]."""
-        return f"{self.device_id}-power-l1-w"
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_key_power_l2_w(self) -> str:
-        """Measurement key for the L2 power the battery is charged or discharged with [W]."""
-        return f"{self.device_id}-power-l2-w"
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_key_power_l3_w(self) -> str:
-        """Measurement key for the L3 power the battery is charged or discharged with [W]."""
-        return f"{self.device_id}-power-l3-w"
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_key_power_3_phase_sym_w(self) -> str:
-        """Measurement key for the symmetric 3 phase power the battery is charged or discharged with [W]."""
-        return f"{self.device_id}-power-3-phase-sym-w"
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_keys(self) -> Optional[list[str]]:
-        """Measurement keys for the battery stati that are measurements.
-
-        Battery SoC, power.
-        """
-        keys: list[str] = [
-            self.measurement_key_soc_factor,
-            self.measurement_key_power_l1_w,
-            self.measurement_key_power_l2_w,
-            self.measurement_key_power_l3_w,
-            self.measurement_key_power_3_phase_sym_w,
-        ]
-        return keys
-
-
-class InverterCommonSettings(DevicesBaseSettings):
-    """Inverter devices base settings."""
-
-    max_power_w: Optional[float] = Field(
-        default=None,
-        gt=0,
-        json_schema_extra={"description": "Maximum power [W].", "examples": [10000]},
-    )
-
-    battery_id: Optional[str] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "ID of battery controlled by this inverter.",
-            "examples": [None, "battery1"],
-        },
-    )
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_keys(self) -> Optional[list[str]]:
-        """Measurement keys for the inverter stati that are measurements."""
-        keys: list[str] = []
-        return keys
-
-
-class HomeApplianceCommonSettings(DevicesBaseSettings):
-    """Home Appliance devices base settings."""
-
-    consumption_wh: int = Field(
-        gt=0, json_schema_extra={"description": "Energy consumption [Wh].", "examples": [2000]}
-    )
-
-    duration_h: int = Field(
-        gt=0,
-        le=24,
-        json_schema_extra={"description": "Usage duration in hours [0 ... 24].", "examples": [1]},
-    )
-
-    time_windows: Optional[TimeWindowSequence] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Sequence of allowed time windows. Defaults to optimization general time window.",
-            "examples": [
-                {
-                    "windows": [
-                        {"start_time": "10:00", "duration": "2 hours"},
-                    ],
-                },
-            ],
-        },
-    )
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_keys(self) -> Optional[list[str]]:
-        """Measurement keys for the home appliance stati that are measurements."""
-        keys: list[str] = []
-        return keys
-
-
 class DevicesCommonSettings(SettingsBaseModel):
     """Base configuration for devices simulation settings."""
 
-    batteries: Optional[list[BatteriesCommonSettings]] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "List of battery devices",
-            "examples": [[{"device_id": "battery1", "capacity_wh": 8000}]],
-        },
-    )
-
-    max_batteries: Optional[int] = Field(
-        default=None,
-        ge=0,
-        json_schema_extra={
-            "description": "Maximum number of batteries that can be set",
-            "examples": [1, 2],
-        },
-    )
-
-    electric_vehicles: Optional[list[BatteriesCommonSettings]] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "List of electric vehicle devices",
-            "examples": [[{"device_id": "battery1", "capacity_wh": 8000}]],
-        },
-    )
-
-    max_electric_vehicles: Optional[int] = Field(
-        default=None,
-        ge=0,
-        json_schema_extra={
-            "description": "Maximum number of electric vehicles that can be set",
-            "examples": [1, 2],
-        },
-    )
-
-    inverters: Optional[list[InverterCommonSettings]] = Field(
-        default=None, json_schema_extra={"description": "List of inverters", "examples": [[]]}
-    )
-
-    max_inverters: Optional[int] = Field(
-        default=None,
-        ge=0,
-        json_schema_extra={
-            "description": "Maximum number of inverters that can be set",
-            "examples": [1, 2],
-        },
-    )
-
-    home_appliances: Optional[list[HomeApplianceCommonSettings]] = Field(
-        default=None, json_schema_extra={"description": "List of home appliances", "examples": [[]]}
-    )
-
-    max_home_appliances: Optional[int] = Field(
-        default=None,
-        ge=0,
-        json_schema_extra={
-            "description": "Maximum number of home_appliances that can be set",
-            "examples": [1, 2],
-        },
-    )
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def measurement_keys(self) -> Optional[list[str]]:
-        """Return the measurement keys for the resource/ device stati that are measurements."""
-        keys: list[str] = []
-
-        if self.max_batteries and self.batteries:
-            for battery in self.batteries:
-                keys.extend(battery.measurement_keys)
-        if self.max_electric_vehicles and self.electric_vehicles:
-            for electric_vehicle in self.electric_vehicles:
-                keys.extend(electric_vehicle.measurement_keys)
-        return keys
-
-
-# Type used for indexing: (resource_id, optional actuator_id)
-class ResourceKey(PydanticBaseModel):
-    """Key identifying a resource and optionally an actuator."""
-
-    resource_id: str
-    actuator_id: Optional[str] = None
-
-    model_config = ConfigDict(frozen=True)
-
-    def __hash__(self) -> int:
-        """Returns a stable hash based on the resource_id and actuator_id.
-
-        Returns:
-            int: Hash value derived from the resource_id and actuator_id.
-        """
-        return hash(self.resource_id + self.actuator_id if self.actuator_id else "")
-
-    def as_tuple(self) -> tuple[str, Optional[str]]:
-        """Return the key as a tuple for internal dictionary indexing."""
-        return (self.resource_id, self.actuator_id)
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, ResourceKey):
-            return NotImplemented
-        return self.resource_id == other.resource_id and self.actuator_id == other.actuator_id
-
-
-class ResourceRegistry(SingletonMixin, ConfigMixin, PydanticBaseModel):
-    """Registry for collecting and retrieving device status reports for simulations.
-
-    Maintains the latest and optionally historical status reports for each resource.
-    """
-
-    keep_history: bool = False
-    history_size: int = 100
-
-    latest: dict[ResourceKey, ResourceStatus] = Field(
-        default_factory=dict,
-        json_schema_extra={
-            "description": "Latest resource status that was reported per resource key.",
-            "example": [],
-        },
-    )
-    history: dict[ResourceKey, list[tuple[DateTime, ResourceStatus]]] = Field(
-        default_factory=dict,
-        json_schema_extra={
-            "description": "History of resource stati that were reported per resource key.",
-            "example": [],
-        },
-    )
-
-    @model_validator(mode="after")
-    def _enforce_history_limits(self) -> "ResourceRegistry":
-        """Ensure history list lengths respect the history_size limit."""
-        if self.keep_history:
-            for key, records in self.history.items():
-                if len(records) > self.history_size:
-                    self.history[key] = records[-self.history_size :]
-        return self
-
-    def update_status(self, key: ResourceKey, status: ResourceStatus) -> None:
-        """Update the latest status and optionally store in history.
-
-        Args:
-            key (ResourceKey): Identifier for the resource.
-            status (ResourceStatus): Status report to store.
-        """
-        self.latest[key] = status
-        if self.keep_history:
-            timestamp = getattr(status, "transition_timestamp", None) or to_datetime()
-            self.history.setdefault(key, []).append((timestamp, status))
-            if len(self.history[key]) > self.history_size:
-                self.history[key] = self.history[key][-self.history_size :]
-
-    def status_latest(self, key: ResourceKey) -> Optional[ResourceStatus]:
-        """Retrieve the most recent status for a resource."""
-        return self.latest.get(key)
-
-    def status_history(self, key: ResourceKey) -> list[tuple[DateTime, ResourceStatus]]:
-        """Retrieve historical status reports for a resource."""
-        if not self.keep_history:
-            raise RuntimeError("History tracking is disabled.")
-        return self.history.get(key, [])
-
-    def status_exists(self, key: ResourceKey) -> bool:
-        """Check if a status report exists for the given resource.
-
-        Args:
-            key (ResourceKey): Identifier for the resource.
-        """
-        return key in self.latest
-
-    def save(self) -> None:
-        """Save the registry to file."""
-        # Make explicit cast to make mypy happy
-        cache_file = cast(
-            TextIO, CacheFileStore().create(key="resource_registry", mode="w+", suffix=".json")
-        )
-        cache_file.seek(0)
-        cache_file.write(self.model_dump_json(indent=4))
-        cache_file.truncate()  # Important to remove leftover data!
-
-    def load(self) -> None:
-        """Load registry state from file and update the current instance."""
-        cache_file = CacheFileStore().get(key="resource_registry")
-        if cache_file:
-            try:
-                cache_file.seek(0)
-                data = json.load(cache_file)
-                loaded = self.__class__.model_validate(data)
-
-                self.keep_history = loaded.keep_history
-                self.history_size = loaded.history_size
-                self.latest = loaded.latest
-                self.history = loaded.history
-            except Exception as e:
-                logger.error("Can not load resource registry: {}", e)
-
-
-def get_resource_registry() -> ResourceRegistry:
-    """Gets the EOS resource registry."""
-    return ResourceRegistry()
+    # Battery
+    # -------
+    battery_provider: Optional[str] = Field(
+        default=None, description="Id of Battery simulation provider."
+    )
+    battery_capacity: Optional[int] = Field(default=None, description="Battery capacity [Wh].")
+    battery_initial_soc: Optional[int] = Field(
+        default=None, description="Battery initial state of charge [%]."
+    )
+    battery_soc_min: Optional[int] = Field(
+        default=None, description="Battery minimum state of charge [%]."
+    )
+    battery_soc_max: Optional[int] = Field(
+        default=None, description="Battery maximum state of charge [%]."
+    )
+    battery_charging_efficiency: Optional[float] = Field(
+        default=None, description="Battery charging efficiency [%]."
+    )
+    battery_discharging_efficiency: Optional[float] = Field(
+        default=None, description="Battery discharging efficiency [%]."
+    )
+    battery_max_charging_power: Optional[int] = Field(
+        default=None, description="Battery maximum charge power [W]."
+    )
+
+    # Battery Electric Vehicle
+    # ------------------------
+    bev_provider: Optional[str] = Field(
+        default=None, description="Id of Battery Electric Vehicle simulation provider."
+    )
+    bev_capacity: Optional[int] = Field(
+        default=None, description="Battery Electric Vehicle capacity [Wh]."
+    )
+    bev_initial_soc: Optional[int] = Field(
+        default=None, description="Battery Electric Vehicle initial state of charge [%]."
+    )
+    bev_soc_max: Optional[int] = Field(
+        default=None, description="Battery Electric Vehicle maximum state of charge [%]."
+    )
+    bev_charging_efficiency: Optional[float] = Field(
+        default=None, description="Battery Electric Vehicle charging efficiency [%]."
+    )
+    bev_discharging_efficiency: Optional[float] = Field(
+        default=None, description="Battery Electric Vehicle discharging efficiency [%]."
+    )
+    bev_max_charging_power: Optional[int] = Field(
+        default=None, description="Battery Electric Vehicle maximum charge power [W]."
+    )
+
+    # Home Appliance - Dish Washer
+    # ----------------------------
+    dishwasher_provider: Optional[str] = Field(
+        default=None, description="Id of Dish Washer simulation provider."
+    )
+    dishwasher_consumption: Optional[int] = Field(
+        default=None, description="Dish Washer energy consumption [Wh]."
+    )
+    dishwasher_duration: Optional[int] = Field(
+        default=None, description="Dish Washer usage duration [h]."
+    )
+
+    # PV Inverter
+    # -----------
+    inverter_provider: Optional[str] = Field(
+        default=None, description="Id of PV Inverter simulation provider."
+    )
+    inverter_power_max: Optional[float] = Field(
+        default=None, description="Inverter maximum power [W]."
+    )
+
+
+class Devices(SingletonMixin, DevicesBase):
+    # Results of the devices simulation and
+    # insights into various parameters over the entire forecast period.
+    # -----------------------------------------------------------------
+    last_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None, description="The load in watt-hours per hour."
+    )
+    eauto_soc_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None, description="The state of charge of the EV for each hour."
+    )
+    einnahmen_euro_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="The revenue from grid feed-in or other sources in euros per hour.",
+    )
+    home_appliance_wh_per_hour: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="The energy consumption of a household appliance in watt-hours per hour.",
+    )
+    kosten_euro_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None, description="The costs in euros per hour."
+    )
+    grid_import_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None, description="The grid energy drawn in watt-hours per hour."
+    )
+    grid_export_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None, description="The energy fed into the grid in watt-hours per hour."
+    )
+    verluste_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None, description="The losses in watt-hours per hour."
+    )
+    akku_soc_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="The state of charge of the battery (not the EV) in percentage per hour.",
+    )
+
+    # Computed fields
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def total_balance_euro(self) -> float:
+        """The total balance of revenues minus costs in euros."""
+        return self.total_revenues_euro - self.total_costs_euro
+
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def total_revenues_euro(self) -> float:
+        """The total revenues in euros."""
+        if self.einnahmen_euro_pro_stunde is None:
+            return 0
+        return np.nansum(self.einnahmen_euro_pro_stunde)
+
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def total_costs_euro(self) -> float:
+        """The total costs in euros."""
+        if self.kosten_euro_pro_stunde is None:
+            return 0
+        return np.nansum(self.kosten_euro_pro_stunde)
+
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def total_losses_wh(self) -> float:
+        """The total losses in watt-hours over the entire period."""
+        if self.verluste_wh_pro_stunde is None:
+            return 0
+        return np.nansum(self.verluste_wh_pro_stunde)
+
+    # Devices
+    # TODO: Make devices class a container of device simulation providers.
+    # Device simulations to be used are then enabled in the configuration.
+    battery: ClassVar[Battery] = Battery(provider_id="GenericBattery")
+    ev: ClassVar[Battery] = Battery(provider_id="GenericBEV")
+    home_appliance: ClassVar[HomeAppliance] = HomeAppliance(provider_id="GenericDishWasher")
+    inverter: ClassVar[Inverter] = Inverter(
+        self_consumption_predictor=SelfConsumptionProbabilityInterpolator,
+        battery=battery,
+        provider_id="GenericInverter",
+    )
+
+    def update_data(self) -> None:
+        """Update device simulation data."""
+        # Assure devices are set up
+        self.battery.setup()
+        self.ev.setup()
+        self.home_appliance.setup()
+        self.inverter.setup()
+
+        # Pre-allocate arrays for the results, optimized for speed
+        self.last_wh_pro_stunde = np.full((self.total_hours), np.nan)
+        self.grid_export_wh_pro_stunde = np.full((self.total_hours), np.nan)
+        self.grid_import_wh_pro_stunde = np.full((self.total_hours), np.nan)
+        self.kosten_euro_pro_stunde = np.full((self.total_hours), np.nan)
+        self.einnahmen_euro_pro_stunde = np.full((self.total_hours), np.nan)
+        self.akku_soc_pro_stunde = np.full((self.total_hours), np.nan)
+        self.eauto_soc_pro_stunde = np.full((self.total_hours), np.nan)
+        self.verluste_wh_pro_stunde = np.full((self.total_hours), np.nan)
+        self.home_appliance_wh_per_hour = np.full((self.total_hours), np.nan)
+
+        # Set initial state
+        simulation_step = to_duration("1 hour")
+        if self.battery:
+            self.akku_soc_pro_stunde[0] = self.battery.current_soc_percentage()
+        if self.ev:
+            self.eauto_soc_pro_stunde[0] = self.ev.current_soc_percentage()
+
+        # Get predictions for full device simulation time range
+        # gesamtlast[stunde]
+        load_total_mean = self.prediction.key_to_array(
+            "load_total_mean",
+            start_datetime=self.start_datetime,
+            end_datetime=self.end_datetime,
+            interval=simulation_step,
+        )
+        # pv_prognose_wh[stunde]
+        pvforecast_ac_power = self.prediction.key_to_array(
+            "pvforecast_ac_power",
+            start_datetime=self.start_datetime,
+            end_datetime=self.end_datetime,
+            interval=simulation_step,
+        )
+        # strompreis_euro_pro_wh[stunde]
+        elecprice_marketprice_wh = self.prediction.key_to_array(
+            "elecprice_marketprice_wh",
+            start_datetime=self.start_datetime,
+            end_datetime=self.end_datetime,
+            interval=simulation_step,
+        )
+        # einspeiseverguetung_euro_pro_wh_arr[stunde]
+        # TODO: Create prediction for einspeiseverguetung_euro_pro_wh_arr
+        einspeiseverguetung_euro_pro_wh_arr = np.full((self.total_hours), 0.078)
+
+        for stunde_since_now in range(0, self.total_hours):
+            hour = self.start_datetime.hour + stunde_since_now
+
+            # Accumulate loads and PV generation
+            consumption = load_total_mean[stunde_since_now]
+            self.verluste_wh_pro_stunde[stunde_since_now] = 0.0
+
+            # Home appliances
+            if self.home_appliance:
+                ha_load = self.home_appliance.get_load_for_hour(hour)
+                consumption += ha_load
+                self.home_appliance_wh_per_hour[stunde_since_now] = ha_load
+
+            # E-Auto handling
+            if self.ev:
+                if self.ev_charge_hours[hour] > 0:
+                    geladene_menge_eauto, verluste_eauto = self.ev.charge_energy(
+                        None, hour, relative_power=self.ev_charge_hours[hour]
+                    )
+                    consumption += geladene_menge_eauto
+                    self.verluste_wh_pro_stunde[stunde_since_now] += verluste_eauto
+                self.eauto_soc_pro_stunde[stunde_since_now] = self.ev.current_soc_percentage()
+
+            # Process inverter logic
+            grid_export, grid_import, losses, self_consumption = (0.0, 0.0, 0.0, 0.0)
+            if self.battery:
+                self.battery.set_charge_allowed_for_hour(self.dc_charge_hours[hour], hour)
+            if self.inverter:
+                generation = pvforecast_ac_power[hour]
+                grid_export, grid_import, losses, self_consumption = self.inverter.process_energy(
+                    generation, consumption, hour
+                )
+
+            # AC PV Battery Charge
+            if self.battery and self.ac_charge_hours[hour] > 0.0:
+                self.battery.set_charge_allowed_for_hour(1, hour)
+                geladene_menge, verluste_wh = self.battery.charge_energy(
+                    None, hour, relative_power=self.ac_charge_hours[hour]
+                )
+                # print(stunde, " ", geladene_menge, " ",self.ac_charge_hours[stunde]," ",self.battery.current_soc_percentage())
+                consumption += geladene_menge
+                grid_import += geladene_menge
+                self.verluste_wh_pro_stunde[stunde_since_now] += verluste_wh
+
+            self.grid_export_wh_pro_stunde[stunde_since_now] = grid_export
+            self.grid_import_wh_pro_stunde[stunde_since_now] = grid_import
+            self.verluste_wh_pro_stunde[stunde_since_now] += losses
+            self.last_wh_pro_stunde[stunde_since_now] = consumption
+
+            # Financial calculations
+            self.kosten_euro_pro_stunde[stunde_since_now] = (
+                grid_import * self.strompreis_euro_pro_wh[hour]
+            )
+            self.einnahmen_euro_pro_stunde[stunde_since_now] = (
+                grid_export * self.einspeiseverguetung_euro_pro_wh_arr[hour]
+            )
+
+            # battery SOC tracking
+            if self.battery:
+                self.akku_soc_pro_stunde[stunde_since_now] = self.battery.current_soc_percentage()
+            else:
+                self.akku_soc_pro_stunde[stunde_since_now] = 0.0
+
+    def report_dict(self) -> Dict[str, Any]:
+        """Provides devices simulation output as a dictionary."""
+        out: Dict[str, Optional[Union[np.ndarray, float]]] = {
+            "Last_Wh_pro_Stunde": self.last_wh_pro_stunde,
+            "grid_export_Wh_pro_Stunde": self.grid_export_wh_pro_stunde,
+            "grid_import_Wh_pro_Stunde": self.grid_import_wh_pro_stunde,
+            "Kosten_Euro_pro_Stunde": self.kosten_euro_pro_stunde,
+            "akku_soc_pro_stunde": self.akku_soc_pro_stunde,
+            "Einnahmen_Euro_pro_Stunde": self.einnahmen_euro_pro_stunde,
+            "Gesamtbilanz_Euro": self.total_balance_euro,
+            "EAuto_SoC_pro_Stunde": self.eauto_soc_pro_stunde,
+            "Gesamteinnahmen_Euro": self.total_revenues_euro,
+            "Gesamtkosten_Euro": self.total_costs_euro,
+            "Verluste_Pro_Stunde": self.verluste_wh_pro_stunde,
+            "Gesamt_Verluste": self.total_losses_wh,
+            "Home_appliance_wh_per_hour": self.home_appliance_wh_per_hour,
+        }
+        return out
+
+
+# Initialize the Devices simulation, it is a singleton.
+devices = Devices()
+
+
+def get_devices() -> Devices:
+    """Gets the EOS Devices simulation."""
+    return devices
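A usage sketch for the ResourceRegistry on the removed side (assumption: ResourceStatus fields live in akkudoktoreos.core.emplan, outside this diff, so an instance is only passed through here):

from akkudoktoreos.core.emplan import ResourceStatus  # fields not shown in this diff

def record(status: ResourceStatus) -> None:
    # Illustrative flow; ResourceStatus construction happens elsewhere in EOS.
    registry = get_resource_registry()  # singleton via SingletonMixin
    registry.keep_history = True
    key = ResourceKey(resource_id="battery1")
    registry.update_status(key, status)
    assert registry.status_exists(key)
    print(registry.status_latest(key))
    print(registry.status_history(key))  # [(timestamp, status), ...]
    registry.save()  # persists the registry via the cache file store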
@@ -1,133 +1,100 @@
|
|||||||
"""Abstract and base classes for devices."""
|
"""Abstract and base classes for devices."""
|
||||||
|
|
||||||
from enum import StrEnum
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field
|
from pendulum import DateTime
|
||||||
|
from pydantic import ConfigDict, computed_field
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.core.coreabc import (
|
||||||
|
ConfigMixin,
|
||||||
|
EnergyManagementSystemMixin,
|
||||||
|
PredictionMixin,
|
||||||
|
)
|
||||||
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
|
from akkudoktoreos.utils.datetimeutil import to_duration
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DevicesBaseSettings(SettingsBaseModel):
|
class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
|
||||||
"""Base devices setting."""
|
"""A mixin to manage start, end datetimes for devices data.
|
||||||
|
|
||||||
device_id: str = Field(
|
The starting datetime for devices data generation is provided by the energy management
|
||||||
default="<unknown>",
|
system. Device data cannot be computed if this value is `None`.
|
||||||
json_schema_extra={
|
"""
|
||||||
"description": "ID of device",
|
|
||||||
"examples": ["battery1", "ev1", "inverter1", "dishwasher"],
|
# Computed field for end_datetime and keep_datetime
|
||||||
},
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def end_datetime(self) -> Optional[DateTime]:
|
||||||
|
"""Compute the end datetime based on the `start_datetime` and `prediction_hours`.
|
||||||
|
|
||||||
|
Ajusts the calculated end time if DST transitions occur within the prediction window.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
|
||||||
|
"""
|
||||||
|
if self.ems.start_datetime and self.config.prediction_hours:
|
||||||
|
end_datetime = self.ems.start_datetime + to_duration(
|
||||||
|
f"{self.config.prediction_hours} hours"
|
||||||
|
)
|
||||||
|
dst_change = end_datetime.offset_hours - self.ems.start_datetime.offset_hours
|
||||||
|
logger.debug(
|
||||||
|
f"Pre: {self.ems.start_datetime}..{end_datetime}: DST change: {dst_change}"
|
||||||
|
)
|
||||||
|
if dst_change < 0:
|
||||||
|
end_datetime = end_datetime + to_duration(f"{abs(int(dst_change))} hours")
|
||||||
|
elif dst_change > 0:
|
||||||
|
end_datetime = end_datetime - to_duration(f"{abs(int(dst_change))} hours")
|
||||||
|
logger.debug(
|
||||||
|
f"Pst: {self.ems.start_datetime}..{end_datetime}: DST change: {dst_change}"
|
||||||
|
)
|
||||||
|
return end_datetime
|
||||||
|
return None
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def total_hours(self) -> Optional[int]:
|
||||||
|
"""Compute the hours from `start_datetime` to `end_datetime`.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[pendulum.period]: The duration hours, or `None` if either datetime is unavailable.
|
||||||
|
"""
|
||||||
|
end_dt = self.end_datetime
|
||||||
|
if end_dt is None:
|
||||||
|
return None
|
||||||
|
duration = end_dt - self.ems.start_datetime
|
||||||
|
return int(duration.total_hours())
|
||||||
|
|
||||||
|
|

class DeviceBase(DevicesStartEndMixin, PredictionMixin):
    """Base class for device simulations.

    Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
    `prediction`).

    Note:
        Validation on assignment of the Pydantic model is disabled to speed up simulation runs.
    """

    # Disable validation on assignment to speed up simulation runs.
    model_config = ConfigDict(
        validate_assignment=False,
    )
    )


class BatteryOperationMode(StrEnum):
class DevicesBase(DevicesStartEndMixin, PredictionMixin, PydanticBaseModel):
    """Battery Operation Mode.
    """Base class for handling device data.

    Enumerates the operating modes of a battery in a home energy
    Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
    management simulation. These modes require no direct awareness
    `prediction`).
    of electricity prices or carbon intensity — higher-level
    controllers or optimizers decide when to switch modes.

    Modes
    Note:
    -----
        Validation on assignment of the Pydantic model is disabled to speed up simulation runs.
    - IDLE:
        No charging or discharging.

    - SELF_CONSUMPTION:
        Charge from local surplus and discharge to meet local demand.

    - NON_EXPORT:
        Charge from on-site or local surplus with the goal of
        minimizing or preventing energy export to the external grid.
        Discharging to the grid is not allowed.

    - PEAK_SHAVING:
        Discharge during local demand peaks to reduce grid draw.

    - GRID_SUPPORT_EXPORT:
        Discharge to support the upstream grid when commanded.

    - GRID_SUPPORT_IMPORT:
        Charge from the grid when instructed to absorb excess supply.

    - FREQUENCY_REGULATION:
        Perform fast bidirectional power adjustments based on grid
        frequency deviations.

    - RAMP_RATE_CONTROL:
        Smooth changes in local net load or generation.

    - RESERVE_BACKUP:
        Maintain a minimum state of charge for emergency use.

    - OUTAGE_SUPPLY:
        Discharge to power critical loads during a grid outage.

    - FORCED_CHARGE:
        Override all other logic and charge regardless of conditions.

    - FORCED_DISCHARGE:
        Override all other logic and discharge regardless of conditions.

    - FAULT:
        Battery is unavailable due to fault or error state.
    """
    """

    IDLE = "IDLE"
    # Disable validation on assignment to speed up simulation runs.
    SELF_CONSUMPTION = "SELF_CONSUMPTION"
    model_config = ConfigDict(
    NON_EXPORT = "NON_EXPORT"
        validate_assignment=False,
    PEAK_SHAVING = "PEAK_SHAVING"
    )
    GRID_SUPPORT_EXPORT = "GRID_SUPPORT_EXPORT"
    GRID_SUPPORT_IMPORT = "GRID_SUPPORT_IMPORT"
    FREQUENCY_REGULATION = "FREQUENCY_REGULATION"
    RAMP_RATE_CONTROL = "RAMP_RATE_CONTROL"
    RESERVE_BACKUP = "RESERVE_BACKUP"
    OUTAGE_SUPPLY = "OUTAGE_SUPPLY"
    FORCED_CHARGE = "FORCED_CHARGE"
    FORCED_DISCHARGE = "FORCED_DISCHARGE"
    FAULT = "FAULT"


class ApplianceOperationMode(StrEnum):
    """Appliance operation modes.

    Modes
    -----
    - OFF:
        Stop or prevent any active operation of the appliance.

    - RUN:
        Start or continue normal operation of the appliance.

    - DEFER:
        Postpone operation to a later time window based on
        scheduling or optimization criteria.

    - PAUSE:
        Temporarily suspend an ongoing operation, keeping the
        option to resume later.

    - RESUME:
        Continue an operation that was previously paused or
        deferred.

    - LIMIT_POWER:
        Run the appliance under reduced power constraints,
        for example in response to load-management or
        demand-response signals.

    - FORCED_RUN:
        Start or maintain operation even if constraints or
        optimization strategies would otherwise delay or limit it.

    - FAULT:
        Appliance is unavailable due to fault or error state.
    """

    OFF = "OFF"
    RUN = "RUN"
    DEFER = "DEFER"
    PAUSE = "PAUSE"
    RESUME = "RESUME"
    LIMIT_POWER = "LIMIT_POWER"
    FORCED_RUN = "FORCED_RUN"
    FAULT = "FAULT"
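Both operation-mode enums derive from `StrEnum`, so members compare and serialize as plain strings, which keeps JSON and config round-trips trivial. A small usage sketch (abbreviated member list):

from enum import StrEnum  # Python 3.11+

class BatteryOperationMode(StrEnum):
    IDLE = "IDLE"
    SELF_CONSUMPTION = "SELF_CONSUMPTION"

mode = BatteryOperationMode("SELF_CONSUMPTION")    # parse from a config/JSON string
print(mode == "SELF_CONSUMPTION", f"mode={mode}")  # True mode=SELF_CONSUMPTION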
108  src/akkudoktoreos/devices/generic.py  Normal file
@@ -0,0 +1,108 @@
from typing import Optional

import numpy as np
from pydantic import Field

from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import ParametersBaseModel
from akkudoktoreos.devices.devicesabc import DeviceBase

logger = get_logger(__name__)


class HomeApplianceParameters(ParametersBaseModel):
    consumption_wh: int = Field(
        gt=0,
        description="An integer representing the energy consumption of a household device in watt-hours.",
    )
    duration_h: int = Field(
        gt=0,
        description="An integer representing the usage duration of a household device in hours.",
    )


class HomeAppliance(DeviceBase):
    def __init__(
        self,
        parameters: Optional[HomeApplianceParameters] = None,
        hours: Optional[int] = 24,
        provider_id: Optional[str] = None,
    ):
        # Configuration initialisation
        self.provider_id = provider_id
        self.prefix = "<invalid>"
        if self.provider_id == "GenericDishWasher":
            self.prefix = "dishwasher"
        # Parameter initialisation
        self.parameters = parameters
        if hours is None:
            self.hours = self.total_hours
        else:
            self.hours = hours

        self.initialised = False
        # Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
        if self.parameters is not None:
            self.setup()

    def setup(self) -> None:
        if self.initialised:
            return
        if self.provider_id is not None:
            # Setup by configuration
            self.hours = self.total_hours
            self.consumption_wh = getattr(self.config, f"{self.prefix}_consumption")
            self.duration_h = getattr(self.config, f"{self.prefix}_duration")
        elif self.parameters is not None:
            # Setup by parameters
            self.consumption_wh = (
                self.parameters.consumption_wh
            )  # Total energy consumption of the device in Wh
            self.duration_h = self.parameters.duration_h  # Duration of use in hours
        else:
            error_msg = "Parameters and provider ID missing. Can't instantiate."
            logger.error(error_msg)
            raise ValueError(error_msg)
        self.load_curve = np.zeros(self.hours)  # Initialize the load curve with zeros
        self.initialised = True

    def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
        """Sets the start time of the device and generates the corresponding load curve.

        :param start_hour: The hour at which the device should start.
        """
        self.reset_load_curve()
        # Check if the duration of use is within the available time frame
        if start_hour + self.duration_h > self.hours:
            raise ValueError("The duration of use exceeds the available time frame.")
        if start_hour < global_start_hour:
            raise ValueError("The start time is earlier than the available time frame.")

        # Calculate power per hour based on total consumption and duration
        power_per_hour = self.consumption_wh / self.duration_h  # Average power in watts

        # Set the power for the duration of use in the load curve array
        self.load_curve[start_hour : start_hour + self.duration_h] = power_per_hour

    def reset_load_curve(self) -> None:
        """Resets the load curve."""
        self.load_curve = np.zeros(self.hours)

    def get_load_curve(self) -> np.ndarray:
        """Returns the current load curve."""
        return self.load_curve

    def get_load_for_hour(self, hour: int) -> float:
        """Returns the load for a specific hour.

        :param hour: The hour for which the load is queried.
        :return: The load in watts for the specified hour.
        """
        if hour < 0 or hour >= self.hours:
            raise ValueError("The specified hour is outside the available time frame.")

        return self.load_curve[hour]

    def get_latest_starting_point(self) -> int:
        """Returns the latest possible start time at which the device can still run completely."""
        return self.hours - self.duration_h
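The load curve that `HomeAppliance.set_starting_time` builds is simply the total consumption spread evenly over the usage duration, zero elsewhere. A standalone sketch with hypothetical values:

import numpy as np

hours, consumption_wh, duration_h, start_hour = 24, 2000, 2, 18
load_curve = np.zeros(hours)
load_curve[start_hour : start_hour + duration_h] = consumption_wh / duration_h
print(load_curve[17:21])  # [   0. 1000. 1000.    0.]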
@@ -1,280 +0,0 @@
from typing import Any, Iterator, Optional

import numpy as np

from akkudoktoreos.devices.devices import BATTERY_DEFAULT_CHARGE_RATES
from akkudoktoreos.optimization.genetic.geneticdevices import (
    BaseBatteryParameters,
    SolarPanelBatteryParameters,
)


class Battery:
    """Represents a battery device with methods to simulate energy charging and discharging."""

    def __init__(self, parameters: BaseBatteryParameters, prediction_hours: int):
        self.parameters = parameters
        self.prediction_hours = prediction_hours
        self._setup()

    def _setup(self) -> None:
        """Sets up the battery parameters based on provided parameters."""
        self.capacity_wh = self.parameters.capacity_wh
        self.initial_soc_percentage = self.parameters.initial_soc_percentage
        self.charging_efficiency = self.parameters.charging_efficiency
        self.discharging_efficiency = self.parameters.discharging_efficiency

        # Charge rates, in case of None use default
        self.charge_rates = BATTERY_DEFAULT_CHARGE_RATES
        if self.parameters.charge_rates:
            charge_rates = np.array(self.parameters.charge_rates, dtype=float)
            charge_rates = np.unique(charge_rates)
            charge_rates.sort()
            self.charge_rates = charge_rates

        # Only assign for storage battery
        self.min_soc_percentage = (
            self.parameters.min_soc_percentage
            if isinstance(self.parameters, SolarPanelBatteryParameters)
            else 0
        )
        self.max_soc_percentage = self.parameters.max_soc_percentage

        # Initialize state of charge
        if self.parameters.max_charge_power_w is not None:
            self.max_charge_power_w = self.parameters.max_charge_power_w
        else:
            self.max_charge_power_w = self.capacity_wh  # TODO this should not be equal capacity_wh
        self.discharge_array = np.full(self.prediction_hours, 0)
        self.charge_array = np.full(self.prediction_hours, 0)
        self.soc_wh = (self.initial_soc_percentage / 100) * self.capacity_wh
        self.min_soc_wh = (self.min_soc_percentage / 100) * self.capacity_wh
        self.max_soc_wh = (self.max_soc_percentage / 100) * self.capacity_wh

    def _lower_charge_rates_desc(self, start_rate: float) -> Iterator[float]:
        """Yield all charge rates lower than a given rate in descending order.

        Args:
            start_rate (float): The reference charge rate.

        Yields:
            float: Charge rates lower than `start_rate`, in descending order.
        """
        charge_rates_fast = self.charge_rates

        # Find the insertion index for start_rate (left-most position)
        idx = np.searchsorted(charge_rates_fast, start_rate, side="left")

        # Yield values before idx in reverse (descending)
        return (charge_rates_fast[j] for j in range(idx - 1, -1, -1))
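The descending iteration in `_lower_charge_rates_desc` relies on `np.searchsorted` over the sorted rate array: everything before the insertion index is strictly lower and is yielded in reverse. A quick sketch with example rates:

import numpy as np

charge_rates = np.array([0.25, 0.5, 0.75, 1.0])
idx = np.searchsorted(charge_rates, 0.75, side="left")           # -> 2
print([float(charge_rates[j]) for j in range(idx - 1, -1, -1)])  # [0.5, 0.25]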

    def to_dict(self) -> dict[str, Any]:
        """Converts the object to a dictionary representation."""
        return {
            "device_id": self.parameters.device_id,
            "capacity_wh": self.capacity_wh,
            "initial_soc_percentage": self.initial_soc_percentage,
            "soc_wh": self.soc_wh,
            "hours": self.prediction_hours,
            "discharge_array": self.discharge_array,
            "charge_array": self.charge_array,
            "charging_efficiency": self.charging_efficiency,
            "discharging_efficiency": self.discharging_efficiency,
            "max_charge_power_w": self.max_charge_power_w,
        }

    def reset(self) -> None:
        """Resets the battery state to its initial values."""
        self.soc_wh = (self.initial_soc_percentage / 100) * self.capacity_wh
        self.soc_wh = min(max(self.soc_wh, self.min_soc_wh), self.max_soc_wh)
        self.discharge_array = np.full(self.prediction_hours, 0)
        self.charge_array = np.full(self.prediction_hours, 0)

    def set_discharge_per_hour(self, discharge_array: np.ndarray) -> None:
        """Sets the discharge values for each hour."""
        if len(discharge_array) != self.prediction_hours:
            raise ValueError(
                f"Discharge array must have exactly {self.prediction_hours} elements. Got {len(discharge_array)} elements."
            )
        self.discharge_array = np.array(discharge_array)

    def set_charge_per_hour(self, charge_array: np.ndarray) -> None:
        """Sets the charge values for each hour."""
        if len(charge_array) != self.prediction_hours:
            raise ValueError(
                f"Charge array must have exactly {self.prediction_hours} elements. Got {len(charge_array)} elements."
            )
        self.charge_array = np.array(charge_array)

    def current_soc_percentage(self) -> float:
        """Calculates the current state of charge in percentage."""
        return (self.soc_wh / self.capacity_wh) * 100

    def discharge_energy(self, wh: float, hour: int) -> tuple[float, float]:
        """Discharge energy from the battery.

        Discharge is limited by:
        * Requested delivered energy
        * Remaining energy above minimum SoC
        * Maximum discharge power
        * Discharge efficiency

        Args:
            wh (float): Requested delivered energy in watt-hours.
            hour (int): Time index. If `self.discharge_array[hour] == 0`,
                no discharge occurs.

        Returns:
            tuple[float, float]:
                delivered_wh (float): Actual delivered energy [Wh].
                losses_wh (float): Conversion losses [Wh].
        """
        if self.discharge_array[hour] == 0:
            return 0.0, 0.0

        # Raw extractable energy above minimum SoC
        raw_available_wh = max(self.soc_wh - self.min_soc_wh, 0.0)

        # Maximum raw discharge due to power limit
        max_raw_wh = self.max_charge_power_w  # TODO rename to max_discharge_power_w

        # Actual raw withdrawal (internal)
        raw_withdrawal_wh = min(raw_available_wh, max_raw_wh)

        # Convert raw to delivered
        max_deliverable_wh = raw_withdrawal_wh * self.discharging_efficiency

        # Cap by requested delivered energy
        delivered_wh = min(wh, max_deliverable_wh)

        # Effective raw withdrawal based on what is delivered
        raw_used_wh = delivered_wh / self.discharging_efficiency

        # Update SoC
        self.soc_wh -= raw_used_wh
        self.soc_wh = max(self.soc_wh, self.min_soc_wh)

        # Losses
        losses_wh = raw_used_wh - delivered_wh

        return delivered_wh, losses_wh

    def charge_energy(
        self,
        wh: Optional[float],
        hour: int,
        charge_factor: float = 0.0,
    ) -> tuple[float, float]:
        """Charge energy into the battery.

        Two **exclusive** modes:

        Mode 1:
        - `wh is not None` and `charge_factor == 0`
          → The raw requested charge energy is `wh` (pre-efficiency).
          → If remaining capacity is insufficient, charging is automatically limited.
          → No exception is raised due to capacity limits.

        Mode 2:
        - `wh is None` and `charge_factor > 0`
          → The raw requested energy is `max_charge_power_w * charge_factor`.
          → If the request exceeds remaining capacity, the algorithm tries to
            find a lower charge_factor that is compatible. If such a charge factor
            exists, this hour's charge_factor is replaced.
          → If no charge factor can accommodate charging, the request is ignored
            (`(0.0, 0.0)` is returned) and a penalty is applied elsewhere.

        Charging is constrained by:
        • Available SoC headroom (max_soc_wh − soc_wh)
        • max_charge_power_w
        • charging_efficiency

        Args:
            wh (float | None):
                Requested raw energy [Wh] before efficiency.
                Must be provided only for Mode 1 (charge_factor must be 0).

            hour (int):
                Time index. If charging is disabled at this hour (charge_array[hour] == 0),
                returns `(0.0, 0.0)`.

            charge_factor (float):
                Fraction (0–1) of max charge power.
                Must be >0 only in Mode 2 (`wh is None`).

        Returns:
            tuple[float, float]:
                stored_wh : float
                    Energy stored after efficiency [Wh].
                losses_wh : float
                    Conversion losses [Wh].

        Raises:
            ValueError:
                - If the mode is ambiguous (neither Mode 1 nor Mode 2).
                - If the final new SoC would exceed capacity_wh.

        Notes:
            stored_wh = raw_input_wh * charging_efficiency
            losses_wh = raw_input_wh − stored_wh
        """
        # Charging allowed in this hour?
        if hour is not None and self.charge_array[hour] == 0:
            return 0.0, 0.0

        # Provide fast (3x..5x) local read access (vs. self.xxx) for repetitive read access
        soc_wh_fast = self.soc_wh
        max_charge_power_w_fast = self.max_charge_power_w
        charging_efficiency_fast = self.charging_efficiency

        # Decide mode & determine raw_request_wh and raw_charge_wh
        if wh is not None and charge_factor == 0.0:  # mode 1
            raw_request_wh = wh
            raw_charge_wh = max(self.max_soc_wh - soc_wh_fast, 0.0) / charging_efficiency_fast
        elif wh is None and charge_factor > 0.0:  # mode 2
            raw_request_wh = max_charge_power_w_fast * charge_factor
            raw_charge_wh = max(self.max_soc_wh - soc_wh_fast, 0.0) / charging_efficiency_fast
            if raw_request_wh > raw_charge_wh:
                # Use a lower charge factor
                lower_charge_factors = self._lower_charge_rates_desc(charge_factor)
                for charge_factor in lower_charge_factors:
                    raw_request_wh = max_charge_power_w_fast * charge_factor
                    if raw_request_wh <= raw_charge_wh:
                        self.charge_array[hour] = charge_factor
                        break
                if raw_request_wh > raw_charge_wh:
                    # Ignore request - penalty for missing SoC will be applied
                    self.charge_array[hour] = 0
                    return 0.0, 0.0
        else:
            raise ValueError(
                f"{self.parameters.device_id}: charge_energy must be called either "
                "with wh != None and charge_factor == 0, or with wh == None and charge_factor > 0."
            )

        # Remaining capacity
        max_raw_wh = min(raw_charge_wh, max_charge_power_w_fast)

        # Actual raw intake
        raw_input_wh = raw_request_wh if raw_request_wh < max_raw_wh else max_raw_wh

        # Apply efficiency
        stored_wh = raw_input_wh * charging_efficiency_fast
        new_soc = soc_wh_fast + stored_wh

        if new_soc > self.capacity_wh:
            raise ValueError(
                f"{self.parameters.device_id}: SoC {new_soc} Wh exceeds capacity {self.capacity_wh} Wh"
            )

        self.soc_wh = new_soc
        losses_wh = raw_input_wh - stored_wh

        return stored_wh, losses_wh

    def current_energy_content(self) -> float:
        """Returns the current usable energy in the battery."""
        usable_energy = (self.soc_wh - self.min_soc_wh) * self.discharging_efficiency
        return max(usable_energy, 0.0)
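The bookkeeping in `discharge_energy` is easiest to see with numbers: the battery withdraws raw energy, delivers raw energy times efficiency, and books the difference as losses. A worked sketch with illustrative values:

requested_wh = 1200.0
soc_wh, min_soc_wh = 5000.0, 4500.0  # 500 Wh raw headroom above the minimum SoC
max_discharge_power_w = 2000.0
efficiency = 0.9

raw_available_wh = max(soc_wh - min_soc_wh, 0.0)                  # 500.0
raw_withdrawal_wh = min(raw_available_wh, max_discharge_power_w)  # 500.0
max_deliverable_wh = raw_withdrawal_wh * efficiency               # 450.0
delivered_wh = min(requested_wh, max_deliverable_wh)              # request capped at 450.0
raw_used_wh = delivered_wh / efficiency                           # 500.0 taken from the SoC
losses_wh = raw_used_wh - delivered_wh
print(delivered_wh, losses_wh)  # 450.0 50.0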
@@ -1,105 +0,0 @@
import numpy as np

from akkudoktoreos.optimization.genetic.geneticdevices import HomeApplianceParameters
from akkudoktoreos.utils.datetimeutil import (
    TimeWindow,
    TimeWindowSequence,
    to_datetime,
    to_duration,
    to_time,
)


class HomeAppliance:
    def __init__(
        self,
        parameters: HomeApplianceParameters,
        optimization_hours: int,
        prediction_hours: int,
    ):
        self.parameters: HomeApplianceParameters = parameters
        self.prediction_hours = prediction_hours
        self._setup()

    def _setup(self) -> None:
        """Sets up the home appliance parameters based on provided parameters."""
        self.load_curve = np.zeros(self.prediction_hours)  # Initialize the load curve with zeros
        self.duration_h = self.parameters.duration_h
        self.consumption_wh = self.parameters.consumption_wh
        # Setup possible start times
        if self.parameters.time_windows is None:
            self.parameters.time_windows = TimeWindowSequence(
                windows=[
                    TimeWindow(
                        start_time=to_time("00:00"),
                        duration=to_duration(f"{self.prediction_hours} hours"),
                    ),
                ]
            )
        start_datetime = to_datetime().set(hour=0, minute=0, second=0)
        duration = to_duration(f"{self.duration_h} hours")
        self.start_allowed: list[bool] = []
        for hour in range(0, self.prediction_hours):
            self.start_allowed.append(
                self.parameters.time_windows.contains(
                    start_datetime.add(hours=hour), duration=duration
                )
            )
        start_earliest = self.parameters.time_windows.earliest_start_time(duration, start_datetime)
        if start_earliest:
            self.start_earliest = start_earliest.hour
        else:
            self.start_earliest = 0
        start_latest = self.parameters.time_windows.latest_start_time(duration, start_datetime)
        if start_latest:
            self.start_latest = start_latest.hour
        else:
            self.start_latest = 23

    def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> int:
        """Sets the start time of the device and generates the corresponding load curve.

        :param start_hour: The hour at which the device should start.
        """
        if not self.start_allowed[start_hour]:
            # It is not allowed (by the time windows) to start the appliance at this time
            if global_start_hour <= self.start_latest:
                # There is a time window left to start the appliance. Use it
                start_hour = self.start_latest
            else:
                # There is no time window left to run the appliance
                # Set the start into tomorrow
                start_hour = self.start_earliest + 24

        self.reset_load_curve()

        # Calculate power per hour based on total consumption and duration
        power_per_hour = self.consumption_wh / self.duration_h  # Average power in watts

        # Set the power for the duration of use in the load curve array
        if start_hour < len(self.load_curve):
            end_hour = min(start_hour + self.duration_h, self.prediction_hours)
            self.load_curve[start_hour:end_hour] = power_per_hour

        return start_hour

    def reset_load_curve(self) -> None:
        """Resets the load curve."""
        self.load_curve = np.zeros(self.prediction_hours)

    def get_load_curve(self) -> np.ndarray:
        """Returns the current load curve."""
        return self.load_curve

    def get_load_for_hour(self, hour: int) -> float:
        """Returns the load for a specific hour.

        :param hour: The hour for which the load is queried.
        :return: The load in watts for the specified hour.
        """
        if hour < 0 or hour >= self.prediction_hours:
            raise ValueError(
                f"The specified hour {hour} is outside the available time frame {self.prediction_hours}."
            )

        return self.load_curve[hour]
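The `start_allowed` precomputation above reduces the time-window check to one boolean per hour. A simplified plain-Python sketch (hypothetical single window from 08:00 to 20:00, not a real EOS configuration):

duration_h, prediction_hours = 3, 24
window_start, window_end = 8, 20  # appliance may only run between 08:00 and 20:00

start_allowed = [
    window_start <= h and h + duration_h <= window_end
    for h in range(prediction_hours)
]
print([h for h, ok in enumerate(start_allowed) if ok])  # [8, 9, 10, 11, 12, 13, 14, 15, 16, 17]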
@@ -1,7 +1,6 @@
import logging
from typing import List, Sequence
from typing import List, Sequence

from loguru import logger


class Heatpump:
class Heatpump:
    MAX_HEAT_OUTPUT = 5000
    MAX_HEAT_OUTPUT = 5000
@@ -19,9 +18,10 @@ class Heatpump:
    COP_COEFFICIENT = 0.1
    COP_COEFFICIENT = 0.1
    """COP increase per degree"""
    """COP increase per degree"""

    def __init__(self, max_heat_output: int, hours: int):
    def __init__(self, max_heat_output: int, prediction_hours: int):
        self.max_heat_output = max_heat_output
        self.max_heat_output = max_heat_output
        self.hours = hours
        self.prediction_hours = prediction_hours
        self.log = logging.getLogger(__name__)

    def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:
    def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:
        """Check if temperature is in valid range between -100 and 100 degrees Celsius.
        """Check if temperature is in valid range between -100 and 100 degrees Celsius.
@@ -58,7 +58,7 @@ class Heatpump:
                f"Outside temperature '{outside_temperature_celsius}' not in range "
                f"Outside temperature '{outside_temperature_celsius}' not in range "
                "(min: -100 Celsius, max: 100 Celsius)"
                "(min: -100 Celsius, max: 100 Celsius)"
            )
            )
            logger.error(err_msg)
            self.log.error(err_msg)
            raise ValueError(err_msg)
            raise ValueError(err_msg)

    def calculate_heating_output(self, outside_temperature_celsius: float) -> float:
    def calculate_heating_output(self, outside_temperature_celsius: float) -> float:
@@ -86,7 +86,7 @@ class Heatpump:
                f"Outside temperature '{outside_temperature_celsius}' not in range "
                f"Outside temperature '{outside_temperature_celsius}' not in range "
                "(min: -100 Celsius, max: 100 Celsius)"
                "(min: -100 Celsius, max: 100 Celsius)"
            )
            )
            logger.error(err_msg)
            self.log.error(err_msg)
            raise ValueError(err_msg)
            raise ValueError(err_msg)

    def calculate_heat_power(self, outside_temperature_celsius: float) -> float:
    def calculate_heat_power(self, outside_temperature_celsius: float) -> float:
@@ -110,16 +110,16 @@ class Heatpump:
                f"Outside temperature '{outside_temperature_celsius}' not in range "
                f"Outside temperature '{outside_temperature_celsius}' not in range "
                "(min: -100 Celsius, max: 100 Celsius)"
                "(min: -100 Celsius, max: 100 Celsius)"
            )
            )
            logger.error(err_msg)
            self.log.error(err_msg)
            raise ValueError(err_msg)
            raise ValueError(err_msg)

    def simulate_24h(self, temperatures: Sequence[float]) -> List[float]:
    def simulate_24h(self, temperatures: Sequence[float]) -> List[float]:
        """Simulate power data for 24 hours based on provided temperatures."""
        """Simulate power data for 24 hours based on provided temperatures."""
        power_data: List[float] = []
        power_data: List[float] = []

        if len(temperatures) != self.hours:
        if len(temperatures) != self.prediction_hours:
            raise ValueError(
            raise ValueError(
                f"The temperature array must contain exactly {self.hours} entries, "
                f"The temperature array must contain exactly {self.prediction_hours} entries, "
                "one for each hour of the day."
                "one for each hour of the day."
            )
            )
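The constants visible in the hunk suggest a linear COP model (`COP_COEFFICIENT = 0.1` per degree). A sketch of that relationship; `COP_BASE` is an assumed name and value, since the diff does not show the base constant:

COP_BASE = 3.0         # assumed COP at 0 degrees Celsius (not shown in the diff)
COP_COEFFICIENT = 0.1  # COP increase per degree, from the diff

def cop_at(outside_temp_c: float) -> float:
    # Linear approximation: COP rises with milder outside temperatures
    return COP_BASE + COP_COEFFICIENT * outside_temp_c

print(cop_at(10.0))  # 4.0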
@@ -1,31 +1,64 @@
from typing import Optional
from typing import Optional

from loguru import logger
from pydantic import Field
from scipy.interpolate import RegularGridInterpolator

from akkudoktoreos.devices.genetic.battery import Battery
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.optimization.genetic.geneticdevices import InverterParameters
from akkudoktoreos.core.pydantic import ParametersBaseModel
from akkudoktoreos.prediction.interpolator import get_eos_load_interpolator
from akkudoktoreos.devices.battery import Battery
from akkudoktoreos.devices.devicesabc import DeviceBase

logger = get_logger(__name__)


class Inverter:
class InverterParameters(ParametersBaseModel):
    max_power_wh: float = Field(gt=0)


class Inverter(DeviceBase):
    def __init__(
    def __init__(
        self,
        self,
        parameters: InverterParameters,
        self_consumption_predictor: RegularGridInterpolator,
        parameters: Optional[InverterParameters] = None,
        battery: Optional[Battery] = None,
        battery: Optional[Battery] = None,
        provider_id: Optional[str] = None,
    ):
    ):
        self.parameters: InverterParameters = parameters
        # Configuration initialisation
        self.battery: Optional[Battery] = battery
        self.provider_id = provider_id
        self._setup()
        self.prefix = "<invalid>"
        if self.provider_id == "GenericInverter":
            self.prefix = "inverter"
        # Parameter initialisation
        self.parameters = parameters
        if battery is None:
            # For the moment raise exception
            # TODO: Make battery configurable by config
            error_msg = "Battery for PV inverter is mandatory."
            logger.error(error_msg)
            raise NotImplementedError(error_msg)
        self.battery = battery  # Connection to a battery object
        self.self_consumption_predictor = self_consumption_predictor

    def _setup(self) -> None:
        self.initialised = False
        if self.battery and self.parameters.battery_id != self.battery.parameters.device_id:
        # Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
            error_msg = f"Battery ID mismatch - {self.parameters.battery_id} is configured; got {self.battery.parameters.device_id}."
        if self.parameters is not None:
            self.setup()

    def setup(self) -> None:
        if self.initialised:
            return
        if self.provider_id is not None:
            # Setup by configuration
            self.max_power_wh = getattr(self.config, f"{self.prefix}_power_max")
        elif self.parameters is not None:
            # Setup by parameters
            self.max_power_wh = (
                self.parameters.max_power_wh  # Maximum power that the inverter can handle
            )
        else:
            error_msg = "Parameters and provider ID missing. Can't instantiate."
            logger.error(error_msg)
            logger.error(error_msg)
            raise ValueError(error_msg)
            raise ValueError(error_msg)
        self.self_consumption_predictor = get_eos_load_interpolator()
        self.max_power_wh = (
            self.parameters.max_power_wh
        )  # Maximum power that the inverter can handle

    def process_energy(
    def process_energy(
        self, generation: float, consumption: float, hour: int
        self, generation: float, consumption: float, hour: int
@@ -43,7 +76,6 @@ class Inverter:
            grid_import = -remaining_power  # Negative indicates feeding into the grid
            grid_import = -remaining_power  # Negative indicates feeding into the grid
            self_consumption = self.max_power_wh
            self_consumption = self.max_power_wh
        else:
        else:
            # Calculate scr using cached results per energy management/optimization run
            scr = self.self_consumption_predictor.calculate_self_consumption(
            scr = self.self_consumption_predictor.calculate_self_consumption(
                consumption, generation
                consumption, generation
            )
            )
@@ -55,12 +87,11 @@ class Inverter:

            if remaining_load_evq > 0:
            if remaining_load_evq > 0:
                # The battery must cover the remaining consumption
                # The battery must cover the remaining consumption
                if self.battery:
                from_battery, discharge_losses = self.battery.discharge_energy(
                    from_battery, discharge_losses = self.battery.discharge_energy(
                    remaining_load_evq, hour
                        remaining_load_evq, hour
                )
                    )
                remaining_load_evq -= from_battery  # Remaining consumption after battery discharge
                    remaining_load_evq -= from_battery  # Remaining consumption after battery discharge
                losses += discharge_losses
                    losses += discharge_losses

                # If the battery cannot fully cover the remaining consumption, the rest is drawn from the grid
                # If the battery cannot fully cover the remaining consumption, the rest is drawn from the grid
                if remaining_load_evq > 0:
                if remaining_load_evq > 0:
@@ -71,13 +102,10 @@ class Inverter:

        if remaining_power > 0:
        if remaining_power > 0:
            # Load battery with excess energy
            # Load battery with excess energy
            if self.battery:
            charged_energie, charge_losses = self.battery.charge_energy(
                charged_energie, charge_losses = self.battery.charge_energy(
                remaining_power, hour
                    remaining_power, hour
            )
                )
            remaining_surplus = remaining_power - (charged_energie + charge_losses)
                remaining_surplus = remaining_power - (charged_energie + charge_losses)
            else:
                remaining_surplus = remaining_power

            # Feed-in to the grid based on remaining capacity
            # Feed-in to the grid based on remaining capacity
            if remaining_surplus > self.max_power_wh - consumption:
            if remaining_surplus > self.max_power_wh - consumption:
@@ -97,13 +125,10 @@ class Inverter:
            available_ac_power = max(self.max_power_wh - generation, 0)
            available_ac_power = max(self.max_power_wh - generation, 0)

            # Discharge battery to cover shortfall, if possible
            # Discharge battery to cover shortfall, if possible
            if self.battery:
            battery_discharge, discharge_losses = self.battery.discharge_energy(
                battery_discharge, discharge_losses = self.battery.discharge_energy(
                min(shortfall, available_ac_power), hour
                    min(shortfall, available_ac_power), hour
            )
                )
            losses += discharge_losses
                losses += discharge_losses
            else:
                battery_discharge = 0

            # Draw remaining required power from the grid (discharge_losses are already subtracted in the battery)
            # Draw remaining required power from the grid (discharge_losses are already subtracted in the battery)
            grid_import = shortfall - battery_discharge
            grid_import = shortfall - battery_discharge
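The routing order in `process_energy` (in both variants) is: surplus charges the battery first and only the remainder is fed in, capped by the inverter limit; a shortfall is covered by battery discharge first and the grid last. A condensed sketch of the surplus branch; `route_surplus` and the stub charge function are illustrative, not part of the module:

def route_surplus(surplus_wh, charge_fn, max_feed_in_wh):
    stored_wh, losses_wh = charge_fn(surplus_wh)    # battery takes what it can
    remaining_wh = surplus_wh - (stored_wh + losses_wh)
    feed_in_wh = min(remaining_wh, max_feed_in_wh)  # cap at the inverter limit
    return feed_in_wh, losses_wh

# Stub standing in for Battery.charge_energy(wh, hour): 95 % efficiency, unlimited headroom.
feed_in, losses = route_surplus(3000.0, lambda wh: (wh * 0.95, wh * 0.05), 10000.0)
print(feed_in, losses)  # 0.0 150.0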
@@ -6,77 +6,90 @@ data records for measurements.
The measurements can be added programmatically or imported from a file or JSON string.
The measurements can be added programmatically or imported from a file or JSON string.
"""
"""

from typing import Any, Optional
from typing import Any, ClassVar, List, Optional

import numpy as np
import numpy as np
from loguru import logger
from numpydantic import NDArray, Shape
from numpydantic import NDArray, Shape
from pendulum import DateTime, Duration
from pydantic import Field, computed_field
from pydantic import Field, computed_field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.core.dataabc import DataImportMixin, DataRecord, DataSequence
from akkudoktoreos.core.dataabc import DataImportMixin, DataRecord, DataSequence
from akkudoktoreos.utils.datetimeutil import DateTime, Duration, to_duration
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.utils.datetimeutil import to_duration

logger = get_logger(__name__)


class MeasurementCommonSettings(SettingsBaseModel):
class MeasurementCommonSettings(SettingsBaseModel):
    """Measurement Configuration."""
    measurement_load0_name: Optional[str] = Field(
        default=None, description="Name of the load0 source (e.g. 'Household', 'Heat Pump')"
    load_emr_keys: Optional[list[str]] = Field(
        default=None,
        json_schema_extra={
            "description": "The keys of the measurements that are energy meter readings of a load [kWh].",
            "examples": [["load0_emr"]],
        },
    )
    )
    measurement_load1_name: Optional[str] = Field(
    grid_export_emr_keys: Optional[list[str]] = Field(
        default=None, description="Name of the load1 source (e.g. 'Household', 'Heat Pump')"
        default=None,
        json_schema_extra={
            "description": "The keys of the measurements that are energy meter readings of energy export to grid [kWh].",
            "examples": [["grid_export_emr"]],
        },
    )
    )
    measurement_load2_name: Optional[str] = Field(
    grid_import_emr_keys: Optional[list[str]] = Field(
        default=None, description="Name of the load2 source (e.g. 'Household', 'Heat Pump')"
        default=None,
        json_schema_extra={
            "description": "The keys of the measurements that are energy meter readings of energy import from grid [kWh].",
            "examples": [["grid_import_emr"]],
        },
    )
    )
    measurement_load3_name: Optional[str] = Field(
    pv_production_emr_keys: Optional[list[str]] = Field(
        default=None, description="Name of the load3 source (e.g. 'Household', 'Heat Pump')"
        default=None,
    )
        json_schema_extra={
    measurement_load4_name: Optional[str] = Field(
            "description": "The keys of the measurements that are PV production energy meter readings [kWh].",
        default=None, description="Name of the load4 source (e.g. 'Household', 'Heat Pump')"
            "examples": [["pv1_emr"]],
        },
    )
    )

    ## Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def keys(self) -> list[str]:
        """The keys of the measurements that can be stored."""
        key_list = []
        for key in self.__class__.model_fields.keys():
            if key.endswith("_keys") and (value := getattr(self, key)):
                key_list.extend(value)
        return sorted(set(key_list))


class MeasurementDataRecord(DataRecord):
class MeasurementDataRecord(DataRecord):
    """Represents a measurement data record containing various measurements at a specific datetime."""
    """Represents a measurement data record containing various measurements at a specific datetime.

    @classmethod
    Attributes:
    def configured_data_keys(cls) -> Optional[list[str]]:
        date_time (Optional[DateTime]): The datetime of the record.
        """Return the keys for the configured field like data."""
    """
        keys = cls.config.measurement.keys
        # Add measurement keys that are needed/handled by the resource/device simulations.
    # Single loads, to be aggregated to total load
        if cls.config.devices.measurement_keys:
    measurement_load0_mr: Optional[float] = Field(
            keys.extend(cls.config.devices.measurement_keys)
        default=None, ge=0, description="Load0 meter reading [kWh]"
        return keys
    )
    measurement_load1_mr: Optional[float] = Field(
        default=None, ge=0, description="Load1 meter reading [kWh]"
    )
    measurement_load2_mr: Optional[float] = Field(
        default=None, ge=0, description="Load2 meter reading [kWh]"
    )
    measurement_load3_mr: Optional[float] = Field(
        default=None, ge=0, description="Load3 meter reading [kWh]"
    )
    measurement_load4_mr: Optional[float] = Field(
        default=None, ge=0, description="Load4 meter reading [kWh]"
    )

    measurement_max_loads: ClassVar[int] = 5  # Maximum number of loads that can be set

    measurement_grid_export_mr: Optional[float] = Field(
        default=None, ge=0, description="Export to grid meter reading [kWh]"
    )

    measurement_grid_import_mr: Optional[float] = Field(
        default=None, ge=0, description="Import from grid meter reading [kWh]"
    )

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def measurement_loads(self) -> List[str]:
        """Compute a list of active loads."""
        active_loads = []

        # Loop through measurement_loadx
        for i in range(self.measurement_max_loads):
            load_attr = f"measurement_load{i}_mr"

            # Check if either attribute is set and add to active loads
            if getattr(self, load_attr, None):
                active_loads.append(load_attr)

        return active_loads


class Measurement(SingletonMixin, DataImportMixin, DataSequence):
class Measurement(SingletonMixin, DataImportMixin, DataSequence):
@@ -85,14 +98,13 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
    Measurements can be provided programmatically or read from JSON string or file.
    Measurements can be provided programmatically or read from JSON string or file.
    """
    """

    records: list[MeasurementDataRecord] = Field(
    records: List[MeasurementDataRecord] = Field(
        default_factory=list, json_schema_extra={"description": "list of measurement data records"}
        default_factory=list, description="List of measurement data records"
    )
    )

    def __init__(self, *args: Any, **kwargs: Any) -> None:
    topics: ClassVar[List[str]] = [
        if hasattr(self, "_initialized"):
        "measurement_load",
            return
    ]
        super().__init__(*args, **kwargs)

    def _interval_count(
    def _interval_count(
        self, start_datetime: DateTime, end_datetime: DateTime, interval: Duration
        self, start_datetime: DateTime, end_datetime: DateTime, interval: Duration
@@ -124,6 +136,29 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
        # Return ceiling of division to include partial intervals
        # Return ceiling of division to include partial intervals
        return int(np.ceil(diff_seconds / interval_seconds))
        return int(np.ceil(diff_seconds / interval_seconds))

    def name_to_key(self, name: str, topic: str) -> Optional[str]:
        """Provides measurement key for given name and topic."""
        topic = topic.lower()

        if topic not in self.topics:
            return None

        topic_keys = [key for key in self.config.config_keys if key.startswith(topic)]
        key = None
        if topic == "measurement_load":
            for config_key in topic_keys:
                if config_key.endswith("_name") and getattr(self.config, config_key) == name:
                    key = topic + config_key[len(topic) : len(topic) + 1] + "_mr"
                    break

        if key is not None and key not in self.record_keys:
            # Should never happen
            error_msg = f"Key '{key}' not available."
            logger.error(error_msg)
            raise KeyError(error_msg)

        return key

    def _energy_from_meter_readings(
    def _energy_from_meter_readings(
        self,
        self,
        key: str,
        key: str,
@@ -208,20 +243,17 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
        end_datetime = self[-1].date_time
        end_datetime = self[-1].date_time
        size = self._interval_count(start_datetime, end_datetime, interval)
        size = self._interval_count(start_datetime, end_datetime, interval)
        load_total_array = np.zeros(size)
        load_total_array = np.zeros(size)
        # Loop through all loads
        # Loop through measurement_load<x>_mr
        if isinstance(self.config.measurement.load_emr_keys, list):
        for i in range(self.record_class().measurement_max_loads):
            for key in self.config.measurement.load_emr_keys:
            key = f"measurement_load{i}_mr"
                # Calculate load per interval
            # Calculate load per interval
                load_array = self._energy_from_meter_readings(
            load_array = self._energy_from_meter_readings(
                    key=key,
                key=key, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
                    start_datetime=start_datetime,
            )
                    end_datetime=end_datetime,
            # Add calculated load to total load
                    interval=interval,
            load_total_array += load_array
                )
            debug_msg = f"Total load '{key}' calculation: {load_total_array}"
                # Add calculated load to total load
            logger.debug(debug_msg)
                load_total_array += load_array
                debug_msg = f"Total load '{key}' calculation: {load_total_array}"
                logger.debug(debug_msg)

        return load_total_array
        return load_total_array
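Both variants of the load aggregation build on the same idea as `_energy_from_meter_readings`: energy per interval is the difference of consecutive cumulative meter readings. A sketch with hypothetical hourly readings:

import numpy as np

readings_kwh = np.array([100.0, 100.4, 101.1, 101.1, 102.0])  # cumulative meter values
energy_per_hour_kwh = np.diff(readings_kwh)
print(energy_per_hour_kwh)  # [0.4 0.7 0.  0.9]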
683  src/akkudoktoreos/optimization/genetic.py  Normal file
@@ -0,0 +1,683 @@
import logging
import random
import time
from pathlib import Path
from typing import Any, Optional

import numpy as np
from deap import algorithms, base, creator, tools
from pydantic import Field, field_validator, model_validator
from typing_extensions import Self

from akkudoktoreos.core.coreabc import (
    ConfigMixin,
    DevicesMixin,
    EnergyManagementSystemMixin,
)
from akkudoktoreos.core.ems import EnergieManagementSystemParameters, SimulationResult
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import ParametersBaseModel
from akkudoktoreos.devices.battery import (
    Battery,
    ElectricVehicleParameters,
    ElectricVehicleResult,
    SolarPanelBatteryParameters,
)
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
from akkudoktoreos.prediction.interpolator import SelfConsumptionProbabilityInterpolator
from akkudoktoreos.utils.utils import NumpyEncoder

logger = get_logger(__name__)


class OptimizationParameters(ParametersBaseModel):
    ems: EnergieManagementSystemParameters
    pv_akku: Optional[SolarPanelBatteryParameters]
    inverter: Optional[InverterParameters]
    eauto: Optional[ElectricVehicleParameters]
    dishwasher: Optional[HomeApplianceParameters] = None
    temperature_forecast: Optional[list[Optional[float]]] = Field(
        default=None,
        description="An array of floats representing the temperature forecast in degrees Celsius for different time intervals.",
    )
    start_solution: Optional[list[float]] = Field(
        default=None, description="Can be `null` or contain a previous solution (if available)."
    )

    @model_validator(mode="after")
    def validate_list_length(self) -> Self:
        arr_length = len(self.ems.pv_prognose_wh)
        if self.temperature_forecast is not None and arr_length != len(self.temperature_forecast):
            raise ValueError("Input lists have different lengths")
        return self

    @field_validator("start_solution")
    def validate_start_solution(
        cls, start_solution: Optional[list[float]]
    ) -> Optional[list[float]]:
        if start_solution is not None and len(start_solution) < 2:
            raise ValueError("Requires at least two values.")
        return start_solution


class OptimizeResponse(ParametersBaseModel):
    """**Note**: The first value of "Last_Wh_per_hour", "Netzeinspeisung_Wh_per_hour", and "Netzbezug_Wh_per_hour" will be set to null in the JSON output and represented as NaN or None in the corresponding classes' data returns. This approach is adopted to ensure that the current hour's processing remains unchanged."""

    ac_charge: list[float] = Field(
        description="Array with AC charging values as relative power (0-1), other values set to 0."
    )
    dc_charge: list[float] = Field(
        description="Array with DC charging values as relative power (0-1), other values set to 0."
    )
    discharge_allowed: list[int] = Field(
        description="Array with discharge values (1 for discharge, 0 otherwise)."
    )
    eautocharge_hours_float: Optional[list[float]] = Field(description="TBD")
    result: SimulationResult
    eauto_obj: Optional[ElectricVehicleResult]
    start_solution: Optional[list[float]] = Field(
        default=None,
        description="An array of binary values (0 or 1) representing a possible starting solution for the simulation.",
    )
    washingstart: Optional[int] = Field(
        default=None,
        description="Can be `null` or contain an object representing the start of washing (if applicable).",
    )

    @field_validator(
        "ac_charge",
        "dc_charge",
        "discharge_allowed",
        mode="before",
    )
    def convert_numpy(cls, field: Any) -> Any:
        return NumpyEncoder.convert_numpy(field)[0]

    @field_validator(
        "eauto_obj",
        mode="before",
    )
    def convert_eauto(cls, field: Any) -> Any:
        if isinstance(field, Battery):
            return ElectricVehicleResult(**field.to_dict())
        return field
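The cross-field check in `validate_list_length` is a standard pydantic v2 pattern: a `model_validator(mode="after")` that runs once all fields are parsed. A minimal self-contained sketch of the same pattern (hypothetical model name):

from typing import Optional

from pydantic import BaseModel, model_validator
from typing_extensions import Self

class ForecastPair(BaseModel):
    pv_prognose_wh: list[float]
    temperature_forecast: Optional[list[float]] = None

    @model_validator(mode="after")
    def check_lengths(self) -> Self:
        # Reject mismatched forecast arrays after both fields are parsed
        if self.temperature_forecast is not None and len(
            self.temperature_forecast
        ) != len(self.pv_prognose_wh):
            raise ValueError("Input lists have different lengths")
        return self

ForecastPair(pv_prognose_wh=[0.0, 120.5], temperature_forecast=[3.2, 4.1])  # passes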
||||||
|
class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
verbose: bool = False,
|
||||||
|
fixed_seed: Optional[int] = None,
|
||||||
|
):
|
||||||
|
"""Initialize the optimization problem with the required parameters."""
|
||||||
|
self.opti_param: dict[str, Any] = {}
|
||||||
|
self.fixed_eauto_hours = self.config.prediction_hours - self.config.optimization_hours
|
||||||
|
self.possible_charge_values = self.config.optimization_ev_available_charge_rates_percent
|
||||||
|
self.verbose = verbose
|
||||||
|
self.fix_seed = fixed_seed
|
||||||
|
self.optimize_ev = True
|
||||||
|
self.optimize_dc_charge = False
|
||||||
|
self.fitness_history: dict[str, Any] = {}
|
||||||
|
|
||||||
|
# Set a fixed seed for random operations if provided or in debug mode
|
||||||
|
if self.fix_seed is not None:
|
||||||
|
random.seed(self.fix_seed)
|
||||||
|
elif logger.level == logging.DEBUG:
|
||||||
|
self.fix_seed = random.randint(1, 100000000000)
|
||||||
|
random.seed(self.fix_seed)
|
||||||
|
|
||||||
|
def decode_charge_discharge(
|
||||||
|
self, discharge_hours_bin: np.ndarray
|
||||||
|
) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
|
||||||
|
"""Decode the input array into ac_charge, dc_charge, and discharge arrays."""
|
||||||
|
discharge_hours_bin_np = np.array(discharge_hours_bin)
|
||||||
|
len_ac = len(self.possible_charge_values)
|
||||||
|
|
||||||
|
# Categorization:
|
||||||
|
# Idle: 0 .. len_ac-1
|
||||||
|
# Discharge: len_ac .. 2*len_ac - 1
|
||||||
|
# AC Charge: 2*len_ac .. 3*len_ac - 1
|
||||||
|
# DC optional: 3*len_ac (not allowed), 3*len_ac + 1 (allowed)
|
||||||
|
|
||||||
|
# Idle has no charge, Discharge has binary 1, AC Charge has corresponding values
|
||||||
|
# Idle states
|
||||||
|
idle_mask = (discharge_hours_bin_np >= 0) & (discharge_hours_bin_np < len_ac)
|
||||||
|
|
||||||
|
# Discharge states
|
||||||
|
discharge_mask = (discharge_hours_bin_np >= len_ac) & (discharge_hours_bin_np < 2 * len_ac)
|
||||||
|
|
||||||
|
# AC states
|
||||||
|
ac_mask = (discharge_hours_bin_np >= 2 * len_ac) & (discharge_hours_bin_np < 3 * len_ac)
|
||||||
|
ac_indices = (discharge_hours_bin_np[ac_mask] - 2 * len_ac).astype(int)
|
||||||
|
|
||||||
|
# DC states (if enabled)
|
||||||
|
if self.optimize_dc_charge:
|
||||||
|
dc_not_allowed_state = 3 * len_ac
|
||||||
|
dc_allowed_state = 3 * len_ac + 1
|
||||||
|
dc_charge = np.where(discharge_hours_bin_np == dc_allowed_state, 1, 0)
|
||||||
|
else:
|
||||||
|
dc_charge = np.ones_like(discharge_hours_bin_np, dtype=float)
|
||||||
|
|
||||||
|
# Generate the result arrays
|
||||||
|
discharge = np.zeros_like(discharge_hours_bin_np, dtype=int)
|
||||||
|
discharge[discharge_mask] = 1 # Set Discharge states to 1
|
||||||
|
|
||||||
|
ac_charge = np.zeros_like(discharge_hours_bin_np, dtype=float)
|
||||||
|
ac_charge[ac_mask] = [self.possible_charge_values[i] for i in ac_indices]
|
||||||
|
|
||||||
|
# Idle is just 0, already default.
|
||||||
|
|
||||||
|
return ac_charge, dc_charge, discharge
|
||||||
|
|
||||||
|
def mutate(self, individual: list[int]) -> tuple[list[int]]:
|
||||||
|
"""Custom mutation function for the individual."""
|
||||||
|
# Calculate the number of states
|
||||||
|
len_ac = len(self.possible_charge_values)
|
||||||
|
if self.optimize_dc_charge:
|
||||||
|
total_states = 3 * len_ac + 2
|
||||||
|
else:
|
||||||
|
total_states = 3 * len_ac

        # 1. Mutate the charge/discharge part
        charge_discharge_part = individual[: self.config.prediction_hours]
        (charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)

        # Clamp dynamically to the valid state range instead of a fixed 0..8 or 0..6:
        charge_discharge_mutated = np.clip(charge_discharge_mutated, 0, total_states - 1)
        individual[: self.config.prediction_hours] = charge_discharge_mutated

        # 2. Mutate the EV charge part, if active
        if self.optimize_ev:
            ev_charge_part = individual[
                self.config.prediction_hours : self.config.prediction_hours * 2
            ]
            (ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)
            ev_charge_part_mutated[self.config.prediction_hours - self.fixed_eauto_hours :] = [
                0
            ] * self.fixed_eauto_hours
            individual[self.config.prediction_hours : self.config.prediction_hours * 2] = (
                ev_charge_part_mutated
            )

        # 3. Mutate the appliance start time, if applicable
        if self.opti_param["home_appliance"] > 0:
            appliance_part = [individual[-1]]
            (appliance_part_mutated,) = self.toolbox.mutate_hour(appliance_part)
            individual[-1] = appliance_part_mutated[0]

        return (individual,)

    # Method to create an individual based on the conditions
    def create_individual(self) -> list[int]:
        # Start with discharge states for the individual
        individual_components = [
            self.toolbox.attr_discharge_state() for _ in range(self.config.prediction_hours)
        ]

        # Add EV charge index values if optimize_ev is True
        if self.optimize_ev:
            individual_components += [
                self.toolbox.attr_ev_charge_index() for _ in range(self.config.prediction_hours)
            ]

        # Add the start time of the household appliance if it's being optimized
        if self.opti_param["home_appliance"] > 0:
            individual_components += [self.toolbox.attr_int()]

        return creator.Individual(individual_components)

    def merge_individual(
        self,
        discharge_hours_bin: np.ndarray,
        eautocharge_hours_index: Optional[np.ndarray],
        washingstart_int: Optional[int],
    ) -> list[int]:
        """Merge the individual components back into a single solution list.

        Parameters:
            discharge_hours_bin (np.ndarray): Binary discharge hours.
            eautocharge_hours_index (Optional[np.ndarray]): EV charge hours as integers, or None.
            washingstart_int (Optional[int]): Dishwasher start time as integer, or None.

        Returns:
            list[int]: The merged individual solution as a list of integers.
        """
        # Start with the discharge hours
        individual = discharge_hours_bin.tolist()

        # Add EV charge hours if applicable
        if self.optimize_ev and eautocharge_hours_index is not None:
            individual.extend(eautocharge_hours_index.tolist())
        elif self.optimize_ev:
            # If optimize_ev is active but no EV data is available, append zeros
            individual.extend([0] * self.config.prediction_hours)

        # Add dishwasher start time if applicable
        if self.opti_param.get("home_appliance", 0) > 0 and washingstart_int is not None:
            individual.append(washingstart_int)
        elif self.opti_param.get("home_appliance", 0) > 0:
            # If a home appliance is optimized but no start time is available, append 0
            individual.append(0)

        return individual
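
    # Layout sketch of a merged individual (assuming prediction_hours = 48,
    # optimize_ev active and a home appliance configured):
    #   genes [0:48]  -> charge/discharge states
    #   genes [48:96] -> EV charge-rate indices
    #   gene  [-1]    -> appliance start hour
    # split_individual() below reverses this merge.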

    def split_individual(
        self, individual: list[int]
    ) -> tuple[np.ndarray, Optional[np.ndarray], Optional[int]]:
        """Split the individual solution into its components.

        Components:
        1. Discharge hours (binary as int NumPy array),
        2. Electric vehicle charge hours (float as int NumPy array, if applicable),
        3. Dishwasher start time (integer if applicable).
        """
        # Discharge hours as a NumPy array of ints
        discharge_hours_bin = np.array(individual[: self.config.prediction_hours], dtype=int)

        # EV charge hours as a NumPy array of ints (if optimize_ev is True)
        eautocharge_hours_index = (
            np.array(
                individual[self.config.prediction_hours : self.config.prediction_hours * 2],
                dtype=int,
            )
            if self.optimize_ev
            else None
        )

        # Washing machine start time as an integer (if applicable)
        washingstart_int = (
            int(individual[-1])
            if self.opti_param and self.opti_param.get("home_appliance", 0) > 0
            else None
        )

        return discharge_hours_bin, eautocharge_hours_index, washingstart_int

    def setup_deap_environment(self, opti_param: dict[str, Any], start_hour: int) -> None:
        """Set up the DEAP environment with fitness and individual creation rules."""
        self.opti_param = opti_param

        # Remove existing definitions if any
        for attr in ["FitnessMin", "Individual"]:
            if attr in creator.__dict__:
                del creator.__dict__[attr]

        creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
        creator.create("Individual", list, fitness=creator.FitnessMin)

        self.toolbox = base.Toolbox()
        len_ac = len(self.possible_charge_values)

        # Total number of states without DC:
        # Idle:      len_ac states
        # Discharge: len_ac states
        # AC-Charge: len_ac states
        # Total without DC: 3 * len_ac

        # With DC: + 2 states
        if self.optimize_dc_charge:
            total_states = 3 * len_ac + 2
        else:
            total_states = 3 * len_ac

        # State space: 0 .. (total_states - 1)
        self.toolbox.register("attr_discharge_state", random.randint, 0, total_states - 1)

        # EV attributes
        if self.optimize_ev:
            self.toolbox.register(
                "attr_ev_charge_index",
                random.randint,
                0,
                len_ac - 1,
            )

        # Household appliance start time
        self.toolbox.register("attr_int", random.randint, start_hour, 23)

        self.toolbox.register("individual", self.create_individual)
        self.toolbox.register("population", tools.initRepeat, list, self.toolbox.individual)
        self.toolbox.register("mate", tools.cxTwoPoint)

        # Mutation operator for charge/discharge states
        self.toolbox.register(
            "mutate_charge_discharge", tools.mutUniformInt, low=0, up=total_states - 1, indpb=0.2
        )

        # Mutation operator for EV states
        self.toolbox.register(
            "mutate_ev_charge_index",
            tools.mutUniformInt,
            low=0,
            up=len_ac - 1,
            indpb=0.2,
        )

        # Mutation for household appliance
        self.toolbox.register("mutate_hour", tools.mutUniformInt, low=start_hour, up=23, indpb=0.2)

        # Custom mutate function remains unchanged
        self.toolbox.register("mutate", self.mutate)
        self.toolbox.register("select", tools.selTournament, tournsize=3)
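        # Note: "mate", "mutate", "select" and "evaluate" (registered later in
        # optimierung_ems) are the toolbox aliases DEAP's eaMuPlusLambda expects;
        # selTournament with tournsize=3 picks each parent as the fittest of
        # three randomly drawn individuals.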

    def evaluate_inner(self, individual: list[int]) -> dict[str, Any]:
        """Simulates the energy management system (EMS) using the provided individual solution.

        This is an internal function.
        """
        self.ems.reset()
        discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
            individual
        )
        if self.opti_param.get("home_appliance", 0) > 0:
            self.ems.set_home_appliance_start(
                washingstart_int, global_start_hour=self.ems.start_datetime.hour
            )

        ac, dc, discharge = self.decode_charge_discharge(discharge_hours_bin)

        self.ems.set_akku_discharge_hours(discharge)
        # Set DC charge hours only if DC optimization is enabled
        if self.optimize_dc_charge:
            self.ems.set_akku_dc_charge_hours(dc)
        self.ems.set_akku_ac_charge_hours(ac)

        if eautocharge_hours_index is not None:
            eautocharge_hours_float = np.array(
                [self.possible_charge_values[i] for i in eautocharge_hours_index],
                float,
            )
            self.ems.set_ev_charge_hours(eautocharge_hours_float)
        else:
            self.ems.set_ev_charge_hours(np.full(self.config.prediction_hours, 0))

        return self.ems.simulate(self.ems.start_datetime.hour)

    def evaluate(
        self,
        individual: list[int],
        parameters: OptimizationParameters,
        start_hour: int,
        worst_case: bool,
    ) -> tuple[float]:
        """Evaluate the fitness of an individual solution based on the simulation results."""
        try:
            o = self.evaluate_inner(individual)
        except Exception:
            return (100000.0,)  # Return a high penalty in case of an exception

        gesamtbilanz = o["Gesamtbilanz_Euro"] * (-1.0 if worst_case else 1.0)

        discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
            individual
        )

        # An EV at 100% SoC with charging still scheduled is not allowed
        if self.optimize_ev:
            eauto_soc_per_hour = np.array(o.get("EAuto_SoC_pro_Stunde", []))  # example key

            if eauto_soc_per_hour is None or eautocharge_hours_index is None:
                raise ValueError("eauto_soc_per_hour or eautocharge_hours_index is None")
            min_length = min(eauto_soc_per_hour.size, eautocharge_hours_index.size)
            eauto_soc_per_hour_tail = eauto_soc_per_hour[-min_length:]
            eautocharge_hours_index_tail = eautocharge_hours_index[-min_length:]

            # Mask of hours where the EV is already full but charging is scheduled
            invalid_charge_mask = (eauto_soc_per_hour_tail == 100) & (
                eautocharge_hours_index_tail > 0
            )

            if np.any(invalid_charge_mask):
                invalid_indices = np.where(invalid_charge_mask)[0]
                if len(invalid_indices) > 1:
                    eautocharge_hours_index_tail[invalid_indices[1:]] = 0
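                    # Only the first violating hour keeps its scheduled charge
                    # (presumably the hour in which the EV actually reaches 100%);
                    # all later conflicting charge genes are zeroed.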

            eautocharge_hours_index[-min_length:] = eautocharge_hours_index_tail.tolist()

            adjusted_individual = self.merge_individual(
                discharge_hours_bin, eautocharge_hours_index, washingstart_int
            )

            individual[:] = adjusted_individual

        # New check: Activate discharge when battery SoC is 0
        # battery_soc_per_hour = np.array(
        #     o.get("akku_soc_pro_stunde", [])
        # )  # Example key for battery SoC

        # if battery_soc_per_hour is not None:
        #     if battery_soc_per_hour is None or discharge_hours_bin is None:
        #         raise ValueError("battery_soc_per_hour or discharge_hours_bin is None")
        #     min_length = min(battery_soc_per_hour.size, discharge_hours_bin.size)
        #     battery_soc_per_hour_tail = battery_soc_per_hour[-min_length:]
        #     discharge_hours_bin_tail = discharge_hours_bin[-min_length:]
        #     len_ac = len(self.config.optimization_ev_available_charge_rates_percent)

        #     # # Find hours where battery SoC is 0
        #     # zero_soc_mask = battery_soc_per_hour_tail == 0
        #     # discharge_hours_bin_tail[zero_soc_mask] = (
        #     #     len_ac + 2
        #     # )  # Activate discharge for these hours

        #     # When battery SoC is 0, set discharge randomly to 0 or 1; otherwise it is
        #     # very unlikely to reach a state where the battery can store energy for a
        #     # longer time.
        #     # Find hours where battery SoC is 0
        #     zero_soc_mask = battery_soc_per_hour_tail == 0
        #     # discharge_hours_bin_tail[zero_soc_mask] = (
        #     #     len_ac + 2
        #     # )  # Activate discharge for these hours
        #     set_to_len_ac_plus_2 = np.random.rand() < 0.5  # True with 50% probability

        #     # Set values based on the random decision
        #     value_to_set = len_ac + 2 if set_to_len_ac_plus_2 else 0
        #     discharge_hours_bin_tail[zero_soc_mask] = value_to_set

        #     # Merge the updated discharge_hours_bin back into the individual
        #     adjusted_individual = self.merge_individual(
        #         discharge_hours_bin, eautocharge_hours_index, washingstart_int
        #     )
        #     individual[:] = adjusted_individual

        # More metrics
        individual.extra_data = (  # type: ignore[attr-defined]
            o["Gesamtbilanz_Euro"],
            o["Gesamt_Verluste"],
            parameters.eauto.min_soc_percentage - self.ems.ev.current_soc_percentage()
            if parameters.eauto and self.ems.ev
            else 0,
        )

        # Adjust total balance with battery value and penalties for unmet SOC
        restwert_akku = (
            self.ems.battery.current_energy_content() * parameters.ems.preis_euro_pro_wh_akku
        )
        gesamtbilanz += -restwert_akku

        if self.optimize_ev:
            gesamtbilanz += max(
                0,
                (
                    parameters.eauto.min_soc_percentage - self.ems.ev.current_soc_percentage()
                    if parameters.eauto and self.ems.ev
                    else 0
                )
                * self.config.optimization_penalty,
            )
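            # Worked example (illustrative numbers): with min_soc_percentage = 80,
            # a final EV SoC of 50% and optimization_penalty = 10, the balance is
            # worsened by max(0, (80 - 50) * 10) = 300.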

        return (gesamtbilanz,)

    def optimize(
        self, start_solution: Optional[list[float]] = None, ngen: int = 200
    ) -> tuple[Any, dict[str, list[Any]]]:
        """Run the optimization process using a genetic algorithm."""
        population = self.toolbox.population(n=300)
        hof = tools.HallOfFame(1)
        stats = tools.Statistics(lambda ind: ind.fitness.values)
        stats.register("min", np.min)
        stats.register("avg", np.mean)
        stats.register("max", np.max)

        if self.verbose:
            print("Start optimize:", start_solution)

        # Insert the start solution into the population if provided
        if start_solution is not None:
            for _ in range(10):
                population.insert(0, creator.Individual(start_solution))

        # Run the evolutionary algorithm
        pop, log = algorithms.eaMuPlusLambda(
            population,
            self.toolbox,
            mu=100,
            lambda_=150,
            cxpb=0.6,
            mutpb=0.4,
            ngen=ngen,
            stats=stats,
            halloffame=hof,
            verbose=self.verbose,
        )
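
        # With mu=100 and lambda_=150, each generation produces 150 offspring (each
        # by crossover with probability 0.6 or by mutation with probability 0.4) and
        # selects the next generation's 100 individuals, via the registered
        # tournament selection, from the union of parents and offspring.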

        # Store fitness history
        self.fitness_history = {
            "gen": log.select("gen"),  # Generation numbers (X-axis)
            "avg": log.select("avg"),  # Average fitness for each generation (Y-axis)
            "max": log.select("max"),  # Maximum fitness for each generation (Y-axis)
            "min": log.select("min"),  # Minimum fitness for each generation (Y-axis)
        }

        member: dict[str, list[float]] = {"bilanz": [], "verluste": [], "nebenbedingung": []}
        for ind in population:
            if hasattr(ind, "extra_data"):
                extra_value1, extra_value2, extra_value3 = ind.extra_data
                member["bilanz"].append(extra_value1)
                member["verluste"].append(extra_value2)
                member["nebenbedingung"].append(extra_value3)

        return hof[0], member

    def optimierung_ems(
        self,
        parameters: OptimizationParameters,
        start_hour: Optional[int] = None,
        worst_case: bool = False,
        ngen: int = 400,
    ) -> OptimizeResponse:
        """Perform EMS (Energy Management System) optimization and visualize results."""
        if start_hour is None:
            start_hour = self.ems.start_datetime.hour

        einspeiseverguetung_euro_pro_wh = np.full(
            self.config.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
        )

        # Interpolator that maps the 1h load to a sub-1h load distribution
        # (self-consumption rate)
        sc = SelfConsumptionProbabilityInterpolator(
            Path(__file__).parent.resolve() / ".." / "data" / "regular_grid_interpolator.pkl"
        )

        # Initialize PV and EV batteries
        akku: Optional[Battery] = None
        if parameters.pv_akku:
            akku = Battery(
                parameters.pv_akku,
                hours=self.config.prediction_hours,
            )
            akku.set_charge_per_hour(np.full(self.config.prediction_hours, 1))

        eauto: Optional[Battery] = None
        if parameters.eauto:
            eauto = Battery(
                parameters.eauto,
                hours=self.config.prediction_hours,
            )
            eauto.set_charge_per_hour(np.full(self.config.prediction_hours, 1))
            self.optimize_ev = (
                parameters.eauto.min_soc_percentage - parameters.eauto.initial_soc_percentage >= 0
            )
        else:
            self.optimize_ev = False

        # Initialize household appliance if applicable
        dishwasher = (
            HomeAppliance(
                parameters=parameters.dishwasher,
                hours=self.config.prediction_hours,
            )
            if parameters.dishwasher is not None
            else None
        )

        # Initialize the inverter and energy management system
        inverter: Optional[Inverter] = None
        if parameters.inverter:
            inverter = Inverter(
                sc,
                parameters.inverter,
                akku,
            )
        self.ems.set_parameters(
            parameters.ems,
            inverter=inverter,
            ev=eauto,
            home_appliance=dishwasher,
        )
        self.ems.set_start_hour(start_hour)

        # Set up the DEAP environment and optimization process
        self.setup_deap_environment({"home_appliance": 1 if dishwasher else 0}, start_hour)
        self.toolbox.register(
            "evaluate",
            lambda ind: self.evaluate(ind, parameters, start_hour, worst_case),
        )

        if self.verbose:
            start_time = time.time()
        start_solution, extra_data = self.optimize(parameters.start_solution, ngen=ngen)

        if self.verbose:
            elapsed_time = time.time() - start_time
            print(f"Time evaluate inner: {elapsed_time:.4f} sec.")
        # Perform final evaluation on the best solution
        o = self.evaluate_inner(start_solution)
        discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
            start_solution
        )
        eautocharge_hours_float = (
            [self.possible_charge_values[i] for i in eautocharge_hours_index]
            if eautocharge_hours_index is not None
            else None
        )

        ac_charge, dc_charge, discharge = self.decode_charge_discharge(discharge_hours_bin)
        # Visualize the results
        visualize = {
            "ac_charge": ac_charge.tolist(),
            "dc_charge": dc_charge.tolist(),
            "discharge_allowed": discharge.tolist(),
            "eautocharge_hours_float": eautocharge_hours_float,
            "result": o,
            "eauto_obj": self.ems.ev.to_dict(),
            "start_solution": start_solution,
            "spuelstart": washingstart_int,
            "extra_data": extra_data,
            "fitness_history": self.fitness_history,
            "fixed_seed": self.fix_seed,
        }
        from akkudoktoreos.utils.visualize import prepare_visualize

        prepare_visualize(parameters, visualize, start_hour=start_hour)

        return OptimizeResponse(
            **{
                "ac_charge": ac_charge,
                "dc_charge": dc_charge,
                "discharge_allowed": discharge,
                "eautocharge_hours_float": eautocharge_hours_float,
                "result": SimulationResult(**o),
                "eauto_obj": self.ems.ev,
                "start_solution": start_solution,
                "washingstart": washingstart_int,
            }
        )
File diff suppressed because it is too large
@@ -1,11 +0,0 @@
"""Genetic optimization algorithm abstract and base classes."""

from pydantic import ConfigDict

from akkudoktoreos.core.pydantic import PydanticBaseModel


class GeneticParametersBaseModel(PydanticBaseModel):
    """Pydantic base model for parameters for the GENETIC algorithm."""

    model_config = ConfigDict(extra="forbid")
@@ -1,159 +0,0 @@
"""Genetic optimization algorithm device interfaces/parameters."""

from typing import Optional

from pydantic import Field

from akkudoktoreos.optimization.genetic.geneticabc import GeneticParametersBaseModel
from akkudoktoreos.utils.datetimeutil import TimeWindowSequence


class DeviceParameters(GeneticParametersBaseModel):
    device_id: str = Field(
        json_schema_extra={"description": "ID of device", "examples": ["device1"]}
    )
    hours: Optional[int] = Field(
        default=None,
        gt=0,
        json_schema_extra={
            "description": "Number of prediction hours. Defaults to global config prediction hours.",
            "examples": [None],
        },
    )


def max_charging_power_field(description: Optional[str] = None) -> float:
    if description is None:
        description = "Maximum charging power in watts."
    return Field(default=5000, gt=0, json_schema_extra={"description": description})


def initial_soc_percentage_field(description: str) -> int:
    return Field(
        default=0, ge=0, le=100, json_schema_extra={"description": description, "examples": [42]}
    )


def discharging_efficiency_field(default_value: float) -> float:
    return Field(
        default=default_value,
        gt=0,
        le=1,
        json_schema_extra={
            "description": "A float representing the discharge efficiency of the battery."
        },
    )


class BaseBatteryParameters(DeviceParameters):
    """Battery Device Simulation Configuration."""

    device_id: str = Field(
        json_schema_extra={"description": "ID of battery", "examples": ["battery1"]}
    )
    capacity_wh: int = Field(
        gt=0,
        json_schema_extra={
            "description": "An integer representing the capacity of the battery in watt-hours.",
            "examples": [8000],
        },
    )
    charging_efficiency: float = Field(
        default=0.88,
        gt=0,
        le=1,
        json_schema_extra={
            "description": "A float representing the charging efficiency of the battery."
        },
    )
    discharging_efficiency: float = discharging_efficiency_field(0.88)
    max_charge_power_w: Optional[float] = max_charging_power_field()
    initial_soc_percentage: int = initial_soc_percentage_field(
        "An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)."
    )
    min_soc_percentage: int = Field(
        default=0,
        ge=0,
        le=100,
        json_schema_extra={
            "description": "An integer representing the minimum state of charge (SOC) of the battery in percentage.",
            "examples": [10],
        },
    )
    max_soc_percentage: int = Field(
        default=100,
        ge=0,
        le=100,
        json_schema_extra={
            "description": "An integer representing the maximum state of charge (SOC) of the battery in percentage."
        },
    )
    charge_rates: Optional[list[float]] = Field(
        default=None,
        json_schema_extra={
            "description": "Charge rates as factor of maximum charging power [0.00 ... 1.00]. None denotes all charge rates are available.",
            "examples": [[0.0, 0.25, 0.5, 0.75, 1.0], None],
        },
    )
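
# Illustrative instantiation (hypothetical values):
#   BaseBatteryParameters(device_id="battery1", capacity_wh=8000,
#                         min_soc_percentage=10, charge_rates=[0.0, 0.5, 1.0])
# Unknown fields raise a validation error because the base model forbids extras.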


class SolarPanelBatteryParameters(BaseBatteryParameters):
    """PV battery device simulation configuration."""

    max_charge_power_w: Optional[float] = max_charging_power_field()


class ElectricVehicleParameters(BaseBatteryParameters):
    """Battery Electric Vehicle Device Simulation Configuration."""

    device_id: str = Field(
        json_schema_extra={"description": "ID of electric vehicle", "examples": ["ev1"]}
    )
    discharging_efficiency: float = discharging_efficiency_field(1.0)
    initial_soc_percentage: int = initial_soc_percentage_field(
        "An integer representing the current state of charge (SOC) of the battery in percentage."
    )


class HomeApplianceParameters(DeviceParameters):
    """Home Appliance Device Simulation Configuration."""

    device_id: str = Field(
        json_schema_extra={"description": "ID of home appliance", "examples": ["dishwasher"]}
    )
    consumption_wh: int = Field(
        gt=0,
        json_schema_extra={
            "description": "An integer representing the energy consumption of a household device in watt-hours.",
            "examples": [2000],
        },
    )
    duration_h: int = Field(
        gt=0,
        json_schema_extra={
            "description": "An integer representing the usage duration of a household device in hours.",
            "examples": [3],
        },
    )
    time_windows: Optional[TimeWindowSequence] = Field(
        default=None,
        json_schema_extra={
            "description": "List of allowed time windows. Defaults to optimization general time window.",
            "examples": [
                [
                    {"start_time": "10:00", "duration": "2 hours"},
                ],
            ],
        },
    )


class InverterParameters(DeviceParameters):
    """Inverter Device Simulation Configuration."""

    device_id: str = Field(
        json_schema_extra={"description": "ID of inverter", "examples": ["inverter1"]}
    )
    max_power_wh: float = Field(gt=0, json_schema_extra={"examples": [10000]})
    battery_id: Optional[str] = Field(
        default=None,
        json_schema_extra={"description": "ID of battery", "examples": [None, "battery1"]},
    )
@@ -1,653 +0,0 @@
"""GENETIC algorithm parameters.

This module defines the Pydantic-based configuration and input parameter models
used in the energy optimization routines, including photovoltaic forecasts,
electricity pricing, and system component parameters.

It also provides a method to assemble these parameters from predictions,
forecasts, and fallback defaults, preparing them for optimization runs.
"""

from typing import Optional, Union

from loguru import logger
from pydantic import Field, field_validator, model_validator
from typing_extensions import Self

from akkudoktoreos.core.coreabc import (
    ConfigMixin,
    MeasurementMixin,
    PredictionMixin,
)
from akkudoktoreos.optimization.genetic.geneticabc import GeneticParametersBaseModel
from akkudoktoreos.optimization.genetic.geneticdevices import (
    ElectricVehicleParameters,
    HomeApplianceParameters,
    InverterParameters,
    SolarPanelBatteryParameters,
)
from akkudoktoreos.utils.datetimeutil import to_duration

# Do not import directly from akkudoktoreos.core.coreabc:
# EnergyManagementSystemMixin - creates a circular dependency with ems.py
# StartMixin - creates a circular dependency with ems.py


class GeneticEnergyManagementParameters(GeneticParametersBaseModel):
    """Encapsulates energy-related forecasts and costs used in GENETIC optimization."""

    pv_prognose_wh: list[float] = Field(
        json_schema_extra={
            "description": "An array of floats representing the forecasted photovoltaic output in watts for different time intervals."
        }
    )
    strompreis_euro_pro_wh: list[float] = Field(
        json_schema_extra={
            "description": "An array of floats representing the electricity price in euros per watt-hour for different time intervals."
        }
    )
    einspeiseverguetung_euro_pro_wh: Union[list[float], float] = Field(
        json_schema_extra={
            "description": "A float or array of floats representing the feed-in compensation in euros per watt-hour."
        }
    )
    preis_euro_pro_wh_akku: float = Field(
        json_schema_extra={
            "description": "A float representing the cost of battery energy per watt-hour."
        }
    )
    gesamtlast: list[float] = Field(
        json_schema_extra={
            "description": "An array of floats representing the total load (consumption) in watts for different time intervals."
        }
    )

    @model_validator(mode="after")
    def validate_list_length(self) -> Self:
        """Validate that all input lists are of the same length.

        Raises:
            ValueError: If input list lengths differ.
        """
        pv_prognose_length = len(self.pv_prognose_wh)
        if (
            pv_prognose_length != len(self.strompreis_euro_pro_wh)
            or pv_prognose_length != len(self.gesamtlast)
            or (
                isinstance(self.einspeiseverguetung_euro_pro_wh, list)
                and pv_prognose_length != len(self.einspeiseverguetung_euro_pro_wh)
            )
        ):
            raise ValueError("Input lists have different lengths")
        return self


class GeneticOptimizationParameters(
    ConfigMixin,
    MeasurementMixin,
    PredictionMixin,
    # EnergyManagementSystemMixin,  # Creates circular dependency with ems.py
    # StartMixin,  # Creates circular dependency with ems.py
    GeneticParametersBaseModel,
):
    """Main parameter class for running the genetic energy optimization.

    Collects all model and configuration parameters necessary to run the
    optimization process, such as forecasts, pricing, battery and appliance models.
    """

    ems: GeneticEnergyManagementParameters
    pv_akku: Optional[SolarPanelBatteryParameters]
    inverter: Optional[InverterParameters]
    eauto: Optional[ElectricVehicleParameters]
    dishwasher: Optional[HomeApplianceParameters] = None
    temperature_forecast: Optional[list[Optional[float]]] = Field(
        default=None,
        json_schema_extra={
            "description": "An array of floats representing the temperature forecast in degrees Celsius for different time intervals."
        },
    )
    start_solution: Optional[list[float]] = Field(
        default=None,
        json_schema_extra={
            "description": "Can be `null` or contain a previous solution (if available)."
        },
    )

    @model_validator(mode="after")
    def validate_list_length(self) -> Self:
        """Ensure that the temperature forecast list matches the PV forecast length.

        Raises:
            ValueError: If list lengths mismatch.
        """
        arr_length = len(self.ems.pv_prognose_wh)
        if self.temperature_forecast is not None and arr_length != len(self.temperature_forecast):
            raise ValueError("Input lists have different lengths")
        return self

    @field_validator("start_solution")
    def validate_start_solution(
        cls, start_solution: Optional[list[float]]
    ) -> Optional[list[float]]:
        """Validate that the starting solution has at least two elements.

        Args:
            start_solution (list[float]): Optional list of solution values.

        Returns:
            list[float]: Validated list.

        Raises:
            ValueError: If the solution is too short.
        """
        if start_solution is not None and len(start_solution) < 2:
            raise ValueError("Requires at least two values.")
        return start_solution

    @classmethod
    def prepare(cls) -> "Optional[GeneticOptimizationParameters]":
        """Prepare optimization parameters from config, forecast and measurement data.

        Fills in values needed for optimization from available configuration, predictions and
        measurements. If some data is missing, default or demo values are used.

        Parameters start by definition of the genetic algorithm at hour 0 of the actual date
        (not at the start datetime of the energy management run).

        Returns:
            GeneticOptimizationParameters: The fully prepared optimization parameters.

        Raises:
            ValueError: If required configuration values like start time are missing.
        """
        # Avoid circular dependency
        from akkudoktoreos.core.ems import get_ems

        ems = get_ems()

        # The optimization parameters
        oparams: "Optional[GeneticOptimizationParameters]" = None

        # Check for run definitions
        if ems.start_datetime is None:
            error_msg = "Start datetime unknown."
            logger.error(error_msg)
            raise ValueError(error_msg)
        # Check for general prediction conditions
        if cls.config.general.latitude is None:
            default_latitude = 52.52
            logger.error(f"Latitude unknown - defaulting to {default_latitude}.")
            cls.config.general.latitude = default_latitude
        if cls.config.general.longitude is None:
            default_longitude = 13.405
            logger.error(f"Longitude unknown - defaulting to {default_longitude}.")
            cls.config.general.longitude = default_longitude
        if cls.config.prediction.hours is None:
            logger.error("Prediction hours unknown - defaulting to 48 hours.")
            cls.config.prediction.hours = 48
        if cls.config.prediction.historic_hours is None:
            logger.error("Prediction historic hours unknown - defaulting to 24 hours.")
            cls.config.prediction.historic_hours = 24
        # Check optimization definitions
        if cls.config.optimization.horizon_hours is None:
            logger.error("Optimization horizon unknown - defaulting to 24 hours.")
            cls.config.optimization.horizon_hours = 24
        if cls.config.optimization.interval is None:
            logger.error("Optimization interval unknown - defaulting to 3600 seconds.")
            cls.config.optimization.interval = 3600
        if cls.config.optimization.interval != 3600:
            logger.error(
                "Optimization interval '{}' seconds not supported - forced to 3600 seconds.",
                cls.config.optimization.interval,
            )
            cls.config.optimization.interval = 3600
        # Check genetic algorithm definitions
        if cls.config.optimization.genetic is None:
            logger.error(
                "Genetic optimization configuration not configured - defaulting to demo config."
            )
            cls.config.optimization.genetic = {
                "individuals": 300,
                "generations": 400,
                "seed": None,
                "penalties": {
                    "ev_soc_miss": 10,
                },
            }
        if cls.config.optimization.genetic.individuals is None:
            logger.error("Genetic individuals unknown - defaulting to 300.")
            cls.config.optimization.genetic.individuals = 300
        if cls.config.optimization.genetic.generations is None:
            logger.error("Genetic generations unknown - defaulting to 400.")
            cls.config.optimization.genetic.generations = 400
        if cls.config.optimization.genetic.penalties is None:
            logger.error("Genetic penalties unknown - defaulting to demo config.")
            cls.config.optimization.genetic.penalties = {"ev_soc_miss": 10}
        if "ev_soc_miss" not in cls.config.optimization.genetic.penalties:
            logger.error("ev_soc_miss penalty function parameter unknown - defaulting to 10.")
            cls.config.optimization.genetic.penalties["ev_soc_miss"] = 10

        # Get start solution from last run
        start_solution = None
        last_solution = ems.genetic_solution()
        if last_solution and last_solution.start_solution:
            start_solution = last_solution.start_solution

        # Add forecast and device data
        interval = to_duration(cls.config.optimization.interval)
        power_to_energy_per_interval_factor = cls.config.optimization.interval / 3600
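        # With the (enforced) 3600 s interval this factor is 1.0, so average power
        # in W over one interval equals energy in Wh; e.g. 500 W for one hour -> 500 Wh.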
        parameter_start_datetime = ems.start_datetime.set(hour=0, second=0, microsecond=0)
        parameter_end_datetime = parameter_start_datetime.add(hours=cls.config.prediction.hours)
        max_retries = 10

        for attempt in range(1, max_retries + 2):
            # Collect all the data for optimisation, but do not exceed max retries
            if attempt > max_retries:
                error_msg = f"Maximum retries {max_retries} for parameter collection exceeded. Parameter preparation attempt {attempt}."
                logger.error(error_msg)
                raise ValueError(error_msg)

            # Ensure predictions are up to date
            cls.prediction.update_data()

            try:
                pvforecast_ac_power = (
                    cls.prediction.key_to_array(
                        key="pvforecast_ac_power",
                        start_datetime=parameter_start_datetime,
                        end_datetime=parameter_end_datetime,
                        interval=interval,
                        fill_method="linear",
                    )
                    * power_to_energy_per_interval_factor
                ).tolist()
            except Exception:
                logger.exception(
                    "No PV forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
                    attempt,
                )
                cls.config.merge_settings_from_dict(
                    {
                        "pvforecast": {
                            "provider": "PVForecastAkkudoktor",
                            "planes": [
                                {
                                    "peakpower": 5.0,
                                    "surface_azimuth": 170,
                                    "surface_tilt": 7,
                                    "userhorizon": [20, 27, 22, 20],
                                    "inverter_paco": 10000,
                                },
                                {
                                    "peakpower": 4.8,
                                    "surface_azimuth": 90,
                                    "surface_tilt": 7,
                                    "userhorizon": [30, 30, 30, 50],
                                    "inverter_paco": 10000,
                                },
                                {
                                    "peakpower": 1.4,
                                    "surface_azimuth": 140,
                                    "surface_tilt": 60,
                                    "userhorizon": [60, 30, 0, 30],
                                    "inverter_paco": 2000,
                                },
                                {
                                    "peakpower": 1.6,
                                    "surface_azimuth": 185,
                                    "surface_tilt": 45,
                                    "userhorizon": [45, 25, 30, 60],
                                    "inverter_paco": 1400,
                                },
                            ],
                        },
                    }
                )
                # Retry
                continue
            try:
                elecprice_marketprice_wh = cls.prediction.key_to_array(
                    key="elecprice_marketprice_wh",
                    start_datetime=parameter_start_datetime,
                    end_datetime=parameter_end_datetime,
                    interval=interval,
                    fill_method="ffill",
                ).tolist()
            except Exception:
                logger.exception(
                    "No Electricity Marketprice forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
                    attempt,
                )
                cls.config.elecprice.provider = "ElecPriceAkkudoktor"
                # Retry
                continue
            try:
                loadforecast_power_w = cls.prediction.key_to_array(
                    key="loadforecast_power_w",
                    start_datetime=parameter_start_datetime,
                    end_datetime=parameter_end_datetime,
                    interval=interval,
                    fill_method="ffill",
                ).tolist()
            except Exception:
                logger.exception(
                    "No Load forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
                    attempt,
                )
                cls.config.merge_settings_from_dict(
                    {
                        "load": {
                            "provider": "LoadAkkudoktor",
                            "provider_settings": {
                                "LoadAkkudoktor": {
                                    "loadakkudoktor_year_energy_kwh": "3000",
                                },
                            },
                        },
                    }
                )
                # Retry
                continue
            try:
                feed_in_tariff_wh = cls.prediction.key_to_array(
                    key="feed_in_tariff_wh",
                    start_datetime=parameter_start_datetime,
                    end_datetime=parameter_end_datetime,
                    interval=interval,
                    fill_method="ffill",
                ).tolist()
            except Exception:
                logger.exception(
                    "No feed in tariff forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
                    attempt,
                )
                cls.config.merge_settings_from_dict(
                    {
                        "feedintariff": {
                            "provider": "FeedInTariffFixed",
                            "provider_settings": {
                                "FeedInTariffFixed": {
                                    "feed_in_tariff_kwh": 0.078,
                                },
                            },
                        },
                    }
                )
                # Retry
                continue
            try:
                weather_temp_air = cls.prediction.key_to_array(
                    key="weather_temp_air",
                    start_datetime=parameter_start_datetime,
                    end_datetime=parameter_end_datetime,
                    interval=interval,
                    fill_method="ffill",
                ).tolist()
            except Exception:
                logger.exception(
                    "No weather forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
                    attempt,
                )
                cls.config.weather.provider = "BrightSky"
                # Retry
                continue

            # Add device data

            # Batteries
            # ---------
            if cls.config.devices.max_batteries is None:
                logger.error("Number of battery devices not configured - defaulting to 1.")
                cls.config.devices.max_batteries = 1
            if cls.config.devices.max_batteries == 0:
                battery_params = None
                battery_lcos_kwh = 0
            else:
                if cls.config.devices.batteries is None:
                    logger.error("No battery device data available - defaulting to demo data.")
                    cls.config.devices.batteries = [{"device_id": "battery1", "capacity_wh": 8000}]
                try:
                    battery_config = cls.config.devices.batteries[0]
                    battery_params = SolarPanelBatteryParameters(
                        device_id=battery_config.device_id,
                        capacity_wh=battery_config.capacity_wh,
                        charging_efficiency=battery_config.charging_efficiency,
                        discharging_efficiency=battery_config.discharging_efficiency,
                        max_charge_power_w=battery_config.max_charge_power_w,
                        min_soc_percentage=battery_config.min_soc_percentage,
                        max_soc_percentage=battery_config.max_soc_percentage,
                    )
                except Exception:
                    logger.exception(
                        "No battery device data available - defaulting to demo data. Parameter preparation attempt {}.",
                        attempt,
                    )
                    cls.config.devices.batteries = [{"device_id": "battery1", "capacity_wh": 8000}]
                    # Retry
                    continue
                # Levelized cost of storage
                if battery_config.levelized_cost_of_storage_kwh is None:
                    logger.error(
                        "No battery device LCOS data available - defaulting to 0 €/kWh. Parameter preparation attempt {}.",
                        attempt,
                    )
                    battery_config.levelized_cost_of_storage_kwh = 0
                battery_lcos_kwh = battery_config.levelized_cost_of_storage_kwh
                # Initial SOC
                try:
                    initial_soc_factor = cls.measurement.key_to_value(
                        key=battery_config.measurement_key_soc_factor,
                        target_datetime=ems.start_datetime,
                    )
                    if initial_soc_factor > 1.0 or initial_soc_factor < 0.0:
                        logger.error(
                            f"Invalid battery initial SoC factor {initial_soc_factor} - defaulting to 0.0."
                        )
                        initial_soc_factor = 0.0
                    # The genetic parameter is 0..100 as int
                    initial_soc_percentage = int(initial_soc_factor * 100)
                except Exception:
                    initial_soc_percentage = None
                if initial_soc_percentage is None:
                    logger.error(
                        f"No battery device SoC data (measurement key = '{battery_config.measurement_key_soc_factor}') available - defaulting to 0."
                    )
                    initial_soc_percentage = 0
                battery_params.initial_soc_percentage = initial_soc_percentage

            # Electric Vehicles
            # -----------------
            if cls.config.devices.max_electric_vehicles is None:
                logger.error("Number of electric_vehicle devices not configured - defaulting to 1.")
                cls.config.devices.max_electric_vehicles = 1
            if cls.config.devices.max_electric_vehicles == 0:
                electric_vehicle_params = None
            else:
                if cls.config.devices.electric_vehicles is None:
                    logger.error(
                        "No electric vehicle device data available - defaulting to demo data."
                    )
                    cls.config.devices.max_electric_vehicles = 1
                    cls.config.devices.electric_vehicles = [
                        {
                            "device_id": "ev11",
                            "capacity_wh": 50000,
                            "charge_rates": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
                            "min_soc_percentage": 70,
                        }
                    ]
                try:
                    electric_vehicle_config = cls.config.devices.electric_vehicles[0]
                    electric_vehicle_params = ElectricVehicleParameters(
                        device_id=electric_vehicle_config.device_id,
                        capacity_wh=electric_vehicle_config.capacity_wh,
                        charging_efficiency=electric_vehicle_config.charging_efficiency,
                        discharging_efficiency=electric_vehicle_config.discharging_efficiency,
                        charge_rates=electric_vehicle_config.charge_rates,
                        max_charge_power_w=electric_vehicle_config.max_charge_power_w,
                        min_soc_percentage=electric_vehicle_config.min_soc_percentage,
                        max_soc_percentage=electric_vehicle_config.max_soc_percentage,
                    )
                except Exception:
                    logger.exception(
                        "No electric_vehicle device data available - defaulting to demo data. Parameter preparation attempt {}.",
                        attempt,
                    )
                    cls.config.devices.max_electric_vehicles = 1
                    cls.config.devices.electric_vehicles = [
                        {
                            "device_id": "ev12",
                            "capacity_wh": 50000,
                            "charge_rates": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
                            "min_soc_percentage": 70,
                        }
                    ]
                    # Retry
                    continue
                # Initial SOC
                try:
                    initial_soc_factor = cls.measurement.key_to_value(
                        key=electric_vehicle_config.measurement_key_soc_factor,
                        target_datetime=ems.start_datetime,
                    )
                    if initial_soc_factor > 1.0 or initial_soc_factor < 0.0:
                        logger.error(
                            f"Invalid electric vehicle initial SoC factor {initial_soc_factor} - defaulting to 0.0."
                        )
                        initial_soc_factor = 0.0
                    # The genetic parameter is 0..100 as int
                    initial_soc_percentage = int(initial_soc_factor * 100)
                except Exception:
                    initial_soc_percentage = None
                if initial_soc_percentage is None:
                    logger.error(
                        f"No electric vehicle device SoC data (measurement key = '{electric_vehicle_config.measurement_key_soc_factor}') available - defaulting to 0."
                    )
                    initial_soc_percentage = 0
                electric_vehicle_params.initial_soc_percentage = initial_soc_percentage

            # Inverters
            # ---------
            if cls.config.devices.max_inverters is None:
                logger.error("Number of inverter devices not configured - defaulting to 1.")
                cls.config.devices.max_inverters = 1
            if cls.config.devices.max_inverters == 0:
                inverter_params = None
            else:
                if cls.config.devices.inverters is None:
                    logger.error("No inverter device data available - defaulting to demo data.")
                    cls.config.devices.inverters = [
                        {
                            "device_id": "inverter1",
                            "max_power_w": 10000,
                            "battery_id": battery_config.device_id,
                        }
                    ]
                try:
                    inverter_config = cls.config.devices.inverters[0]
                    inverter_params = InverterParameters(
                        device_id=inverter_config.device_id,
                        max_power_wh=inverter_config.max_power_w,
                        battery_id=inverter_config.battery_id,
                    )
                except Exception:
                    logger.exception(
                        "No inverter device data available - defaulting to demo data. Parameter preparation attempt {}.",
                        attempt,
                    )
                    cls.config.devices.inverters = [
                        {
                            "device_id": "inverter1",
                            "max_power_w": 10000,
                            "battery_id": battery_config.device_id,
                        }
                    ]
                    # Retry
                    continue

            # Home Appliances
            # ---------------
            if cls.config.devices.max_home_appliances is None:
                logger.error("Number of home appliance devices not configured - defaulting to 1.")
                cls.config.devices.max_home_appliances = 1
            if cls.config.devices.max_home_appliances == 0:
                home_appliance_params = None
            else:
                home_appliance_params = None
                if cls.config.devices.home_appliances is None:
                    logger.error(
                        "No home appliance device data available - defaulting to demo data."
                    )
                    cls.config.devices.home_appliances = [
                        {
                            "device_id": "dishwasher1",
                            "consumption_wh": 2000,
                            "duration_h": 3.0,
                            "time_windows": {
                                "windows": [
                                    {
                                        "start_time": "08:00",
                                        "duration": "5 hours",
                                    },
                                    {
                                        "start_time": "15:00",
                                        "duration": "3 hours",
                                    },
                                ],
                            },
                        }
                    ]
                try:
                    home_appliance_config = cls.config.devices.home_appliances[0]
                    home_appliance_params = HomeApplianceParameters(
                        device_id=home_appliance_config.device_id,
                        consumption_wh=home_appliance_config.consumption_wh,
                        duration_h=home_appliance_config.duration_h,
                        time_windows=home_appliance_config.time_windows,
                    )
                except Exception:
                    logger.exception(
                        "No home appliance device data available - defaulting to demo data. Parameter preparation attempt {}.",
                        attempt,
                    )
                    cls.config.devices.home_appliances = [
                        {
                            "device_id": "dishwasher1",
                            "consumption_wh": 2000,
                            "duration_h": 3.0,
                            "time_windows": None,
                        }
                    ]
                    # Retry
                    continue

            # We got all parameter data
            try:
                oparams = GeneticOptimizationParameters(
                    ems=GeneticEnergyManagementParameters(
                        pv_prognose_wh=pvforecast_ac_power,
                        strompreis_euro_pro_wh=elecprice_marketprice_wh,
                        einspeiseverguetung_euro_pro_wh=feed_in_tariff_wh,
                        gesamtlast=loadforecast_power_w,
                        preis_euro_pro_wh_akku=battery_lcos_kwh / 1000,
                    ),
                    temperature_forecast=weather_temp_air,
                    pv_akku=battery_params,
                    eauto=electric_vehicle_params,
                    inverter=inverter_params,
                    dishwasher=home_appliance_params,
                    start_solution=start_solution,
                )
            except Exception:
                logger.exception(
                    "Can not prepare optimization parameters - will retry. Parameter preparation attempt {}.",
                    attempt,
                )
                oparams = None
                # Retry
                continue

            # Parameters prepared
            break

        return oparams
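
# Typical usage (sketch): the prepared parameters feed the genetic optimizer, e.g.
#   oparams = GeneticOptimizationParameters.prepare()
#   if oparams is not None:
#       ...run the GENETIC optimization with oparams...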
@@ -1,652 +0,0 @@
|
|||||||
"""Genetic algorithm optimisation solution."""
|
|
||||||
|
|
||||||
from typing import Any, Optional
|
|
||||||
|
|
||||||
import pandas as pd
|
|
||||||
from loguru import logger
|
|
||||||
from pydantic import Field, field_validator
|
|
||||||
|
|
||||||
from akkudoktoreos.core.coreabc import (
|
|
||||||
ConfigMixin,
|
|
||||||
)
|
|
||||||
from akkudoktoreos.core.emplan import (
|
|
||||||
DDBCInstruction,
|
|
||||||
EnergyManagementPlan,
|
|
||||||
FRBCInstruction,
|
|
||||||
)
|
|
||||||
from akkudoktoreos.core.pydantic import PydanticDateTimeDataFrame
|
|
||||||
from akkudoktoreos.devices.devicesabc import (
|
|
||||||
ApplianceOperationMode,
|
|
||||||
BatteryOperationMode,
|
|
||||||
)
|
|
||||||
from akkudoktoreos.devices.genetic.battery import Battery
|
|
||||||
from akkudoktoreos.optimization.genetic.geneticdevices import GeneticParametersBaseModel
|
|
||||||
from akkudoktoreos.optimization.optimization import OptimizationSolution
|
|
||||||
from akkudoktoreos.prediction.prediction import get_prediction
|
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
|
||||||
from akkudoktoreos.utils.utils import NumpyEncoder
|
|
||||||
|
|
||||||
|
|
||||||
class DeviceOptimizeResult(GeneticParametersBaseModel):
    device_id: str = Field(
        json_schema_extra={"description": "ID of device", "examples": ["device1"]}
    )
    hours: int = Field(
        gt=0,
        json_schema_extra={"description": "Number of hours in the simulation.", "examples": [24]},
    )


class ElectricVehicleResult(DeviceOptimizeResult):
    """Result class containing information related to the electric vehicle's charging and discharging behavior."""

    device_id: str = Field(
        json_schema_extra={"description": "ID of electric vehicle", "examples": ["ev1"]}
    )
    charge_array: list[float] = Field(
        json_schema_extra={
            "description": "Hourly charging status (0 for no charging, 1 for charging)."
        }
    )
    discharge_array: list[int] = Field(
        json_schema_extra={
            "description": "Hourly discharging status (0 for no discharging, 1 for discharging)."
        }
    )
    discharging_efficiency: float = Field(
        json_schema_extra={"description": "The discharge efficiency as a float."}
    )
    capacity_wh: int = Field(
        json_schema_extra={"description": "Capacity of the EV’s battery in watt-hours."}
    )
    charging_efficiency: float = Field(
        json_schema_extra={"description": "Charging efficiency as a float."}
    )
    max_charge_power_w: int = Field(
        json_schema_extra={"description": "Maximum charging power in watts."}
    )
    soc_wh: float = Field(
        json_schema_extra={
            "description": "State of charge of the battery in watt-hours at the start of the simulation."
        }
    )
    initial_soc_percentage: int = Field(
        json_schema_extra={
            "description": "State of charge at the start of the simulation in percentage."
        }
    )

    @field_validator("discharge_array", "charge_array", mode="before")
    def convert_numpy(cls, field: Any) -> Any:
        return NumpyEncoder.convert_numpy(field)[0]

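# Editor's note: the mode="before" validators let numpy arrays coming out of
# the simulation be assigned directly to the list fields above. Conceptually
# they behave like this sketch (not the actual NumpyEncoder implementation):
import numpy as np

def to_plain_list(value):
    # numpy arrays become plain Python lists before pydantic validation
    if isinstance(value, np.ndarray):
        return value.tolist()
    return value

assert to_plain_list(np.array([0, 1, 1])) == [0, 1, 1]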
class GeneticSimulationResult(GeneticParametersBaseModel):
    """This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""

    Last_Wh_pro_Stunde: list[float] = Field(json_schema_extra={"description": "TBD"})
    EAuto_SoC_pro_Stunde: list[float] = Field(
        json_schema_extra={"description": "The state of charge of the EV for each hour."}
    )
    Einnahmen_Euro_pro_Stunde: list[float] = Field(
        json_schema_extra={
            "description": "The revenue from grid feed-in or other sources in euros per hour."
        }
    )
    Gesamt_Verluste: float = Field(
        json_schema_extra={"description": "The total losses in watt-hours over the entire period."}
    )
    Gesamtbilanz_Euro: float = Field(
        json_schema_extra={"description": "The total balance of revenues minus costs in euros."}
    )
    Gesamteinnahmen_Euro: float = Field(
        json_schema_extra={"description": "The total revenues in euros."}
    )
    Gesamtkosten_Euro: float = Field(json_schema_extra={"description": "The total costs in euros."})
    Home_appliance_wh_per_hour: list[Optional[float]] = Field(
        json_schema_extra={
            "description": "The energy consumption of a household appliance in watt-hours per hour."
        }
    )
    Kosten_Euro_pro_Stunde: list[float] = Field(
        json_schema_extra={"description": "The costs in euros per hour."}
    )
    Netzbezug_Wh_pro_Stunde: list[float] = Field(
        json_schema_extra={"description": "The grid energy drawn in watt-hours per hour."}
    )
    Netzeinspeisung_Wh_pro_Stunde: list[float] = Field(
        json_schema_extra={"description": "The energy fed into the grid in watt-hours per hour."}
    )
    Verluste_Pro_Stunde: list[float] = Field(
        json_schema_extra={"description": "The losses in watt-hours per hour."}
    )
    akku_soc_pro_stunde: list[float] = Field(
        json_schema_extra={
            "description": "The state of charge of the battery (not the EV) in percentage per hour."
        }
    )
    Electricity_price: list[float] = Field(
        json_schema_extra={"description": "The electricity price used, including predictions."}
    )

    @field_validator(
        "Last_Wh_pro_Stunde",
        "Netzeinspeisung_Wh_pro_Stunde",
        "akku_soc_pro_stunde",
        "Netzbezug_Wh_pro_Stunde",
        "Kosten_Euro_pro_Stunde",
        "Einnahmen_Euro_pro_Stunde",
        "EAuto_SoC_pro_Stunde",
        "Verluste_Pro_Stunde",
        "Home_appliance_wh_per_hour",
        "Electricity_price",
        mode="before",
    )
    def convert_numpy(cls, field: Any) -> Any:
        return NumpyEncoder.convert_numpy(field)[0]

class GeneticSolution(ConfigMixin, GeneticParametersBaseModel):
    """**Note**: The first value of "Last_Wh_per_hour", "Netzeinspeisung_Wh_per_hour", and "Netzbezug_Wh_per_hour" will be set to null in the JSON output and represented as NaN or None in the corresponding classes' data returns. This approach is adopted to ensure that the current hour's processing remains unchanged."""

    ac_charge: list[float] = Field(
        json_schema_extra={
            "description": "Array with AC charging values as relative power (0.0-1.0), other values set to 0."
        }
    )
    dc_charge: list[float] = Field(
        json_schema_extra={
            "description": "Array with DC charging values as relative power (0-1), other values set to 0."
        }
    )
    discharge_allowed: list[int] = Field(
        json_schema_extra={
            "description": "Array with discharge values (1 for discharge, 0 otherwise)."
        }
    )
    eautocharge_hours_float: Optional[list[float]] = Field(json_schema_extra={"description": "TBD"})
    result: GeneticSimulationResult
    eauto_obj: Optional[ElectricVehicleResult]
    start_solution: Optional[list[float]] = Field(
        default=None,
        json_schema_extra={
            "description": "An array of binary values (0 or 1) representing a possible starting solution for the simulation."
        },
    )
    washingstart: Optional[int] = Field(
        default=None,
        json_schema_extra={
            "description": "Can be `null` or contain an object representing the start of washing (if applicable)."
        },
    )

    @field_validator(
        "ac_charge",
        "dc_charge",
        "discharge_allowed",
        mode="before",
    )
    def convert_numpy(cls, field: Any) -> Any:
        return NumpyEncoder.convert_numpy(field)[0]

    @field_validator(
        "eauto_obj",
        mode="before",
    )
    def convert_eauto(cls, field: Any) -> Any:
        if isinstance(field, Battery):
            return ElectricVehicleResult(**field.to_dict())
        return field

    def _battery_operation_from_solution(
        self,
        ac_charge: float,
        dc_charge: float,
        discharge_allowed: bool,
    ) -> tuple[BatteryOperationMode, float]:
        """Maps low-level solution to a representative operation mode and factor.

        Args:
            ac_charge (float): Allowed AC-side charging power (relative units).
            dc_charge (float): Allowed DC-side charging power (relative units).
            discharge_allowed (bool): Whether discharging is permitted.

        Returns:
            tuple[BatteryOperationMode, float]:
                A tuple containing:
                - `BatteryOperationMode`: the representative high-level operation mode.
                - `float`: the operation factor corresponding to the active signal.

        Notes:
            - The mapping prioritizes AC charge > DC charge > discharge.
            - Multiple strategies can produce the same low-level signals; this function
              returns a representative mode based on a defined priority order.
        """
        # (0,0,0) → Nothing allowed
        if ac_charge <= 0.0 and dc_charge <= 0.0 and not discharge_allowed:
            return BatteryOperationMode.IDLE, 1.0

        # (0,0,1) → Discharge only
        if ac_charge <= 0.0 and dc_charge <= 0.0 and discharge_allowed:
            return BatteryOperationMode.PEAK_SHAVING, 1.0

        # (ac>0,0,0) → AC charge only
        if ac_charge > 0.0 and dc_charge <= 0.0 and not discharge_allowed:
            return BatteryOperationMode.GRID_SUPPORT_IMPORT, ac_charge

        # (0,dc>0,0) → DC charge only
        if ac_charge <= 0.0 and dc_charge > 0.0 and not discharge_allowed:
            return BatteryOperationMode.NON_EXPORT, dc_charge

        # (ac>0,dc>0,0) → Both charge paths, no discharge
        if ac_charge > 0.0 and dc_charge > 0.0 and not discharge_allowed:
            return BatteryOperationMode.FORCED_CHARGE, ac_charge

        # (ac>0,0,1) → AC charge + discharge - does not make sense
        if ac_charge > 0.0 and dc_charge <= 0.0 and discharge_allowed:
            raise ValueError(
                f"Illegal state: ac_charge: {ac_charge} and discharge_allowed: {discharge_allowed}"
            )

        # (0,dc>0,1) → DC charge + discharge
        if ac_charge <= 0.0 and dc_charge > 0.0 and discharge_allowed:
            return BatteryOperationMode.SELF_CONSUMPTION, dc_charge

        # (ac>0,dc>0,1) → Fully flexible - does not make sense
        if ac_charge > 0.0 and dc_charge > 0.0 and discharge_allowed:
            raise ValueError(
                f"Illegal state: ac_charge: {ac_charge} and discharge_allowed: {discharge_allowed}"
            )

        # Fallback → safe idle
        return BatteryOperationMode.IDLE, 1.0
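# Editor's note: the branches above implement a small truth table over the
# three control signals. A condensed view of the mapping, derived directly
# from the code (factor strings name the signal that becomes the factor):
MODE_TABLE = {
    # (ac_charge > 0, dc_charge > 0, discharge_allowed): (mode, factor)
    (False, False, False): ("IDLE", 1.0),
    (False, False, True): ("PEAK_SHAVING", 1.0),
    (True, False, False): ("GRID_SUPPORT_IMPORT", "ac_charge"),
    (False, True, False): ("NON_EXPORT", "dc_charge"),
    (True, True, False): ("FORCED_CHARGE", "ac_charge"),
    (False, True, True): ("SELF_CONSUMPTION", "dc_charge"),
    # (True, False, True) and (True, True, True) raise ValueError above.
}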
    def optimization_solution(self) -> OptimizationSolution:
        """Provide the genetic solution as a general optimization solution.

        The battery modes are controlled by the grid control triggers:
        - ac_charge: charge from grid
        - discharge_allowed: discharge to grid

        The following battery modes are supported:
        - SELF_CONSUMPTION: ac_charge == 0 and discharge_allowed == 0
        - GRID_SUPPORT_EXPORT: ac_charge == 0 and discharge_allowed == 1
        - GRID_SUPPORT_IMPORT: ac_charge > 0 and discharge_allowed == 0 or 1
        """
        from akkudoktoreos.core.ems import get_ems

        start_datetime = get_ems().start_datetime
        start_day_hour = start_datetime.in_timezone(self.config.general.timezone).hour
        interval_hours = 1
        power_to_energy_per_interval_factor = 1.0

        # --- Create index based on list length and interval ---
        # Ensure we only use the minimum of results and commands if differing
        periods = min(len(self.result.Kosten_Euro_pro_Stunde), len(self.ac_charge) - start_day_hour)
        time_index = pd.date_range(
            start=start_datetime,
            periods=periods,
            freq=f"{interval_hours}h",
        )
        n_points = len(time_index)
        end_datetime = start_datetime.add(hours=n_points)

        # Fill solution into dataframe with correct column names
        # - load_energy_wh: Load of all energy consumers in wh
        # - grid_energy_wh: Grid energy feed in (negative) or consumption (positive) in wh
        # - costs_amt: Costs in money amount
        # - revenue_amt: Revenue in money amount
        # - losses_energy_wh: Energy losses in wh
        # - <device-id>_<operation>_op_mode: Operation mode of the device (1.0 when active).
        # - <device-id>_<operation>_op_factor: Operation mode factor of the device.
        # - <device-id>_soc_factor: State of charge of a battery/ electric vehicle device as factor of total capacity.
        # - <device-id>_energy_wh: Energy consumption (positive) of a device in wh.

        solution = pd.DataFrame(
            {
                "date_time": time_index,
                # result starts at start_day_hour
                "load_energy_wh": self.result.Last_Wh_pro_Stunde[:n_points],
                "grid_feedin_energy_wh": self.result.Netzeinspeisung_Wh_pro_Stunde[:n_points],
                "grid_consumption_energy_wh": self.result.Netzbezug_Wh_pro_Stunde[:n_points],
                "costs_amt": self.result.Kosten_Euro_pro_Stunde[:n_points],
                "revenue_amt": self.result.Einnahmen_Euro_pro_Stunde[:n_points],
                "losses_energy_wh": self.result.Verluste_Pro_Stunde[:n_points],
            },
            index=time_index,
        )

        # Add battery data
        solution["battery1_soc_factor"] = [
            v / 100
            for v in self.result.akku_soc_pro_stunde[:n_points]  # result starts at start_day_hour
        ]
        operation: dict[str, list[float]] = {
            "genetic_ac_charge_factor": [],
            "genetic_dc_charge_factor": [],
            "genetic_discharge_allowed_factor": [],
        }
        # ac_charge, dc_charge, discharge_allowed start at hour 0 of start day
        for hour_idx, rate in enumerate(self.ac_charge):
            if hour_idx < start_day_hour:
                continue
            if hour_idx >= start_day_hour + n_points:
                break
            ac_charge_hour = self.ac_charge[hour_idx]
            dc_charge_hour = self.dc_charge[hour_idx]
            discharge_allowed_hour = bool(self.discharge_allowed[hour_idx])
            operation_mode, operation_mode_factor = self._battery_operation_from_solution(
                ac_charge_hour, dc_charge_hour, discharge_allowed_hour
            )
            operation["genetic_ac_charge_factor"].append(ac_charge_hour)
            operation["genetic_dc_charge_factor"].append(dc_charge_hour)
            operation["genetic_discharge_allowed_factor"].append(discharge_allowed_hour)
            for mode in BatteryOperationMode:
                mode_key = f"battery1_{mode.lower()}_op_mode"
                factor_key = f"battery1_{mode.lower()}_op_factor"
                if mode_key not in operation.keys():
                    operation[mode_key] = []
                    operation[factor_key] = []
                if mode == operation_mode:
                    operation[mode_key].append(1.0)
                    operation[factor_key].append(operation_mode_factor)
                else:
                    operation[mode_key].append(0.0)
                    operation[factor_key].append(0.0)
        for key in operation.keys():
            if len(operation[key]) != n_points:
                error_msg = f"instruction {key} has invalid length {len(operation[key])} - expected {n_points}"
                logger.error(error_msg)
                raise ValueError(error_msg)
            solution[key] = operation[key]
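# Editor's note: the loop above one-hot encodes the selected operation mode per
# hour across all BatteryOperationMode columns. In isolation, the encoding of a
# single hour looks like this sketch (generic mode names, illustrative only):
def one_hot_modes(active_mode: str, factor: float, modes: list[str]) -> dict[str, float]:
    row: dict[str, float] = {}
    for mode in modes:
        row[f"battery1_{mode}_op_mode"] = 1.0 if mode == active_mode else 0.0
        row[f"battery1_{mode}_op_factor"] = factor if mode == active_mode else 0.0
    return row

print(one_hot_modes("idle", 1.0, ["idle", "forced_charge"]))
# {'battery1_idle_op_mode': 1.0, 'battery1_idle_op_factor': 1.0,
#  'battery1_forced_charge_op_mode': 0.0, 'battery1_forced_charge_op_factor': 0.0}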
        # Add EV battery solution
        # eautocharge_hours_float start at hour 0 of start day
        # result.EAuto_SoC_pro_Stunde start at start_datetime.hour
        if self.eauto_obj:
            if self.eautocharge_hours_float is None:
                # Electric vehicle is full enough. No load times.
                solution[f"{self.eauto_obj.device_id}_soc_factor"] = [
                    self.eauto_obj.initial_soc_percentage / 100.0
                ] * n_points
                solution["genetic_ev_charge_factor"] = [0.0] * n_points
                # operation modes
                operation_mode = BatteryOperationMode.IDLE
                for mode in BatteryOperationMode:
                    mode_key = f"{self.eauto_obj.device_id}_{mode.lower()}_op_mode"
                    factor_key = f"{self.eauto_obj.device_id}_{mode.lower()}_op_factor"
                    if mode == operation_mode:
                        solution[mode_key] = [1.0] * n_points
                        solution[factor_key] = [1.0] * n_points
                    else:
                        solution[mode_key] = [0.0] * n_points
                        solution[factor_key] = [0.0] * n_points
            else:
                solution[f"{self.eauto_obj.device_id}_soc_factor"] = [
                    v / 100 for v in self.result.EAuto_SoC_pro_Stunde[:n_points]
                ]
                operation = {
                    "genetic_ev_charge_factor": [],
                }
                for hour_idx, rate in enumerate(self.eautocharge_hours_float):
                    if hour_idx < start_day_hour:
                        continue
                    if hour_idx >= start_day_hour + n_points:
                        break
                    operation["genetic_ev_charge_factor"].append(rate)
                    operation_mode, operation_mode_factor = self._battery_operation_from_solution(
                        rate, 0.0, False
                    )
                    for mode in BatteryOperationMode:
                        mode_key = f"{self.eauto_obj.device_id}_{mode.lower()}_op_mode"
                        factor_key = f"{self.eauto_obj.device_id}_{mode.lower()}_op_factor"
                        if mode_key not in operation.keys():
                            operation[mode_key] = []
                            operation[factor_key] = []
                        if mode == operation_mode:
                            operation[mode_key].append(1.0)
                            operation[factor_key].append(operation_mode_factor)
                        else:
                            operation[mode_key].append(0.0)
                            operation[factor_key].append(0.0)
                for key in operation.keys():
                    if len(operation[key]) != n_points:
                        error_msg = f"instruction {key} has invalid length {len(operation[key])} - expected {n_points}"
                        logger.error(error_msg)
                        raise ValueError(error_msg)
                    solution[key] = operation[key]

        # Add home appliance data
        if self.washingstart:
            # result starts at start_day_hour
            solution["homeappliance1_energy_wh"] = self.result.Home_appliance_wh_per_hour[:n_points]

        # Fill prediction into dataframe with correct column names
        # - pvforecast_ac_energy_wh: PV energy prediction (positive) in wh
        # - elec_price_amt_kwh: Electricity price prediction in money per kwh
        # - feed_in_tariff_amt_kwh: Feed in tariff prediction in money per kwh
        # - weather_temp_air_celcius: Temperature in °C
        # - loadforecast_energy_wh: Load energy prediction in wh
        # - loadakkudoktor_std_energy_wh: Load energy standard deviation prediction in wh
        # - loadakkudoktor_mean_energy_wh: Load mean energy prediction in wh
        prediction = pd.DataFrame(
            {
                "date_time": time_index,
            },
            index=time_index,
        )
        pred = get_prediction()

if "pvforecast_ac_power" in pred.record_keys:
|
|
||||||
prediction["pvforecast_ac_energy_wh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="pvforecast_ac_power",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
)
|
|
||||||
* power_to_energy_per_interval_factor
|
|
||||||
).tolist()
|
|
||||||
if "pvforecast_dc_power" in pred.record_keys:
|
|
||||||
prediction["pvforecast_dc_energy_wh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="pvforecast_dc_power",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
)
|
|
||||||
* power_to_energy_per_interval_factor
|
|
||||||
).tolist()
|
|
||||||
if "elecprice_marketprice_wh" in pred.record_keys:
|
|
||||||
prediction["elec_price_amt_kwh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="elecprice_marketprice_wh",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="ffill",
|
|
||||||
)
|
|
||||||
* 1000
|
|
||||||
).tolist()
|
|
||||||
if "feed_in_tariff_wh" in pred.record_keys:
|
|
||||||
prediction["feed_in_tariff_amt_kwh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="feed_in_tariff_wh",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
)
|
|
||||||
* 1000
|
|
||||||
).tolist()
|
|
||||||
if "weather_temp_air" in pred.record_keys:
|
|
||||||
prediction["weather_air_temp_celcius"] = pred.key_to_array(
|
|
||||||
key="weather_temp_air",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
).tolist()
|
|
||||||
if "loadforecast_power_w" in pred.record_keys:
|
|
||||||
prediction["loadforecast_energy_wh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="loadforecast_power_w",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
)
|
|
||||||
* power_to_energy_per_interval_factor
|
|
||||||
).tolist()
|
|
||||||
if "loadakkudoktor_std_power_w" in pred.record_keys:
|
|
||||||
prediction["loadakkudoktor_std_energy_wh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="loadakkudoktor_std_power_w",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
)
|
|
||||||
* power_to_energy_per_interval_factor
|
|
||||||
).tolist()
|
|
||||||
if "loadakkudoktor_mean_power_w" in pred.record_keys:
|
|
||||||
prediction["loadakkudoktor_mean_energy_wh"] = (
|
|
||||||
pred.key_to_array(
|
|
||||||
key="loadakkudoktor_mean_power_w",
|
|
||||||
start_datetime=start_datetime,
|
|
||||||
end_datetime=end_datetime,
|
|
||||||
interval=to_duration(f"{interval_hours} hours"),
|
|
||||||
fill_method="linear",
|
|
||||||
)
|
|
||||||
* power_to_energy_per_interval_factor
|
|
||||||
).tolist()
|
|
||||||
|
|
||||||
        optimization_solution = OptimizationSolution(
            id=f"optimization-genetic@{to_datetime(as_string=True)}",
            generated_at=to_datetime(),
            comment="Optimization solution derived from GeneticSolution.",
            valid_from=start_datetime,
            valid_until=start_datetime.add(hours=self.config.optimization.horizon_hours),
            total_losses_energy_wh=self.result.Gesamt_Verluste,
            total_revenues_amt=self.result.Gesamteinnahmen_Euro,
            total_costs_amt=self.result.Gesamtkosten_Euro,
            fitness_score={
                self.result.Gesamtkosten_Euro,
            },
            prediction=PydanticDateTimeDataFrame.from_dataframe(prediction),
            solution=PydanticDateTimeDataFrame.from_dataframe(solution),
        )

        return optimization_solution
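# Editor's note: a consumer of the returned OptimizationSolution would
# typically rehydrate the embedded data frames. Sketch only - it assumes a
# prepared GeneticSolution instance `gs` and that PydanticDateTimeDataFrame
# offers a to_dataframe() inverse of the from_dataframe() used above.
osol = gs.optimization_solution()
df = osol.solution.to_dataframe()  # assumption: inverse of from_dataframe()
print(osol.total_costs_amt, float(df["costs_amt"].sum()))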
    def energy_management_plan(self) -> EnergyManagementPlan:
        """Provide the genetic solution as an energy management plan."""
        from akkudoktoreos.core.ems import get_ems

        start_datetime = get_ems().start_datetime
        start_day_hour = start_datetime.in_timezone(self.config.general.timezone).hour
        plan = EnergyManagementPlan(
            id=f"plan-genetic@{to_datetime(as_string=True)}",
            generated_at=to_datetime(),
            instructions=[],
            comment="Energy management plan derived from GeneticSolution.",
        )

        # Add battery instructions (fill rate based control)
        last_operation_mode: Optional[str] = None
        last_operation_mode_factor: Optional[float] = None
        resource_id = "battery1"
        # ac_charge, dc_charge, discharge_allowed start at hour 0 of start day
        logger.debug("BAT: {} - {}", resource_id, self.ac_charge[start_day_hour:])
        for hour_idx, rate in enumerate(self.ac_charge):
            if hour_idx < start_day_hour:
                continue
            operation_mode, operation_mode_factor = self._battery_operation_from_solution(
                self.ac_charge[hour_idx],
                self.dc_charge[hour_idx],
                bool(self.discharge_allowed[hour_idx]),
            )
            if (
                operation_mode == last_operation_mode
                and operation_mode_factor == last_operation_mode_factor
            ):
                # Skip, we already added the instruction
                continue
            last_operation_mode = operation_mode
            last_operation_mode_factor = operation_mode_factor
            execution_time = start_datetime.add(hours=hour_idx - start_day_hour)
            plan.add_instruction(
                FRBCInstruction(
                    resource_id=resource_id,
                    execution_time=execution_time,
                    actuator_id=resource_id,
                    operation_mode_id=operation_mode,
                    operation_mode_factor=operation_mode_factor,
                )
            )

        # Add EV battery instructions (fill rate based control)
        # eautocharge_hours_float start at hour 0 of start day
        if self.eauto_obj:
            resource_id = self.eauto_obj.device_id
            if self.eautocharge_hours_float is None:
                # Electric vehicle is full enough. No load times.
                logger.debug("EV: {} - SoC >= min, no optimization", resource_id)
                plan.add_instruction(
                    FRBCInstruction(
                        resource_id=resource_id,
                        execution_time=start_datetime,
                        actuator_id=resource_id,
                        operation_mode_id=BatteryOperationMode.IDLE,
                        operation_mode_factor=1.0,
                    )
                )
            else:
                last_operation_mode = None
                last_operation_mode_factor = None
                logger.debug(
                    "EV: {} - {}", resource_id, self.eautocharge_hours_float[start_day_hour:]
                )
                for hour_idx, rate in enumerate(self.eautocharge_hours_float):
                    if hour_idx < start_day_hour:
                        continue
                    operation_mode, operation_mode_factor = self._battery_operation_from_solution(
                        rate, 0.0, False
                    )
                    if (
                        operation_mode == last_operation_mode
                        and operation_mode_factor == last_operation_mode_factor
                    ):
                        # Skip, we already added the instruction
                        continue
                    last_operation_mode = operation_mode
                    last_operation_mode_factor = operation_mode_factor
                    execution_time = start_datetime.add(hours=hour_idx - start_day_hour)
                    plan.add_instruction(
                        FRBCInstruction(
                            resource_id=resource_id,
                            execution_time=execution_time,
                            actuator_id=resource_id,
                            operation_mode_id=operation_mode,
                            operation_mode_factor=operation_mode_factor,
                        )
                    )

        # Add home appliance instructions (demand driven based control)
        if self.washingstart:
            resource_id = "homeappliance1"
            operation_mode = ApplianceOperationMode.RUN  # type: ignore[assignment]
            operation_mode_factor = 1.0
            execution_time = start_datetime.add(hours=self.washingstart - start_day_hour)
            plan.add_instruction(
                DDBCInstruction(
                    resource_id=resource_id,
                    execution_time=execution_time,
                    actuator_id=resource_id,
                    operation_mode_id=operation_mode,
                    operation_mode_factor=operation_mode_factor,
                )
            )

        return plan
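# Editor's note: the last_operation_mode bookkeeping above is effectively a
# run-length compression of the hourly schedule - an instruction is only
# emitted when mode or factor changes. A standalone sketch of the same idea:
def compress_schedule(schedule: list[tuple[str, float]]) -> list[tuple[int, str, float]]:
    instructions = []
    last = None
    for hour, entry in enumerate(schedule):
        if entry == last:
            continue  # unchanged, no new instruction
        last = entry
        instructions.append((hour, *entry))
    return instructions

print(compress_schedule([("idle", 1.0), ("idle", 1.0), ("forced_charge", 0.5)]))
# [(0, 'idle', 1.0), (2, 'forced_charge', 0.5)]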
@@ -1,168 +1,42 @@
from typing import Optional, Union
from typing import List, Optional

from pydantic import Field, model_validator
from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.pydantic import (
from akkudoktoreos.core.logging import get_logger
    PydanticBaseModel,
    PydanticDateTimeDataFrame,
)
from akkudoktoreos.utils.datetimeutil import DateTime

logger = get_logger(__name__)
class GeneticCommonSettings(SettingsBaseModel):
    """General Genetic Optimization Algorithm Configuration."""

    individuals: Optional[int] = Field(
        default=300,
        ge=10,
        json_schema_extra={
            "description": "Number of individuals (solutions) to generate for the (initial) generation [>= 10]. Defaults to 300.",
            "examples": [300],
        },
    )

    generations: Optional[int] = Field(
        default=400,
        ge=10,
        json_schema_extra={
            "description": "Number of generations to evaluate the optimal solution [>= 10]. Defaults to 400.",
            "examples": [400],
        },
    )

    seed: Optional[int] = Field(
        default=None,
        ge=0,
        json_schema_extra={
            "description": "Fixed seed for genetic algorithm. Defaults to 'None' which means random seed.",
            "examples": [None],
        },
    )

    penalties: Optional[dict[str, Union[float, int, str]]] = Field(
        default=None,
        json_schema_extra={
            "description": "A dictionary of penalty function parameters consisting of a penalty function parameter name and the associated value.",
            "examples": [
                {"ev_soc_miss": 10},
            ],
        },
    )


class OptimizationCommonSettings(SettingsBaseModel):
class OptimizationCommonSettings(SettingsBaseModel):
    """General Optimization Configuration."""
    """Base configuration for optimization settings.

    horizon_hours: Optional[int] = Field(
    Attributes:
        default=24,
        optimization_hours (int): Number of hours for optimizations.
        ge=0,
    """
        json_schema_extra={
            "description": "The general time window within which the energy optimization goal shall be achieved [h]. Defaults to 24 hours.",
    optimization_hours: Optional[int] = Field(
            "examples": [24],
        default=24, ge=0, description="Number of hours into the future for optimizations."
        },
    )
    )

    interval: Optional[int] = Field(
    optimization_penalty: Optional[int] = Field(
        default=3600,
        default=10, description="Penalty factor used in optimization."
        ge=15 * 60,
        le=60 * 60,
        json_schema_extra={
            "description": "The optimization interval [sec].",
            "examples": [60 * 60, 15 * 60],
        },
    )
    )

    algorithm: Optional[str] = Field(
    optimization_ev_available_charge_rates_percent: Optional[List[float]] = Field(
        default="GENETIC",
        default=[
        json_schema_extra={"description": "The optimization algorithm.", "examples": ["GENETIC"]},
            0.0,
    )
            6.0 / 16.0,
            # 7.0 / 16.0,
    genetic: Optional[GeneticCommonSettings] = Field(
            8.0 / 16.0,
        default=None,
            # 9.0 / 16.0,
        json_schema_extra={
            10.0 / 16.0,
            "description": "Genetic optimization algorithm configuration.",
            # 11.0 / 16.0,
            "examples": [{"individuals": 400, "seed": None, "penalties": {"ev_soc_miss": 10}}],
            12.0 / 16.0,
        },
            # 13.0 / 16.0,
    )
            14.0 / 16.0,
            # 15.0 / 16.0,
    @model_validator(mode="after")
            1.0,
    def _enforce_algorithm_configuration(self) -> "OptimizationCommonSettings":
        ],
        """Ensure algorithm default configuration is set."""
        description="Charge rates available for the EV in percent of maximum charge.",
        if self.algorithm is not None:
            if self.algorithm.lower() == "genetic" and self.genetic is None:
                self.genetic = GeneticCommonSettings()
        return self


class OptimizationSolution(PydanticBaseModel):
    """General Optimization Solution."""

    id: str = Field(
        ..., json_schema_extra={"description": "Unique ID for the optimization solution."}
    )

    generated_at: DateTime = Field(
        ..., json_schema_extra={"description": "Timestamp when the solution was generated."}
    )

    comment: Optional[str] = Field(
        default=None,
        json_schema_extra={"description": "Optional comment or annotation for the solution."},
    )

    valid_from: Optional[DateTime] = Field(
        default=None, json_schema_extra={"description": "Start time of the optimization solution."}
    )

    valid_until: Optional[DateTime] = Field(
        default=None, json_schema_extra={"description": "End time of the optimization solution."}
    )

    total_losses_energy_wh: float = Field(
        json_schema_extra={"description": "The total losses in watt-hours over the entire period."}
    )

    total_revenues_amt: float = Field(
        json_schema_extra={"description": "The total revenues [money amount]."}
    )

    total_costs_amt: float = Field(
        json_schema_extra={"description": "The total costs [money amount]."}
    )

    fitness_score: set[float] = Field(
        json_schema_extra={"description": "The fitness score as a set of fitness values."}
    )

    prediction: PydanticDateTimeDataFrame = Field(
        json_schema_extra={
            "description": (
                "Datetime data frame with time series prediction data per optimization interval:"
                "- pv_energy_wh: PV energy prediction (positive) in wh"
                "- elec_price_amt_kwh: Electricity price prediction in money per kwh"
                "- feed_in_tariff_amt_kwh: Feed in tariff prediction in money per kwh"
                "- weather_temp_air_celcius: Temperature in °C"
                "- loadforecast_energy_wh: Load mean energy prediction in wh"
                "- loadakkudoktor_std_energy_wh: Load energy standard deviation prediction in wh"
                "- loadakkudoktor_mean_energy_wh: Load mean energy prediction in wh"
            )
        }
    )

    solution: PydanticDateTimeDataFrame = Field(
        json_schema_extra={
            "description": (
                "Datetime data frame with time series solution data per optimization interval:"
                "- load_energy_wh: Load of all energy consumers in wh"
                "- grid_energy_wh: Grid energy feed in (negative) or consumption (positive) in wh"
                "- costs_amt: Costs in money amount"
                "- revenue_amt: Revenue in money amount"
                "- losses_energy_wh: Energy losses in wh"
                "- <device-id>_operation_mode_id: Operation mode id of the device."
                "- <device-id>_operation_mode_factor: Operation mode factor of the device."
                "- <device-id>_soc_factor: State of charge of a battery/ electric vehicle device as factor of total capacity."
                "- <device-id>_energy_wh: Energy consumption (positive) of a device in wh."
            )
        }
    )
    )
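# Editor's note: the hunk above swaps the nested optimization config for flat
# fields, so call sites change their access path accordingly. Sketch of the
# difference only; `config` stands in for a loaded EOS configuration object.
# Nested layout (left-hand side of the hunk):
#     config.optimization.horizon_hours        # e.g. 24
#     config.optimization.genetic.individuals  # e.g. 300
# Flat layout (right-hand side of the hunk):
#     config.optimization_hours                # e.g. 24
#     config.optimization_penalty              # e.g. 10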
@@ -2,14 +2,14 @@

from pydantic import ConfigDict
from pydantic import ConfigDict

from akkudoktoreos.core.coreabc import (
from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin
    ConfigMixin,
from akkudoktoreos.core.logging import get_logger
    EnergyManagementSystemMixin,
from akkudoktoreos.core.pydantic import PydanticBaseModel
    PredictionMixin,
)
logger = get_logger(__name__)


class OptimizationBase(ConfigMixin, PredictionMixin, EnergyManagementSystemMixin):
class OptimizationBase(ConfigMixin, PredictionMixin, PydanticBaseModel):
    """Base class for handling optimization data.
    """Base class for handling optimization data.

    Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
    Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
@@ -1,77 +1,14 @@
from typing import Optional
from typing import Optional

from pydantic import Field, field_validator
from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
from akkudoktoreos.prediction.prediction import get_prediction

prediction_eos = get_prediction()

# Valid elecprice providers
elecprice_providers = [
    provider.provider_id()
    for provider in prediction_eos.providers
    if isinstance(provider, ElecPriceProvider)
]


class ElecPriceCommonProviderSettings(SettingsBaseModel):
    """Electricity Price Prediction Provider Configuration."""

    ElecPriceImport: Optional[ElecPriceImportCommonSettings] = Field(
        default=None,
        json_schema_extra={"description": "ElecPriceImport settings", "examples": [None]},
    )


class ElecPriceCommonSettings(SettingsBaseModel):
class ElecPriceCommonSettings(SettingsBaseModel):
    """Electricity Price Prediction Configuration."""
    elecprice_provider: Optional[str] = Field(
        default=None, description="Electricity price provider id of provider to be used."
    provider: Optional[str] = Field(
        default=None,
        json_schema_extra={
            "description": "Electricity price provider id of provider to be used.",
            "examples": ["ElecPriceAkkudoktor"],
        },
    )
    )
    charges_kwh: Optional[float] = Field(
    elecprice_charges_kwh: Optional[float] = Field(
        default=None,
        default=None, ge=0, description="Electricity price charges (€/kWh)."
        ge=0,
        json_schema_extra={
            "description": "Electricity price charges [€/kWh]. Will be added to variable market price.",
            "examples": [0.21],
        },
    )
    )
    vat_rate: Optional[float] = Field(
        default=1.19,
        ge=0,
        json_schema_extra={
            "description": "VAT rate factor applied to electricity price when charges are used.",
            "examples": [1.19],
        },
    )

    provider_settings: ElecPriceCommonProviderSettings = Field(
        default_factory=ElecPriceCommonProviderSettings,
        json_schema_extra={
            "description": "Provider settings",
            "examples": [
                # Example 1: Empty/default settings (all providers None)
                {
                    "ElecPriceImport": None,
                },
            ],
        },
    )

    # Validators
    @field_validator("provider", mode="after")
    @classmethod
    def validate_provider(cls, value: Optional[str]) -> Optional[str]:
        if value is None or value in elecprice_providers:
            return value
        raise ValueError(
            f"Provider '{value}' is not a valid electricity price provider: {elecprice_providers}."
        )
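# Editor's note: validate_provider above gates the provider id against the
# registered ElecPriceProvider ids. Reduced to a standalone check (the
# provider list here is illustrative):
from typing import Optional

ELECPRICE_PROVIDERS = ["ElecPriceAkkudoktor", "ElecPriceImport"]

def validate_provider(value: Optional[str]) -> Optional[str]:
    if value is None or value in ELECPRICE_PROVIDERS:
        return value
    raise ValueError(
        f"Provider '{value}' is not a valid electricity price provider: {ELECPRICE_PROVIDERS}."
    )

assert validate_provider("ElecPriceAkkudoktor") == "ElecPriceAkkudoktor"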
@@ -9,8 +9,11 @@ from typing import List, Optional

from pydantic import Field, computed_field
from pydantic import Field, computed_field

from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord

logger = get_logger(__name__)


class ElecPriceDataRecord(PredictionRecord):
class ElecPriceDataRecord(PredictionRecord):
    """Represents an electricity price data record containing various price attributes at a specific datetime.
    """Represents an electricity price data record containing various price attributes at a specific datetime.
@@ -21,7 +24,7 @@ class ElecPriceDataRecord(PredictionRecord):
    """
    """

    elecprice_marketprice_wh: Optional[float] = Field(
    elecprice_marketprice_wh: Optional[float] = Field(
        None, json_schema_extra={"description": "Electricity market price per Wh (€/Wh)"}
        None, description="Electricity market price per Wh (€/Wh)"
    )
    )

    # Computed fields
    # Computed fields
@@ -46,21 +49,20 @@ class ElecPriceProvider(PredictionProvider):
    electricity price_provider (str): Prediction provider for electricity price.
    electricity price_provider (str): Prediction provider for electricity price.

    Attributes:
    Attributes:
        hours (int, optional): The number of hours into the future for which predictions are generated.
        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
        historic_hours (int, optional): The number of past hours for which historical data is retained.
        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
        latitude (float, optional): The latitude in degrees, must be within -90 to 90.
        latitude (float, optional): The latitude in degrees, must be within -90 to 90.
        longitude (float, optional): The longitude in degrees, must be within -180 to 180.
        longitude (float, optional): The longitude in degrees, must be within -180 to 180.
        start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
        start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
        end_datetime (datetime, computed): The datetime representing the end of the prediction range,
        end_datetime (datetime, computed): The datetime representing the end of the prediction range,
            calculated based on `start_datetime` and `hours`.
            calculated based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
        keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
            based on `start_datetime` and `historic_hours`.
            based on `start_datetime` and `prediction_historic_hours`.
    """
    """

    # overload
    # overload
    records: List[ElecPriceDataRecord] = Field(
    records: List[ElecPriceDataRecord] = Field(
        default_factory=list,
        default_factory=list, description="List of ElecPriceDataRecord records"
        json_schema_extra={"description": "List of ElecPriceDataRecord records"},
    )
    )

    @classmethod
    @classmethod
@@ -69,4 +71,4 @@ class ElecPriceProvider(PredictionProvider):
        return "ElecPriceProvider"
        return "ElecPriceProvider"

    def enabled(self) -> bool:
    def enabled(self) -> bool:
        return self.provider_id() == self.config.elecprice.provider
        return self.provider_id() == self.config.elecprice_provider
@@ -11,15 +11,17 @@ from typing import Any, List, Optional, Union
import numpy as np
import numpy as np
import pandas as pd
import pandas as pd
import requests
import requests
from loguru import logger
from pydantic import ValidationError
from pydantic import ValidationError
from statsmodels.tsa.holtwinters import ExponentialSmoothing
from statsmodels.tsa.holtwinters import ExponentialSmoothing

from akkudoktoreos.core.cache import cache_in_file
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration

logger = get_logger(__name__)


class AkkudoktorElecPriceMeta(PydanticBaseModel):
class AkkudoktorElecPriceMeta(PydanticBaseModel):
    start_timestamp: str
    start_timestamp: str
@@ -52,11 +54,11 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
    of hours into the future and retains historical data.
    of hours into the future and retains historical data.

    Attributes:
    Attributes:
        hours (int, optional): Number of hours in the future for the forecast.
        prediction_hours (int, optional): Number of hours in the future for the forecast.
        historic_hours (int, optional): Number of past hours for retaining data.
        prediction_historic_hours (int, optional): Number of past hours for retaining data.
        start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
        start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.

    Methods:
    Methods:
        provider_id(): Returns a unique identifier for the provider.
        provider_id(): Returns a unique identifier for the provider.
@@ -102,18 +104,17 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
        - add the file cache again.
        - add the file cache again.
        """
        """
        source = "https://api.akkudoktor.net"
        source = "https://api.akkudoktor.net"
        if not self.ems_start_datetime:
        assert self.start_datetime  # mypy fix
            raise ValueError(f"Start DateTime not set: {self.ems_start_datetime}")
        # Try to take data from 5 weeks back for prediction
        # Try to take data from 5 weeks back for prediction
        date = to_datetime(self.ems_start_datetime - to_duration("35 days"), as_string="YYYY-MM-DD")
        date = to_datetime(self.start_datetime - to_duration("35 days"), as_string="YYYY-MM-DD")
        last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
        last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
        url = f"{source}/prices?start={date}&end={last_date}&tz={self.config.general.timezone}"
        url = f"{source}/prices?start={date}&end={last_date}&tz={self.config.timezone}"
        response = requests.get(url, timeout=10)
        response = requests.get(url)
        logger.debug(f"Response from {url}: {response}")
        logger.debug(f"Response from {url}: {response}")
        response.raise_for_status()  # Raise an error for bad responses
        response.raise_for_status()  # Raise an error for bad responses
        akkudoktor_data = self._validate_data(response.content)
        akkudoktor_data = self._validate_data(response.content)
        # We are working on fresh data (no cache), report update time
        # We are working on fresh data (no cache), report update time
        self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
        self.update_datetime = to_datetime(in_timezone=self.config.timezone)
        return akkudoktor_data
        return akkudoktor_data

    def _cap_outliers(self, data: np.ndarray, sigma: int = 2) -> np.ndarray:
    def _cap_outliers(self, data: np.ndarray, sigma: int = 2) -> np.ndarray:
@@ -124,16 +125,18 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
        capped_data = data.clip(min=lower_bound, max=upper_bound)
        capped_data = data.clip(min=lower_bound, max=upper_bound)
        return capped_data
        return capped_data

    def _predict_ets(self, history: np.ndarray, seasonal_periods: int, hours: int) -> np.ndarray:
    def _predict_ets(
        self, history: np.ndarray, seasonal_periods: int, prediction_hours: int
    ) -> np.ndarray:
        clean_history = self._cap_outliers(history)
        clean_history = self._cap_outliers(history)
        model = ExponentialSmoothing(
        model = ExponentialSmoothing(
            clean_history, seasonal="add", seasonal_periods=seasonal_periods
            clean_history, seasonal="add", seasonal_periods=seasonal_periods
        ).fit()
        ).fit()
        return model.forecast(hours)
        return model.forecast(prediction_hours)

    def _predict_median(self, history: np.ndarray, hours: int) -> np.ndarray:
    def _predict_median(self, history: np.ndarray, prediction_hours: int) -> np.ndarray:
        clean_history = self._cap_outliers(history)
        clean_history = self._cap_outliers(history)
        return np.full(hours, np.median(clean_history))
        return np.full(prediction_hours, np.median(clean_history))

    def _update_data(
    def _update_data(
        self, force_update: Optional[bool] = False
        self, force_update: Optional[bool] = False
@@ -147,20 +150,19 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
        """
        """
        # Get Akkudoktor electricity price data
        # Get Akkudoktor electricity price data
        akkudoktor_data = self._request_forecast(force_update=force_update)  # type: ignore
        akkudoktor_data = self._request_forecast(force_update=force_update)  # type: ignore
        if not self.ems_start_datetime:
        assert self.start_datetime  # mypy fix
            raise ValueError(f"Start DateTime not set: {self.ems_start_datetime}")

        # Assumption that all lists are the same length and are ordered chronologically
        # Assumption that all lists are the same length and are ordered chronologically
        # in ascending order and have the same timestamps.
        # in ascending order and have the same timestamps.

        # Get charges_kwh in wh
        # Get elecprice_charges_kwh in wh
        charges_wh = (self.config.elecprice.charges_kwh or 0) / 1000
        charges_wh = (self.config.elecprice_charges_kwh or 0) / 1000

        highest_orig_datetime = None  # newest datetime from the API after which we want to update
        highest_orig_datetime = None  # newest datetime from the API after which we want to update
        series_data = pd.Series(dtype=float)  # Initialize an empty series
        series_data = pd.Series(dtype=float)  # Initialize an empty series

        for value in akkudoktor_data.values:
        for value in akkudoktor_data.values:
            orig_datetime = to_datetime(value.start, in_timezone=self.config.general.timezone)
            orig_datetime = to_datetime(value.start, in_timezone=self.config.timezone)
            if highest_orig_datetime is None or orig_datetime > highest_orig_datetime:
            if highest_orig_datetime is None or orig_datetime > highest_orig_datetime:
                highest_orig_datetime = orig_datetime
                highest_orig_datetime = orig_datetime
@@ -178,29 +180,30 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
        )
        )

        amount_datasets = len(self.records)
        amount_datasets = len(self.records)
        if not highest_orig_datetime:  # mypy fix
        assert highest_orig_datetime  # mypy fix
            error_msg = f"Highest original datetime not available: {highest_orig_datetime}"
            logger.error(error_msg)
            raise ValueError(error_msg)

        # some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
        # some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
        needed_hours = int(
        needed_prediction_hours = int(
            self.config.prediction.hours
            self.config.prediction_hours
            - ((highest_orig_datetime - self.ems_start_datetime).total_seconds() // 3600)
            - ((highest_orig_datetime - self.start_datetime).total_seconds() // 3600)
        )
        )

        if needed_hours <= 0:
        if needed_prediction_hours <= 0:
            logger.warning(
            logger.warning(
                f"No prediction needed. needed_hours={needed_hours}, hours={self.config.prediction.hours}, highest_orig_datetime {highest_orig_datetime}, start_datetime {self.ems_start_datetime}"
                f"No prediction needed. needed_prediction_hours={needed_prediction_hours}, prediction_hours={self.config.prediction_hours}, highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
            )  # this might keep data longer than self.ems_start_datetime + self.config.prediction.hours in the records
            )  # this might keep data longer than self.start_datetime + self.config.prediction_hours in the records
            return
            return

        if amount_datasets > 800:  # we do the full ets with seasons of 1 week
        if amount_datasets > 800:  # we do the full ets with seasons of 1 week
            prediction = self._predict_ets(history, seasonal_periods=168, hours=needed_hours)
            prediction = self._predict_ets(
                history, seasonal_periods=168, prediction_hours=needed_prediction_hours
            )
        elif amount_datasets > 168:  # not enough data to do seasons of 1 week, but enough for 1 day
        elif amount_datasets > 168:  # not enough data to do seasons of 1 week, but enough for 1 day
            prediction = self._predict_ets(history, seasonal_periods=24, hours=needed_hours)
            prediction = self._predict_ets(
                history, seasonal_periods=24, prediction_hours=needed_prediction_hours
            )
        elif amount_datasets > 0:  # not enough data for ets, do median
        elif amount_datasets > 0:  # not enough data for ets, do median
            prediction = self._predict_median(history, hours=needed_hours)
            prediction = self._predict_median(history, prediction_hours=needed_prediction_hours)
        else:
        else:
            logger.error("No data available for prediction")
            logger.error("No data available for prediction")
            raise ValueError("No data available")
            raise ValueError("No data available")
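# Editor's note: both _predict_ets variants wrap statsmodels' additive-seasonal
# Holt-Winters fit. A self-contained sketch of the same call on synthetic
# hourly data (the data is illustrative; the statsmodels API is real):
import numpy as np
from statsmodels.tsa.holtwinters import ExponentialSmoothing

rng = np.random.default_rng(0)
hours = np.arange(24 * 14)  # two weeks of hourly history
history = 0.25 + 0.05 * np.sin(2 * np.pi * hours / 24) + rng.normal(0, 0.01, hours.size)

model = ExponentialSmoothing(history, seasonal="add", seasonal_periods=24).fit()
forecast = model.forecast(24)  # predict the next 24 hours
print(forecast[:4])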
@@ -1,257 +0,0 @@
-"""Retrieves and processes electricity price forecast data from Energy-Charts.
-
-This module provides classes and mappings to manage electricity price data obtained from the
-Energy-Charts API. The data is mapped to the `ElecPriceDataRecord` format, enabling
-consistent access to forecasted and historical electricity price attributes.
-"""
-
-from datetime import datetime
-from typing import Any, List, Optional, Union
-
-import numpy as np
-import pandas as pd
-import requests
-from loguru import logger
-from pydantic import ValidationError
-from statsmodels.tsa.holtwinters import ExponentialSmoothing
-
-from akkudoktoreos.core.cache import cache_in_file
-from akkudoktoreos.core.pydantic import PydanticBaseModel
-from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
-from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
-
-
-class EnergyChartsElecPrice(PydanticBaseModel):
-    license_info: str
-    unix_seconds: List[int]
-    price: List[float]
-    unit: str
-    deprecated: bool
-
-
-class ElecPriceEnergyCharts(ElecPriceProvider):
-    """Fetch and process electricity price forecast data from Energy-Charts.
-
-    ElecPriceEnergyCharts is a singleton-based class that retrieves electricity price forecast
-    data from the Energy-Charts API and maps it to `ElecPriceDataRecord` fields, applying any
-    necessary scaling or unit corrections. It manages the forecast over a range of hours into
-    the future and retains historical data.
-
-    Attributes:
-        hours (int, optional): Number of hours in the future for the forecast.
-        historic_hours (int, optional): Number of past hours for retaining data.
-        start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
-        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
-        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
-
-    Methods:
-        provider_id(): Returns a unique identifier for the provider.
-        _request_forecast(): Fetches the forecast from the Energy-Charts API.
-        _update_data(): Processes and updates forecast data from Energy-Charts in ElecPriceDataRecord format.
-    """
-
-    highest_orig_datetime: Optional[datetime] = None
-
-    @classmethod
-    def provider_id(cls) -> str:
-        """Return the unique identifier for the Energy-Charts provider."""
-        return "ElecPriceEnergyCharts"
-
-    @classmethod
-    def _validate_data(cls, json_str: Union[bytes, Any]) -> EnergyChartsElecPrice:
-        """Validate Energy-Charts electricity price forecast data."""
-        try:
-            energy_charts_data = EnergyChartsElecPrice.model_validate_json(json_str)
-        except ValidationError as e:
-            error_msg = ""
-            for error in e.errors():
-                field = " -> ".join(str(x) for x in error["loc"])
-                message = error["msg"]
-                error_type = error["type"]
-                error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
-            logger.error(f"Energy-Charts schema change: {error_msg}")
-            raise ValueError(error_msg)
-        return energy_charts_data
-
-    @cache_in_file(with_ttl="1 hour")
-    def _request_forecast(self, start_date: Optional[str] = None) -> EnergyChartsElecPrice:
-        """Fetch electricity price forecast data from the Energy-Charts API.
-
-        This method sends a request to the Energy-Charts API to retrieve forecast data for a
-        specified date range. The response is validated and returned for further processing.
-
-        Returns:
-            EnergyChartsElecPrice: The validated response from the Energy-Charts API containing
-            forecast data.
-
-        Raises:
-            ValueError: If the API response does not include the expected electricity price data.
-        """
-        source = "https://api.energy-charts.info"
-        if start_date is None:
-            # Try to take data from 5 weeks back for prediction
-            start_date = to_datetime(
-                self.ems_start_datetime - to_duration("35 days"), as_string="YYYY-MM-DD"
-            )
-
-        last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
-        url = f"{source}/price?bzn=DE-LU&start={start_date}&end={last_date}"
-        response = requests.get(url, timeout=30)
-        logger.debug(f"Response from {url}: {response}")
-        response.raise_for_status()  # Raise an error for bad responses
-        energy_charts_data = self._validate_data(response.content)
-        # We are working on fresh data (no cache), report update time
-        self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
-        return energy_charts_data
-
-    def _parse_data(self, energy_charts_data: EnergyChartsElecPrice) -> pd.Series:
-        # Assumption: all lists are the same length, are ordered chronologically in
-        # ascending order, and have the same timestamps.
-
-        # Get charges_kwh in Wh
-        charges_wh = (self.config.elecprice.charges_kwh or 0) / 1000
-
-        # Initialize
-        highest_orig_datetime = None  # newest datetime from the API, after which we want to update
-        series_data = pd.Series(dtype=float)  # Initialize an empty series
-
-        # Iterate over timestamps and prices together
-        for unix_sec, price_eur_per_mwh in zip(
-            energy_charts_data.unix_seconds, energy_charts_data.price
-        ):
-            orig_datetime = to_datetime(unix_sec, in_timezone=self.config.general.timezone)
-
-            # Track the latest datetime
-            if highest_orig_datetime is None or orig_datetime > highest_orig_datetime:
-                highest_orig_datetime = orig_datetime
-
-            # Convert EUR/MWh to EUR/Wh; apply charges and VAT if charges > 0
-            if charges_wh > 0:
-                vat_rate = self.config.elecprice.vat_rate or 1.19
-                price_wh = ((price_eur_per_mwh / 1_000_000) + charges_wh) * vat_rate
-            else:
-                price_wh = price_eur_per_mwh / 1_000_000
-
-            # Store in series
-            series_data.at[orig_datetime] = price_wh
-
-        return series_data
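A worked example of the conversion at the end of `_parse_data`, using an assumed spot price and charge (the 1.19 VAT multiplier is the default from the code above; the other numbers are illustrative):

price_eur_per_mwh = 120.0        # spot price from the API (EUR/MWh)
charges_kwh = 0.18               # assumed grid charges etc. (EUR/kWh)
charges_wh = charges_kwh / 1000  # -> 0.00018 EUR/Wh
vat_rate = 1.19                  # default VAT multiplier from the code above

price_wh = ((price_eur_per_mwh / 1_000_000) + charges_wh) * vat_rate
print(f"{price_wh:.6f} EUR/Wh")  # 0.000357 EUR/Wh, i.e. 0.357 EUR/kWh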
-    def _cap_outliers(self, data: np.ndarray, sigma: int = 2) -> np.ndarray:
-        mean = data.mean()
-        std = data.std()
-        lower_bound = mean - sigma * std
-        upper_bound = mean + sigma * std
-        capped_data = data.clip(min=lower_bound, max=upper_bound)
-        return capped_data
-
-    def _predict_ets(self, history: np.ndarray, seasonal_periods: int, hours: int) -> np.ndarray:
-        clean_history = self._cap_outliers(history)
-        model = ExponentialSmoothing(
-            clean_history, seasonal="add", seasonal_periods=seasonal_periods
-        ).fit()
-        return model.forecast(hours)
-
-    def _predict_median(self, history: np.ndarray, hours: int) -> np.ndarray:
-        clean_history = self._cap_outliers(history)
-        return np.full(hours, np.median(clean_history))
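`_cap_outliers` is plain sigma clipping: values outside mean ± 2·std are pulled to the band edge before fitting, so a single price spike cannot dominate the ETS level. A minimal numpy demonstration:

import numpy as np

data = np.array([0.30, 0.31, 0.29, 0.32, 2.50, 0.30, 0.28])  # one spike
mean, std = data.mean(), data.std()
capped = data.clip(min=mean - 2 * std, max=mean + 2 * std)
print(capped.round(3))  # the 2.50 spike is capped near 2.154, the rest is untouched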
-    def _update_data(
-        self, force_update: Optional[bool] = False
-    ) -> None:  # tuple[np.ndarray, np.ndarray, np.ndarray]:
-        """Update forecast data in the ElecPriceDataRecord format.
-
-        Retrieves data from Energy-Charts, maps each Energy-Charts field to the corresponding
-        `ElecPriceDataRecord` and applies any necessary scaling.
-
-        The final mapped and processed data is inserted into the sequence as `ElecPriceDataRecord`.
-        """
-        # New prices are available every day at 14:00
-        now = pd.Timestamp.now(tz=self.config.general.timezone)
-        midnight = now.normalize()
-        hours_ahead = 23 if now.time() < pd.Timestamp("14:00").time() else 47
-        end = midnight + pd.Timedelta(hours=hours_ahead)
-
-        if not self.ems_start_datetime:
-            raise ValueError(f"Start DateTime not set: {self.ems_start_datetime}")
-
-        # Determine whether an update is needed and for how many past days
-        past_days = 35
-        if self.highest_orig_datetime:
-            history_series = self.key_to_series(
-                key="elecprice_marketprice_wh", start_datetime=self.ems_start_datetime
-            )
-            # If history already reaches back to start_datetime, no past days are needed
-            if history_series.index.min() <= self.ems_start_datetime:
-                past_days = 0
-
-            needs_update = end > self.highest_orig_datetime
-        else:
-            needs_update = True
-
-        if needs_update:
-            logger.info(
-                f"Update ElecPriceEnergyCharts is needed, last in history: {self.highest_orig_datetime}"
-            )
-            # Set start_date; try to take data from 5 weeks back for prediction
-            start_date = to_datetime(
-                self.ems_start_datetime - to_duration(f"{past_days} days"), as_string="YYYY-MM-DD"
-            )
-            # Get Energy-Charts electricity price data
-            energy_charts_data = self._request_forecast(
-                start_date=start_date, force_update=force_update
-            )  # type: ignore
-
-            # Parse and store data
-            series_data = self._parse_data(energy_charts_data)
-            self.highest_orig_datetime = series_data.index.max()
-            self.key_from_series("elecprice_marketprice_wh", series_data)
-        else:
-            logger.info(
-                f"No update of ElecPriceEnergyCharts is needed, last in history: {self.highest_orig_datetime}"
-            )
-
-        # Generate history array for prediction
-        history = self.key_to_array(
-            key="elecprice_marketprice_wh",
-            end_datetime=self.highest_orig_datetime,
-            fill_method="linear",
-        )
-
-        amount_datasets = len(self.records)
-        if not self.highest_orig_datetime:  # mypy fix
-            error_msg = f"Highest original datetime not available: {self.highest_orig_datetime}"
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-
-        # Some of our data is already in the future, so we need to predict less; if we got
-        # less data, we increase the prediction hours.
-        needed_hours = int(
-            self.config.prediction.hours
-            - ((self.highest_orig_datetime - self.ems_start_datetime).total_seconds() // 3600)
-        )
-
-        if needed_hours <= 0:
-            logger.warning(
-                f"No prediction needed. needed_hours={needed_hours}, "
-                f"hours={self.config.prediction.hours}, "
-                f"highest_orig_datetime {self.highest_orig_datetime}, "
-                f"start_datetime {self.ems_start_datetime}"
-            )  # this might keep data longer than start_datetime + prediction hours in the records
-            return
-
-        if amount_datasets > 800:  # we do the full ETS with seasons of 1 week
-            prediction = self._predict_ets(history, seasonal_periods=168, hours=needed_hours)
-        elif amount_datasets > 168:  # not enough data for seasons of 1 week, but enough for 1 day
-            prediction = self._predict_ets(history, seasonal_periods=24, hours=needed_hours)
-        elif amount_datasets > 0:  # not enough data for ETS, do median
-            prediction = self._predict_median(history, hours=needed_hours)
-        else:
-            logger.error("No data available for prediction")
-            raise ValueError("No data available")
-
-        # Write predictions into the records; update if they exist.
-        prediction_series = pd.Series(
-            data=prediction,
-            index=[
-                self.highest_orig_datetime + to_duration(f"{i + 1} hours")
-                for i in range(len(prediction))
-            ],
-        )
-        self.key_from_series("elecprice_marketprice_wh", prediction_series)
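The day-ahead window arithmetic at the top of `_update_data` can be checked in isolation: before 14:00 only today's prices (up to 23:00) are published, afterwards the day-ahead auction results extend the horizon to 47 hours past midnight. A sketch with an assumed timezone:

import pandas as pd

now = pd.Timestamp.now(tz="Europe/Berlin")  # assumed timezone for the example
midnight = now.normalize()
hours_ahead = 23 if now.time() < pd.Timestamp("14:00").time() else 47
end = midnight + pd.Timedelta(hours=hours_ahead)
print(end)  # today 23:00 before publication, tomorrow 23:00 after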
@@ -9,37 +9,34 @@ format, enabling consistent access to forecasted and historical elecprice attrib
 from pathlib import Path
 from typing import Optional, Union
 
-from loguru import logger
 from pydantic import Field, field_validator
 
 from akkudoktoreos.config.configabc import SettingsBaseModel
+from akkudoktoreos.core.logging import get_logger
 from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
 from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
 
+logger = get_logger(__name__)
 
 
 class ElecPriceImportCommonSettings(SettingsBaseModel):
     """Common settings for elecprice data import from file or JSON String."""
 
-    import_file_path: Optional[Union[str, Path]] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Path to the file to import elecprice data from.",
-            "examples": [None, "/path/to/prices.json"],
-        },
+    elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
+        default=None, description="Path to the file to import elecprice data from."
     )
 
-    import_json: Optional[str] = Field(
+    elecpriceimport_json: Optional[str] = Field(
         default=None,
-        json_schema_extra={
-            "description": "JSON string, dictionary of electricity price forecast value lists.",
-            "examples": ['{"elecprice_marketprice_wh": [0.0003384, 0.0003318, 0.0003284]}'],
-        },
+        description="JSON string, dictionary of electricity price forecast value lists.",
     )
 
     # Validators
-    @field_validator("import_file_path", mode="after")
+    @field_validator("elecpriceimport_file_path", mode="after")
     @classmethod
-    def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
+    def validate_elecpriceimport_file_path(
+        cls, value: Optional[Union[str, Path]]
+    ) -> Optional[Path]:
         if value is None:
             return None
         if isinstance(value, str):
@@ -65,16 +62,7 @@ class ElecPriceImport(ElecPriceProvider, PredictionImportProvider):
         return "ElecPriceImport"
 
     def _update_data(self, force_update: Optional[bool] = False) -> None:
-        if self.config.elecprice.provider_settings.ElecPriceImport is None:
-            logger.debug(f"{self.provider_id()} data update without provider settings.")
-            return
-        if self.config.elecprice.provider_settings.ElecPriceImport.import_file_path:
-            self.import_from_file(
-                self.config.elecprice.provider_settings.ElecPriceImport.import_file_path,
-                key_prefix="elecprice",
-            )
-        if self.config.elecprice.provider_settings.ElecPriceImport.import_json:
-            self.import_from_json(
-                self.config.elecprice.provider_settings.ElecPriceImport.import_json,
-                key_prefix="elecprice",
-            )
+        if self.config.elecpriceimport_file_path is not None:
+            self.import_from_file(self.config.elecpriceimport_file_path, key_prefix="elecprice")
+        if self.config.elecpriceimport_json is not None:
+            self.import_from_json(self.config.elecpriceimport_json, key_prefix="elecprice")
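On the head branch the import provider reads two flat config keys directly. A hypothetical payload for `elecpriceimport_json`, matching the documented example value (one EUR/Wh entry per forecast hour):

import json

payload = '{"elecprice_marketprice_wh": [0.0003384, 0.0003318, 0.0003284]}'
values = json.loads(payload)["elecprice_marketprice_wh"]
print([round(v * 1000, 4) for v in values])  # in EUR/kWh: [0.3384, 0.3318, 0.3284]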
@@ -1,67 +0,0 @@
-from typing import Optional
-
-from pydantic import Field, field_validator
-
-from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.prediction.feedintariffabc import FeedInTariffProvider
-from akkudoktoreos.prediction.feedintarifffixed import FeedInTariffFixedCommonSettings
-from akkudoktoreos.prediction.feedintariffimport import FeedInTariffImportCommonSettings
-from akkudoktoreos.prediction.prediction import get_prediction
-
-prediction_eos = get_prediction()
-
-# Valid feedintariff providers
-feedintariff_providers = [
-    provider.provider_id()
-    for provider in prediction_eos.providers
-    if isinstance(provider, FeedInTariffProvider)
-]
-
-
-class FeedInTariffCommonProviderSettings(SettingsBaseModel):
-    """Feed In Tariff Prediction Provider Configuration."""
-
-    FeedInTariffFixed: Optional[FeedInTariffFixedCommonSettings] = Field(
-        default=None,
-        json_schema_extra={"description": "FeedInTariffFixed settings", "examples": [None]},
-    )
-    FeedInTariffImport: Optional[FeedInTariffImportCommonSettings] = Field(
-        default=None,
-        json_schema_extra={"description": "FeedInTariffImport settings", "examples": [None]},
-    )
-
-
-class FeedInTariffCommonSettings(SettingsBaseModel):
-    """Feed In Tariff Prediction Configuration."""
-
-    provider: Optional[str] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Feed in tariff provider id of provider to be used.",
-            "examples": ["FeedInTariffFixed", "FeedInTariffImport"],
-        },
-    )
-
-    provider_settings: FeedInTariffCommonProviderSettings = Field(
-        default_factory=FeedInTariffCommonProviderSettings,
-        json_schema_extra={
-            "description": "Provider settings",
-            "examples": [
-                # Example 1: Empty/default settings (all providers None)
-                {
-                    "FeedInTariffFixed": None,
-                    "FeedInTariffImport": None,
-                },
-            ],
-        },
-    )
-
-    # Validators
-    @field_validator("provider", mode="after")
-    @classmethod
-    def validate_provider(cls, value: Optional[str]) -> Optional[str]:
-        if value is None or value in feedintariff_providers:
-            return value
-        raise ValueError(
-            f"Provider '{value}' is not a valid feed in tariff provider: {feedintariff_providers}."
-        )
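The validator in the removed settings module is a standard pydantic v2 idiom: check the configured id against a provider registry after parsing. A self-contained sketch with a hard-coded provider list in place of the registry lookup:

from typing import Optional
from pydantic import BaseModel, Field, field_validator

VALID_PROVIDERS = ["FeedInTariffFixed", "FeedInTariffImport"]  # stand-in registry

class Settings(BaseModel):
    provider: Optional[str] = Field(default=None)

    @field_validator("provider", mode="after")
    @classmethod
    def validate_provider(cls, value: Optional[str]) -> Optional[str]:
        if value is None or value in VALID_PROVIDERS:
            return value
        raise ValueError(f"Provider '{value}' is not valid: {VALID_PROVIDERS}.")

Settings(provider="FeedInTariffFixed")  # ok
# Settings(provider="Nope")             # raises ValidationError

Because the check runs with mode="after", the value has already passed the Optional[str] type validation when it reaches the callback.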
@@ -1,61 +0,0 @@
-"""Abstract and base classes for feed in tariff predictions.
-
-Notes:
-    - Ensure appropriate API keys or configurations are set up if required by external data sources.
-"""
-
-from abc import abstractmethod
-from typing import List, Optional
-
-from pydantic import Field, computed_field
-
-from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
-
-
-class FeedInTariffDataRecord(PredictionRecord):
-    """Represents a feed in tariff data record containing various price attributes at a specific datetime.
-
-    Attributes:
-        date_time (Optional[AwareDatetime]): The datetime of the record.
-    """
-
-    feed_in_tariff_wh: Optional[float] = Field(
-        None, json_schema_extra={"description": "Feed in tariff per Wh (€/Wh)"}
-    )
-
-    # Computed fields
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def feed_in_tariff_kwh(self) -> Optional[float]:
-        """Feed in tariff per kWh (€/kWh).
-
-        Convenience attribute calculated from `feed_in_tariff_wh`.
-        """
-        if self.feed_in_tariff_wh is None:
-            return None
-        return self.feed_in_tariff_wh * 1000.0
-
-
-class FeedInTariffProvider(PredictionProvider):
-    """Abstract base class for feed in tariff providers.
-
-    FeedInTariffProvider is a thread-safe singleton, ensuring only one instance of this class is created.
-
-    Configuration variables:
-        provider (str): Prediction provider for feed in tariff.
-    """
-
-    # overload
-    records: List[FeedInTariffDataRecord] = Field(
-        default_factory=list,
-        json_schema_extra={"description": "List of FeedInTariffDataRecord records"},
-    )
-
-    @classmethod
-    @abstractmethod
-    def provider_id(cls) -> str:
-        return "FeedInTariffProvider"
-
-    def enabled(self) -> bool:
-        return self.provider_id() == self.config.feedintariff.provider
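`feed_in_tariff_kwh` is a derived view of the stored €/Wh value; pydantic's `computed_field` exposes it in serialization and the JSON schema. A minimal standalone sketch (plain `BaseModel` instead of the real `PredictionRecord`):

from typing import Optional
from pydantic import BaseModel, computed_field

class Record(BaseModel):
    feed_in_tariff_wh: Optional[float] = None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def feed_in_tariff_kwh(self) -> Optional[float]:
        if self.feed_in_tariff_wh is None:
            return None
        return self.feed_in_tariff_wh * 1000.0

print(Record(feed_in_tariff_wh=0.000078).feed_in_tariff_kwh)  # 0.078 €/kWh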
@@ -1,50 +0,0 @@
-"""Provides feed in tariff data."""
-
-from typing import Optional
-
-from loguru import logger
-from pydantic import Field
-
-from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.prediction.feedintariffabc import FeedInTariffProvider
-from akkudoktoreos.utils.datetimeutil import to_datetime
-
-
-class FeedInTariffFixedCommonSettings(SettingsBaseModel):
-    """Common settings for the fixed feed in tariff."""
-
-    feed_in_tariff_kwh: Optional[float] = Field(
-        default=None,
-        ge=0,
-        json_schema_extra={
-            "description": "Electricity price feed in tariff [€/kWh].",
-            "examples": [0.078],
-        },
-    )
-
-
-class FeedInTariffFixed(FeedInTariffProvider):
-    """Fixed price feed in tariff data.
-
-    FeedInTariffFixed is a singleton-based class that provides a constant feed in tariff.
-    """
-
-    @classmethod
-    def provider_id(cls) -> str:
-        """Return the unique identifier for the FeedInTariffFixed provider."""
-        return "FeedInTariffFixed"
-
-    def _update_data(self, force_update: Optional[bool] = False) -> None:
-        error_msg = "Feed in tariff not provided"
-        try:
-            feed_in_tariff = (
-                self.config.feedintariff.provider_settings.FeedInTariffFixed.feed_in_tariff_kwh
-            )
-        except Exception:
-            logger.exception(error_msg)
-            raise ValueError(error_msg)
-        if feed_in_tariff is None:
-            logger.error(error_msg)
-            raise ValueError(error_msg)
-        feed_in_tariff_wh = feed_in_tariff / 1000
-        self.update_value(to_datetime(), "feed_in_tariff_wh", feed_in_tariff_wh)
@@ -1,80 +0,0 @@
-"""Retrieves feed in tariff forecast data from an import file.
-
-This module provides classes and mappings to manage feed in tariff data obtained from
-an import file. The data is mapped to the `FeedInTariffDataRecord` format, enabling consistent
-access to forecasted and historical feed in tariff attributes.
-"""
-
-from pathlib import Path
-from typing import Optional, Union
-
-from loguru import logger
-from pydantic import Field, field_validator
-
-from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.prediction.feedintariffabc import FeedInTariffProvider
-from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
-
-
-class FeedInTariffImportCommonSettings(SettingsBaseModel):
-    """Common settings for feed in tariff data import from file or JSON string."""
-
-    import_file_path: Optional[Union[str, Path]] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Path to the file to import feed in tariff data from.",
-            "examples": [None, "/path/to/feedintariff.json"],
-        },
-    )
-    import_json: Optional[str] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "JSON string, dictionary of feed in tariff forecast value lists.",
-            "examples": ['{"feed_in_tariff_wh": [0.000078, 0.000078, 0.000023]}'],
-        },
-    )
-
-    # Validators
-    @field_validator("import_file_path", mode="after")
-    @classmethod
-    def validate_feedintariffimport_file_path(
-        cls, value: Optional[Union[str, Path]]
-    ) -> Optional[Path]:
-        if value is None:
-            return None
-        if isinstance(value, str):
-            value = Path(value)
-        # Ensure the file is available.
-        value = value.resolve()
-        if not value.is_file():
-            raise ValueError(f"Import file path '{value}' is not a file.")
-        return value
-
-
-class FeedInTariffImport(FeedInTariffProvider, PredictionImportProvider):
-    """Fetch feed in tariff data from an import file or JSON string.
-
-    FeedInTariffImport is a singleton-based class that retrieves feed in tariff forecast data
-    from a file or JSON string and maps it to `FeedInTariffDataRecord` fields. It manages the
-    forecast over a range of hours into the future and retains historical data.
-    """
-
-    @classmethod
-    def provider_id(cls) -> str:
-        """Return the unique identifier for the FeedInTariffImport provider."""
-        return "FeedInTariffImport"
-
-    def _update_data(self, force_update: Optional[bool] = False) -> None:
-        if self.config.feedintariff.provider_settings.FeedInTariffImport is None:
-            logger.debug(f"{self.provider_id()} data update without provider settings.")
-            return
-        if self.config.feedintariff.provider_settings.FeedInTariffImport.import_file_path:
-            self.import_from_file(
-                self.config.feedintariff.provider_settings.FeedInTariffImport.import_file_path,
-                key_prefix="feedintariff",
-            )
-        if self.config.feedintariff.provider_settings.FeedInTariffImport.import_json:
-            self.import_from_json(
-                self.config.feedintariff.provider_settings.FeedInTariffImport.import_json,
-                key_prefix="feedintariff",
-            )
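The file-path validator resolves the path and requires an existing file; note that `Path.resolve()` returns a new path rather than mutating in place, so its result has to be kept (done above). The check in isolation, with a hypothetical path:

from pathlib import Path

value = Path("/path/to/feedintariff.json")  # hypothetical import path
value = value.resolve()                     # resolve() returns a new Path
if not value.is_file():
    raise ValueError(f"Import file path '{value}' is not a file.")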
@@ -1,22 +1,21 @@
 #!/usr/bin/env python
 import pickle
+from functools import lru_cache
 from pathlib import Path
 
 import numpy as np
 from scipy.interpolate import RegularGridInterpolator
 
-from akkudoktoreos.core.cache import cachemethod_energy_management
-from akkudoktoreos.core.coreabc import SingletonMixin
-
 
 class SelfConsumptionProbabilityInterpolator:
     def __init__(self, filepath: str | Path):
         self.filepath = filepath
         # Load the RegularGridInterpolator
         with open(self.filepath, "rb") as file:
-            self.interpolator: RegularGridInterpolator = pickle.load(file)  # noqa: S301
+            self.interpolator: RegularGridInterpolator = pickle.load(file)
 
-    def _generate_points(
+    @lru_cache(maxsize=128)
+    def generate_points(
         self, load_1h_power: float, pv_power: float
     ) -> tuple[np.ndarray, np.ndarray]:
         """Generate the grid points for interpolation."""

@@ -24,20 +23,8 @@ class SelfConsumptionProbabilityInterpolator:
         points = np.array([np.full_like(partial_loads, load_1h_power), partial_loads]).T
         return points, partial_loads
 
-    @cachemethod_energy_management
     def calculate_self_consumption(self, load_1h_power: float, pv_power: float) -> float:
-        """Calculate the PV self-consumption rate using RegularGridInterpolator.
-
-        The results are cached until the start of the next energy management run/optimization.
-
-        Args:
-            - load_1h_power: 1h power levels (W).
-            - pv_power: Current PV power output (W).
-
-        Returns:
-            - Self-consumption rate as a float.
-        """
-        points, partial_loads = self._generate_points(load_1h_power, pv_power)
+        points, partial_loads = self.generate_points(load_1h_power, pv_power)
         probabilities = self.interpolator(points)
         return probabilities.sum()

@@ -80,17 +67,5 @@ class SelfConsumptionProbabilityInterpolator:
     # return self_consumption_rate
 
 
-class EOSLoadInterpolator(SelfConsumptionProbabilityInterpolator, SingletonMixin):
-    def __init__(self) -> None:
-        if hasattr(self, "_initialized"):
-            return
-        filename = Path(__file__).parent.resolve() / ".." / "data" / "regular_grid_interpolator.pkl"
-        super().__init__(filename)
-
-
-# Initialize the Energy Management System; it is a singleton.
-eos_load_interpolator = EOSLoadInterpolator()
-
-
-def get_eos_load_interpolator() -> EOSLoadInterpolator:
-    return eos_load_interpolator
+# Test the function
+# print(calculate_self_consumption(1000, 1200))
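Both sides load a pickled `RegularGridInterpolator` over (load power, partial PV load) points and sum the interpolated probabilities; the head branch swaps the energy-management-scoped cache for `functools.lru_cache(maxsize=128)`, which also keeps a reference to `self` in its cache keys. A sketch of the scipy call on a synthetic grid instead of the shipped pickle:

import numpy as np
from scipy.interpolate import RegularGridInterpolator

loads = np.linspace(0.0, 5000.0, 6)               # load axis (W), synthetic
pv = np.linspace(0.0, 10000.0, 6)                 # PV axis (W), synthetic
values = np.random.default_rng(0).random((6, 6))  # stand-in probabilities

interpolator = RegularGridInterpolator((loads, pv), values)
print(interpolator([[1000.0, 1200.0]]))  # interpolated value at (1000 W, 1200 W)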
@@ -2,70 +2,17 @@
 
 from typing import Optional
 
-from pydantic import Field, field_validator
+from pydantic import Field
 
 from akkudoktoreos.config.configabc import SettingsBaseModel
-from akkudoktoreos.prediction.loadabc import LoadProvider
-from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
-from akkudoktoreos.prediction.loadimport import LoadImportCommonSettings
-from akkudoktoreos.prediction.loadvrm import LoadVrmCommonSettings
-from akkudoktoreos.prediction.prediction import get_prediction
+from akkudoktoreos.core.logging import get_logger
 
-prediction_eos = get_prediction()
+logger = get_logger(__name__)
 
-# Valid load providers
-load_providers = [
-    provider.provider_id()
-    for provider in prediction_eos.providers
-    if isinstance(provider, LoadProvider)
-]
-
-
-class LoadCommonProviderSettings(SettingsBaseModel):
-    """Load Prediction Provider Configuration."""
-
-    LoadAkkudoktor: Optional[LoadAkkudoktorCommonSettings] = Field(
-        default=None,
-        json_schema_extra={"description": "LoadAkkudoktor settings", "examples": [None]},
-    )
-    LoadVrm: Optional[LoadVrmCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "LoadVrm settings", "examples": [None]}
-    )
-    LoadImport: Optional[LoadImportCommonSettings] = Field(
-        default=None, json_schema_extra={"description": "LoadImport settings", "examples": [None]}
-    )
-
 
 class LoadCommonSettings(SettingsBaseModel):
-    """Load Prediction Configuration."""
+    """Common settings for load forecast providers."""
 
-    provider: Optional[str] = Field(
-        default=None,
-        json_schema_extra={
-            "description": "Load provider id of provider to be used.",
-            "examples": ["LoadAkkudoktor"],
-        },
+    load_provider: Optional[str] = Field(
+        default=None, description="Load provider id of provider to be used."
     )
-
-    provider_settings: LoadCommonProviderSettings = Field(
-        default_factory=LoadCommonProviderSettings,
-        json_schema_extra={
-            "description": "Provider settings",
-            "examples": [
-                # Example 1: Empty/default settings (all providers None)
-                {
-                    "LoadAkkudoktor": None,
-                    "LoadVrm": None,
-                    "LoadImport": None,
-                },
-            ],
-        },
-    )
-
-    # Validators
-    @field_validator("provider", mode="after")
-    @classmethod
-    def validate_provider(cls, value: Optional[str]) -> Optional[str]:
-        if value is None or value in load_providers:
-            return value
-        raise ValueError(f"Provider '{value}' is not a valid load provider: {load_providers}.")
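The removed module builds its list of valid provider ids by scanning the prediction singleton's registered providers. The same pattern with plain stand-in classes (names here are illustrative only):

class LoadProvider:
    pass

class LoadAkkudoktor(LoadProvider):
    @classmethod
    def provider_id(cls) -> str:
        return "LoadAkkudoktor"

class WeatherProvider:
    @classmethod
    def provider_id(cls) -> str:
        return "BrightSky"

providers = [LoadAkkudoktor(), WeatherProvider()]
load_providers = [p.provider_id() for p in providers if isinstance(p, LoadProvider)]
print(load_providers)  # ['LoadAkkudoktor'] - non-load providers are filtered out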
@@ -9,14 +9,21 @@ from typing import List, Optional
 
 from pydantic import Field
 
+from akkudoktoreos.core.logging import get_logger
 from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
 
+logger = get_logger(__name__)
+
 
 class LoadDataRecord(PredictionRecord):
     """Represents a load data record containing various load attributes at a specific datetime."""
 
-    loadforecast_power_w: Optional[float] = Field(
-        default=None, json_schema_extra={"description": "Predicted load mean value (W)."}
+    load_mean: Optional[float] = Field(default=None, description="Predicted load mean value (W).")
+    load_std: Optional[float] = Field(
+        default=None, description="Predicted load standard deviation (W)."
+    )
+    load_mean_adjusted: Optional[float] = Field(
+        default=None, description="Predicted load mean value adjusted by load measurement (W)."
     )

@@ -26,23 +33,23 @@ class LoadProvider(PredictionProvider):
     LoadProvider is a thread-safe singleton, ensuring only one instance of this class is created.
 
     Configuration variables:
-        provider (str): Prediction provider for load.
+        load_provider (str): Prediction provider for load.
 
     Attributes:
-        hours (int, optional): The number of hours into the future for which predictions are generated.
-        historic_hours (int, optional): The number of past hours for which historical data is retained.
+        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
+        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
         latitude (float, optional): The latitude in degrees, must be within -90 to 90.
         longitude (float, optional): The longitude in degrees, must be within -180 to 180.
         start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
         end_datetime (datetime, computed): The datetime representing the end of the prediction range,
-            calculated based on `start_datetime` and `hours`.
+            calculated based on `start_datetime` and `prediction_hours`.
         keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
-            based on `start_datetime` and `historic_hours`.
+            based on `start_datetime` and `prediction_historic_hours`.
     """
 
     # overload
     records: List[LoadDataRecord] = Field(
-        default_factory=list, json_schema_extra={"description": "List of LoadDataRecord records"}
+        default_factory=list, description="List of LoadDataRecord records"
     )
 
     @classmethod

@@ -51,4 +58,4 @@ class LoadProvider(PredictionProvider):
         return "LoadProvider"
 
     def enabled(self) -> bool:
-        return self.provider_id() == self.config.load.provider
+        return self.provider_id() == self.config.load_provider
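A recurring theme in these hunks is the switch between `description=` and `json_schema_extra=` on pydantic `Field`; both end up as a `description` in the generated JSON schema, `json_schema_extra` just merges arbitrary extra keys. A quick check:

from typing import Optional
from pydantic import BaseModel, Field

class Record(BaseModel):
    load_mean: Optional[float] = Field(default=None, description="Predicted load mean value (W).")
    load_std: Optional[float] = Field(
        default=None, json_schema_extra={"description": "Predicted load standard deviation (W)."}
    )

schema = Record.model_json_schema()["properties"]
print(schema["load_mean"]["description"])  # from description=
print(schema["load_std"]["description"])   # from json_schema_extra=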
Some files were not shown because too many files have changed in this diff.