# Compare commits

`dl_dev-arc ... NormannK-p` (21 commits)
| Author | SHA1 | Date |
| ------ | ---- | ---- |
| | `eb0f49310c` | |
| | `9c961d886c` | |
| | `82d633c9b0` | |
| | `d86b4c089a` | |
| | `2a5879c177` | |
| | `a7d58eed9a` | |
| | `1020a46435` | |
| | `8258b1cca1` | |
| | `afbe50c388` | |
| | `c8cad0f277` | |
| | `694655311f` | |
| | `1cd38d93ba` | |
| | `7dfd50475a` | |
| | `ab6a518b5f` | |
| | `80bfe4d0f0` | |
| | `1a2cb4d37d` | |
| | `d05b161e24` | |
| | `da4994ca39` | |
| | `94618f5f66` | |
| | `1bb74ed836` | |
| | `6743d8df4f` | |
**`.github/workflows/docker-build.yml`** (4 changes, vendored)

```diff
@@ -7,13 +7,11 @@ on:
   push:
     branches:
       - 'main'
-      - 'feature/config-overhaul'
     tags:
       - 'v*'
   pull_request:
     branches:
-      - 'main'
-      - 'feature/config-overhaul'
+      - '**'

 env:
   DOCKERHUB_REPO: akkudoktor/eos
```
**`.pre-commit-config.yaml`** (filename inferred from content)

```diff
@@ -12,12 +12,12 @@ repos:
       - id: check-merge-conflict
         exclude: '\.rst$'  # Exclude .rst files
   - repo: https://github.com/PyCQA/isort
-    rev: 5.13.2
+    rev: 6.0.0
     hooks:
       - id: isort
         name: isort
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.8
+    rev: v0.9.6
     hooks:
       # Run the linter and fix simple issues automatically
       - id: ruff
@@ -25,7 +25,7 @@ repos:
       # Run the formatter.
       - id: ruff-format
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v1.13.0'
+    rev: 'v1.15.0'
     hooks:
       - id: mypy
         additional_dependencies:
@@ -33,3 +33,12 @@ repos:
           - "pandas-stubs==2.2.3.241009"
           - "numpy==2.1.3"
         pass_filenames: false
+  - repo: https://github.com/jackdewinter/pymarkdown
+    rev: main
+    hooks:
+      - id: pymarkdown
+        files: ^docs/
+        exclude: ^docs/_generated
+        args:
+          - --config=docs/pymarkdown.json
+          - scan
```
**`CONTRIBUTING.md`** (filename inferred from context)

````diff
@@ -33,6 +33,7 @@ See also [README.md](README.md).
 python -m venv .venv
 source .venv/bin/activate
 pip install -r requirements-dev.txt
+pip install -e .
 ```

 Install make to get access to helpful shortcuts (documentation generation, manual formatting, etc.).
````
**`Makefile`** (18 changes)

```diff
@@ -19,8 +19,10 @@ help:
 	@echo "  read-docs    - Read HTML documentation in your browser."
 	@echo "  gen-docs     - Generate openapi.json and docs/_generated/*."
 	@echo "  clean-docs   - Remove generated documentation."
-	@echo "  run          - Run EOS production server in the virtual environment."
-	@echo "  run-dev      - Run EOS development server in the virtual environment (automatically reloads)."
+	@echo "  run          - Run EOS production server in virtual environment."
+	@echo "  run-dev      - Run EOS development server in virtual environment (automatically reloads)."
+	@echo "  run-dash     - Run EOSdash production server in virtual environment."
+	@echo "  run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)."
 	@echo "  dist         - Create distribution (in dist/)."
 	@echo "  clean        - Remove generated documentation, distribution and virtual environment."
@@ -85,11 +87,19 @@ clean: clean-docs

 run:
 	@echo "Starting EOS production server, please wait..."
-	.venv/bin/python src/akkudoktoreos/server/eos.py
+	.venv/bin/python -m akkudoktoreos.server.eos

 run-dev:
 	@echo "Starting EOS development server, please wait..."
-	.venv/bin/python src/akkudoktoreos/server/eos.py --host localhost --port 8503 --reload true
+	.venv/bin/python -m akkudoktoreos.server.eos --host localhost --port 8503 --reload true

+run-dash:
+	@echo "Starting EOSdash production server, please wait..."
+	.venv/bin/python -m akkudoktoreos.server.eosdash
+
+run-dash-dev:
+	@echo "Starting EOSdash development server, please wait..."
+	.venv/bin/python -m akkudoktoreos.server.eosdash --host localhost --port 8504 --reload true
+
 # Target to setup tests.
 test-setup: pip-dev
```
**`README.md`** (filename inferred from context)

````diff
@@ -10,7 +10,7 @@ See [CONTRIBUTING.md](CONTRIBUTING.md).

 ## Installation

-The project requires Python 3.10 or newer. Official docker images can be found at [akkudoktor/eos](https://hub.docker.com/r/akkudoktor/eos).
+The project requires Python 3.11 or newer. Official docker images can be found at [akkudoktor/eos](https://hub.docker.com/r/akkudoktor/eos).

 Following sections describe how to locally start the EOS server on `http://localhost:8503`.

@@ -23,13 +23,15 @@ Linux:

 ```bash
 python -m venv .venv
 .venv/bin/pip install -r requirements.txt
+.venv/bin/pip install -e .
 ```

 Windows:

 ```cmd
 python -m venv .venv
 .venv\Scripts\pip install -r requirements.txt
+.venv\Scripts\pip install -e .
 ```

 Finally, start the EOS server:
````
**`docs/_generated/config.md`** (filename inferred; generated configuration reference)

@@ -27,12 +27,10 @@ Validators:

| Name | Environment Variable | Type | Read-Only | Default | Description |
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
| data_folder_path | `EOS_GENERAL__DATA_FOLDER_PATH` | `Optional[pathlib.Path]` | `rw` | `None` | Path to EOS data directory. |
| data_output_subpath | `EOS_GENERAL__DATA_OUTPUT_SUBPATH` | `Optional[pathlib.Path]` | `rw` | `output` | Sub-path for the EOS output data directory. |
| data_cache_subpath | `EOS_GENERAL__DATA_CACHE_SUBPATH` | `Optional[pathlib.Path]` | `rw` | `cache` | Sub-path for the EOS cache data directory. |
| latitude | `EOS_GENERAL__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°) |
| longitude | `EOS_GENERAL__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees, within -180 to 180 (°) |
| timezone | | `Optional[str]` | `ro` | `N/A` | Compute timezone based on latitude and longitude. |
| data_output_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Compute data_output_path based on data_folder_path. |
| data_cache_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Compute data_cache_path based on data_folder_path. |
| config_folder_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Path to EOS configuration directory. |
| config_file_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Path to EOS configuration file. |
:::

@@ -46,7 +44,6 @@ Validators:

```json
{
    "general": {
        "data_folder_path": null,
        "data_output_subpath": "output",
        "data_cache_subpath": "cache",
        "latitude": 52.52,
        "longitude": 13.405
    }
}
```

@@ -62,18 +59,66 @@ Validators:

```json
{
    "general": {
        "data_folder_path": null,
        "data_output_subpath": "output",
        "data_cache_subpath": "cache",
        "latitude": 52.52,
        "longitude": 13.405,
        "timezone": "Europe/Berlin",
        "data_output_path": null,
        "data_cache_path": null,
        "config_folder_path": "/home/user/.config/net.akkudoktoreos.net",
        "config_file_path": "/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
    }
}
```
## Cache Configuration

:::{table} cache
:widths: 10 20 10 5 5 30
:align: left

| Name | Environment Variable | Type | Read-Only | Default | Description |
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
| subpath | `EOS_CACHE__SUBPATH` | `Optional[pathlib.Path]` | `rw` | `cache` | Sub-path for the EOS cache data directory. |
| cleanup_interval | `EOS_CACHE__CLEANUP_INTERVAL` | `float` | `rw` | `300` | Interval in seconds for EOS file cache cleanup. |
:::

### Example Input/Output

```{eval-rst}
.. code-block:: json

   {
       "cache": {
           "subpath": "cache",
           "cleanup_interval": 300.0
       }
   }
```
## Energy Management Configuration

:::{table} ems
:widths: 10 20 10 5 5 30
:align: left

| Name | Environment Variable | Type | Read-Only | Default | Description |
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
| startup_delay | `EOS_EMS__STARTUP_DELAY` | `float` | `rw` | `5` | Startup delay in seconds for EOS energy management runs. |
| interval | `EOS_EMS__INTERVAL` | `Optional[float]` | `rw` | `None` | Interval in seconds between EOS energy management runs. |
:::

### Example Input/Output

```{eval-rst}
.. code-block:: json

   {
       "ems": {
           "startup_delay": 5.0,
           "interval": 300.0
       }
   }
```
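The EMS interval can also be changed at runtime through the configuration API documented further below. A minimal sketch, assuming a local EOS instance on port 8503 and the `requests` package:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

# Run energy management every 10 minutes instead of every 300 s.
# The request body is the bare JSON value to assign to the nested key.
resp = requests.put(f"{BASE}/v1/config/ems/interval", json=600.0, timeout=10)
resp.raise_for_status()
print(resp.json()["ems"])  # updated section, e.g. {'startup_delay': 5.0, 'interval': 600.0}
```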
## Logging Configuration

:::{table} logging

@@ -826,9 +871,6 @@ Validators:

## Server Configuration

```diff
-Attributes:
-    To be added
```

:::{table} server
:widths: 10 20 10 5 5 30
:align: left

@@ -889,10 +931,17 @@ Attributes:

```json
{
    "general": {
        "data_folder_path": null,
        "data_output_subpath": "output",
        "data_cache_subpath": "cache",
        "latitude": 52.52,
        "longitude": 13.405
    },
    "cache": {
        "subpath": "cache",
        "cleanup_interval": 300.0
    },
    "ems": {
        "startup_delay": 5.0,
        "interval": 300.0
    },
    "logging": {
        "level": "INFO"
    },
    ...
}
```
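All `rw` options above can also be set through their environment variables before the server starts. A hypothetical launcher sketch (the override values are examples, not defaults):

```python
import os
import subprocess

# Environment variables follow the EOS_<SECTION>__<KEY> convention and are
# read at startup of the REST server.
env = os.environ.copy()
env["EOS_GENERAL__LATITUDE"] = "48.137"    # hypothetical override of general.latitude
env["EOS_GENERAL__LONGITUDE"] = "11.575"   # hypothetical override of general.longitude
env["EOS_EMS__INTERVAL"] = "300"           # ems.interval in seconds

# Module path as used by the Makefile run target above.
subprocess.run(["python", "-m", "akkudoktoreos.server.eos"], env=env)
```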
**`docs/_generated/openapi.md`** (filename inferred; generated API reference)

@@ -166,6 +166,127 @@ Note:

---

## GET /v1/admin/cache

**Links**: [local](http://localhost:8503/docs#/default/fastapi_admin_cache_get_v1_admin_cache_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_admin_cache_get_v1_admin_cache_get)

Fastapi Admin Cache Get

```
Current cache management data.

Returns:
    data (dict): The management data.
```

**Responses**:

- **200**: Successful Response

---

## POST /v1/admin/cache/clear

**Links**: [local](http://localhost:8503/docs#/default/fastapi_admin_cache_clear_post_v1_admin_cache_clear_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_admin_cache_clear_post_v1_admin_cache_clear_post)

Fastapi Admin Cache Clear Post

```
Clear the cache from expired data.

Deletes expired cache files.

Args:
    clear_all (Optional[bool]): Delete all cached files. Default is False.

Returns:
    data (dict): The management data after cleanup.
```

**Parameters**:

- `clear_all` (query, optional): No description provided.

**Responses**:

- **200**: Successful Response
- **422**: Validation Error

---
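A short usage sketch for the two admin cache endpoints above, assuming a local EOS instance and the `requests` package:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

# Inspect the current cache management data.
cache_info = requests.get(f"{BASE}/v1/admin/cache", timeout=10).json()

# Delete expired cache files only (default), or everything with clear_all=true.
cleaned = requests.post(
    f"{BASE}/v1/admin/cache/clear", params={"clear_all": True}, timeout=10
).json()
```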
## POST /v1/admin/cache/load

**Links**: [local](http://localhost:8503/docs#/default/fastapi_admin_cache_load_post_v1_admin_cache_load_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_admin_cache_load_post_v1_admin_cache_load_post)

Fastapi Admin Cache Load Post

```
Load cache management data.

Returns:
    data (dict): The management data that was loaded.
```

**Responses**:

- **200**: Successful Response

---

## POST /v1/admin/cache/save

**Links**: [local](http://localhost:8503/docs#/default/fastapi_admin_cache_save_post_v1_admin_cache_save_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_admin_cache_save_post_v1_admin_cache_save_post)

Fastapi Admin Cache Save Post

```
Save the current cache management data.

Returns:
    data (dict): The management data that was saved.
```

**Responses**:

- **200**: Successful Response

---

## POST /v1/admin/server/restart

**Links**: [local](http://localhost:8503/docs#/default/fastapi_admin_server_restart_post_v1_admin_server_restart_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_admin_server_restart_post_v1_admin_server_restart_post)

Fastapi Admin Server Restart Post

```
Restart the server.

Restart EOS properly by starting a new instance before exiting the old one.
```

**Responses**:

- **200**: Successful Response

---

## POST /v1/admin/server/shutdown

**Links**: [local](http://localhost:8503/docs#/default/fastapi_admin_server_shutdown_post_v1_admin_server_shutdown_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_admin_server_shutdown_post_v1_admin_server_shutdown_post)

Fastapi Admin Server Shutdown Post

```
Shutdown the server.
```

**Responses**:

- **200**: Successful Response

---
## GET /v1/config

**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_get_v1_config_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_get_v1_config_get)

@@ -238,11 +359,11 @@ Returns:

---

```diff
-## PUT /v1/config/reset
+## POST /v1/config/reset

-**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_update_post_v1_config_reset_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_update_post_v1_config_reset_put)
+**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_reset_post_v1_config_reset_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_reset_post_v1_config_reset_post)

-Fastapi Config Update Post
+Fastapi Config Reset Post
```

```
Reset the configuration to the EOS configuration file.
```

@@ -257,6 +378,86 @@ Returns:

---
## GET /v1/config/{path}

**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_get_key_v1_config__path__get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_get_key_v1_config__path__get)

Fastapi Config Get Key

```
Get the value of a nested key or index in the config model.

Args:
    path (str): The nested path to the key (e.g., "general/latitude" or "optimize/nested_list/0").

Returns:
    value (Any): The value of the selected nested key.
```

**Parameters**:

- `path` (path, required): The nested path to the configuration key (e.g., general/latitude).

**Responses**:

- **200**: Successful Response
- **422**: Validation Error

---

## PUT /v1/config/{path}

**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_put_key_v1_config__path__put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_put_key_v1_config__path__put)

Fastapi Config Put Key

```
Update a nested key or index in the config model.

Args:
    path (str): The nested path to the key (e.g., "general/latitude" or "optimize/nested_list/0").
    value (Any): The new value to assign to the key or index at path.

Returns:
    configuration (ConfigEOS): The current configuration after the update.
```

**Parameters**:

- `path` (path, required): The nested path to the configuration key (e.g., general/latitude).

**Request Body**:

- `application/json`: `{"description": "The value to assign to the specified configuration path.", "title": "Value"}`

**Responses**:

- **200**: Successful Response
- **422**: Validation Error

---
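A hedged sketch of reading and updating a nested configuration key with these two endpoints, assuming a local instance and the `requests` package:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

# Read a nested value; the path mirrors the JSON structure.
latitude = requests.get(f"{BASE}/v1/config/general/latitude", timeout=10).json()

# Write it back; the body is the bare JSON value to assign.
updated = requests.put(f"{BASE}/v1/config/general/latitude", json=52.52, timeout=10).json()
print(updated["general"]["latitude"])  # configuration after the update
```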
## GET /v1/health

**Links**: [local](http://localhost:8503/docs#/default/fastapi_health_get_v1_health_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_health_get_v1_health_get)

Fastapi Health Get

```
Health check endpoint to verify that the EOS server is alive.
```

**Responses**:

- **200**: Successful Response

---

## PUT /v1/measurement/data

**Links**: [local](http://localhost:8503/docs#/default/fastapi_measurement_data_put_v1_measurement_data_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_measurement_data_put_v1_measurement_data_put)
@@ -473,6 +674,92 @@ Merge the measurement of given key and value into EOS measurements at given date

---

## GET /v1/prediction/dataframe

**Links**: [local](http://localhost:8503/docs#/default/fastapi_prediction_dataframe_get_v1_prediction_dataframe_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_prediction_dataframe_get_v1_prediction_dataframe_get)

Fastapi Prediction Dataframe Get

```
Get prediction for given key within given date range as series.

Args:
    key (str): Prediction key
    start_datetime (Optional[str]): Starting datetime (inclusive).
        Defaults to start datetime of latest prediction.
    end_datetime (Optional[str]): Ending datetime (exclusive).
        Defaults to end datetime of latest prediction.
```

**Parameters**:

- `keys` (query, required): Prediction keys.
- `start_datetime` (query, optional): Starting datetime (inclusive).
- `end_datetime` (query, optional): Ending datetime (exclusive).
- `interval` (query, optional): Time duration for each interval. Defaults to 1 hour.

**Responses**:

- **200**: Successful Response
- **422**: Validation Error

---
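An illustrative request, assuming a local instance; the exact encoding of the `interval` duration string is an assumption:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

params = {
    "keys": ["load_mean", "elecprice_marketprice_wh"],  # repeated query parameter
    "interval": "1 hour",  # assumption: duration string; defaults to 1 hour
}
dataframe = requests.get(f"{BASE}/v1/prediction/dataframe", params=params, timeout=10).json()
```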
## PUT /v1/prediction/import/{provider_id}

**Links**: [local](http://localhost:8503/docs#/default/fastapi_prediction_import_provider_v1_prediction_import__provider_id__put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_prediction_import_provider_v1_prediction_import__provider_id__put)

Fastapi Prediction Import Provider

```
Import prediction for given provider ID.

Args:
    provider_id: ID of provider to update.
    data: Prediction data.
    force_enable: Update data even if provider is disabled.
        Defaults to False.
```

**Parameters**:

- `provider_id` (path, required): Provider ID.
- `force_enable` (query, optional): No description provided.

**Request Body**:

- `application/json`:

  ```json
  {
    "anyOf": [
      {"$ref": "#/components/schemas/PydanticDateTimeDataFrame"},
      {"$ref": "#/components/schemas/PydanticDateTimeData"},
      {"type": "object"},
      {"type": "null"}
    ],
    "title": "Data"
  }
  ```

**Responses**:

- **200**: Successful Response
- **422**: Validation Error

---
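A hedged import sketch; the payload below follows the dictionary-style import format described in the prediction documentation further down, and its exact keys (`start_datetime`, `interval`) are assumptions:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

payload = {
    "start_datetime": "2025-03-10 00:00:00",  # hypothetical start of the series
    "interval": "1 hour",                     # hypothetical step width
    "elecprice_marketprice_wh": [0.0003784, 0.0003868, 0.0003410],
}
requests.put(
    f"{BASE}/v1/prediction/import/ElecPriceImport",  # provider ID from the prediction docs
    json=payload,
    params={"force_enable": True},  # update even if the provider is disabled
    timeout=10,
)
```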
## GET /v1/prediction/keys

**Links**: [local](http://localhost:8503/docs#/default/fastapi_prediction_keys_get_v1_prediction_keys_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_prediction_keys_get_v1_prediction_keys_get)

@@ -516,7 +803,7 @@ Args:

- `end_datetime` (query, optional): Ending datetime (exclusive).

```diff
-- `interval` (query, optional): Time duration for each interval.
+- `interval` (query, optional): Time duration for each interval. Defaults to 1 hour.
```

**Responses**:
**`docs/_static/optimization_timeframes-excalidraw.json`** (new file, vendored)

**`docs/_static/optimization_timeframes.png`** (new binary file, vendored; 664 KiB)
**`docs/akkudoktoreos/architecture.md`** (filename inferred)

@@ -20,17 +20,22 @@ EOS Architecture

### Configuration

The configuration controls all aspects of EOS: optimization, prediction, measurement, and energy management.

### Energy Management

Energy management is the overall process to provide planning data for scheduling the different devices in your system in an optimal way. Energy management cares for the update of predictions and the optimization of the planning based on the simulated behavior of the devices. The planning is on the hour. Sub-hour energy management is left

### Optimization

### Device Simulations

Device simulations simulate devices' behavior based on internal logic and predicted data. They provide the data needed for optimization.

### Predictions

@@ -38,7 +43,8 @@ Predictions provide predicted future data to be used by the optimization.

### Measurements

Measurements are utilized to refine predictions using real data from your system, thereby enhancing accuracy.

### EOS Server
**`docs/akkudoktoreos/configuration.md`** (filename inferred)

@@ -31,10 +31,10 @@ Use endpoint `POST /v1/config/reset` to reset the configuration to the values in

The configuration sources and their priorities are as follows:

```diff
-1. **Runtime Config Updates**: Provided during runtime by the REST interface
-2. **Environment Variables**: Defined at startup of the REST server and during runtime
-3. **EOS Configuration File**: Read at startup of the REST server and on request
-4. **Default Values**
+1. `Settings`: Provided during runtime by the REST interface
+2. `Environment Variables`: Defined at startup of the REST server and during runtime
+3. `EOS Configuration File`: Read at startup of the REST server and on request
+4. `Default Values`
```

### Runtime Config Updates
**`docs/akkudoktoreos/integration.md`** (filename inferred)

@@ -17,18 +17,17 @@ APIs, and online services in creative and practical ways.

Andreas Schmitz uses [Node-RED](https://nodered.org/) as part of his home automation setup.

```diff
-### Resources
+### Node-Red Resources
```

- [Installation Guide (German)](https://meintechblog.de/2024/09/05/andreas-schmitz-joerg-installiert-mein-energieoptimierungssystem/)
  — A detailed guide on integrating an early version of EOS with `Node-RED`.

## Home Assistant

[Home Assistant](https://www.home-assistant.io/) is an open-source home automation platform that emphasizes local control and user privacy.

```diff
-### Resources
+### Home Assistant Resources
```

- Duetting's [EOS Home Assistant Addon](https://github.com/Duetting/ha_eos_addon) — Additional details can be found in this [discussion thread](https://github.com/Akkudoktor-EOS/EOS/discussions/294).
**`docs/akkudoktoreos/measurement.md`** (filename inferred)

@@ -5,9 +5,9 @@

Measurements are utilized to refine predictions using real data from your system, thereby enhancing accuracy.

```diff
-- **Household Load Measurement**
-- **Grid Export Measurement**
-- **Grid Import Measurement**
+- Household Load Measurement
+- Grid Export Measurement
+- Grid Import Measurement
```

## Storing Measurements
**`docs/akkudoktoreos/optimization.md`** (filename inferred)

@@ -2,7 +2,199 @@

# Optimization

```diff
-:::{admonition} Todo
-:class: note
-Describe optimization.
-:::
```

## Introduction

The `POST /optimize` API endpoint optimizes your energy management system based on various inputs including electricity prices, battery storage capacity, PV forecast, and temperature data.

## Input Payload

### Sample Request

```json
{
    "ems": {
        "preis_euro_pro_wh_akku": 0.0007,
        "einspeiseverguetung_euro_pro_wh": 0.00007,
        "gesamtlast": [500, 500, ..., 500, 500],
        "pv_prognose_wh": [300, 0, 0, ..., 2160, 1840],
        "strompreis_euro_pro_wh": [0.0003784, 0.0003868, ..., 0.00034102, 0.00033709]
    },
    "pv_akku": {
        "capacity_wh": 12000,
        "charging_efficiency": 0.92,
        "discharging_efficiency": 0.92,
        "max_charge_power_w": 5700,
        "initial_soc_percentage": 66,
        "min_soc_percentage": 5,
        "max_soc_percentage": 100
    },
    "inverter": {
        "max_power_wh": 15500
    },
    "eauto": {
        "capacity_wh": 64000,
        "charging_efficiency": 0.88,
        "discharging_efficiency": 0.88,
        "max_charge_power_w": 11040,
        "initial_soc_percentage": 98,
        "min_soc_percentage": 60,
        "max_soc_percentage": 100
    },
    "temperature_forecast": [18.3, 18, ..., 20.16, 19.84],
    "start_solution": null
}
```
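A minimal end-to-end sketch of submitting such a payload, assuming a local EOS instance and the `requests` package; the flat 48-value arrays are placeholders, not realistic forecasts:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance
HOURS = 48

payload = {
    "ems": {
        "preis_euro_pro_wh_akku": 0.0007,
        "einspeiseverguetung_euro_pro_wh": 0.00007,
        "gesamtlast": [500.0] * HOURS,       # flat 500 W baseline load
        "pv_prognose_wh": [0.0] * HOURS,     # hypothetical: no PV
        "strompreis_euro_pro_wh": [0.0003] * HOURS,
    },
    "pv_akku": {
        "capacity_wh": 12000,
        "charging_efficiency": 0.92,
        "discharging_efficiency": 0.92,
        "max_charge_power_w": 5700,
        "initial_soc_percentage": 66,
        "min_soc_percentage": 5,
        "max_soc_percentage": 100,
    },
    "inverter": {"max_power_wh": 15500},
    "eauto": {
        "capacity_wh": 64000,
        "charging_efficiency": 0.88,
        "discharging_efficiency": 0.88,
        "max_charge_power_w": 11040,
        "initial_soc_percentage": 98,
        "min_soc_percentage": 60,
        "max_soc_percentage": 100,
    },
    "temperature_forecast": [18.0] * HOURS,
    "start_solution": None,
}

# Optimization can take a while; allow a generous timeout.
result = requests.post(f"{BASE}/optimize", json=payload, timeout=300).json()
print(result["result"]["Gesamtbilanz_Euro"])  # overall balance in €
```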
## Input Parameters

### Energy Management System (EMS)

#### Battery Cost (`preis_euro_pro_wh_akku`)

- Unit: €/Wh
- Purpose: Represents the residual value of energy stored in the battery
- Impact: Lower values encourage battery depletion, higher values preserve charge at the end of the simulation.

#### Feed-in Tariff (`einspeiseverguetung_euro_pro_wh`)

- Unit: €/Wh
- Purpose: Compensation received for feeding excess energy back to the grid

#### Total Load Forecast (`gesamtlast`)

- Unit: W
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
- Format: Array of hourly values
- Note: Exclude optimizable loads (EV charging, battery charging, etc.)

##### Data Sources

1. Standard Load Profile: `GET /v1/prediction/list?key=load_mean` for a standard load profile based on your yearly consumption.
2. Adjusted Load Profile: `GET /v1/prediction/list?key=load_mean_adjusted` for a combination of a standard load profile based on your yearly consumption incl. data from the last 48 h (see the request sketch below).
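A request sketch for fetching the adjusted load profile, assuming a local instance:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

# Standard load profile adjusted by measurements from the last 48 hours.
load = requests.get(
    f"{BASE}/v1/prediction/list",
    params={"key": "load_mean_adjusted"},
    timeout=10,
).json()  # hourly values usable as the "gesamtlast" input
```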
#### PV Generation Forecast (`pv_prognose_wh`)

- Unit: W
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
- Format: Array of hourly values
- Data Source: `GET /v1/prediction/series?key=pvforecast_ac_power`

#### Electricity Price Forecast (`strompreis_euro_pro_wh`)

- Unit: €/Wh
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
- Format: Array of hourly values
- Data Source: `GET /v1/prediction/list?key=elecprice_marketprice_wh`

Verify prices against your local tariffs.

### Battery Storage System

#### Configuration

- `capacity_wh`: Total battery capacity in Wh
- `charging_efficiency`: Charging efficiency (0-1)
- `discharging_efficiency`: Discharging efficiency (0-1)
- `max_charge_power_w`: Maximum charging power in W

#### State of Charge (SoC)

- `initial_soc_percentage`: Current battery level (%)
- `min_soc_percentage`: Minimum allowed SoC (%)
- `max_soc_percentage`: Maximum allowed SoC (%)

### Inverter

- `max_power_wh`: Maximum inverter power in Wh

### Electric Vehicle (EV)

- `capacity_wh`: Battery capacity in Wh
- `charging_efficiency`: Charging efficiency (0-1)
- `discharging_efficiency`: Discharging efficiency (0-1)
- `max_charge_power_w`: Maximum charging power in W
- `initial_soc_percentage`: Current charge level (%)
- `min_soc_percentage`: Minimum allowed SoC (%)
- `max_soc_percentage`: Maximum allowed SoC (%)

### Temperature Forecast

- Unit: °C
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
- Format: Array of hourly values
- Data Source: `GET /v1/prediction/list?key=weather_temp_air`

## Output Format

### Sample Response

```json
{
    "ac_charge": [0.625, 0, ..., 0.75, 0],
    "dc_charge": [1, 1, ..., 1, 1],
    "discharge_allowed": [0, 0, 1, ..., 0, 0],
    "eautocharge_hours_float": [0.625, 0, ..., 0.75, 0],
    "result": {
        "Last_Wh_pro_Stunde": [...],
        "EAuto_SoC_pro_Stunde": [...],
        "Einnahmen_Euro_pro_Stunde": [...],
        "Gesamt_Verluste": 1514.96,
        "Gesamtbilanz_Euro": 2.51,
        "Gesamteinnahmen_Euro": 2.88,
        "Gesamtkosten_Euro": 5.39,
        "akku_soc_pro_stunde": [...]
    }
}
```

### Output Parameters

#### Battery Control

- `ac_charge`: Grid charging schedule (0-1)
- `dc_charge`: DC charging schedule (0-1)
- `discharge_allowed`: Discharge permission (0 or 1)

The charging schedules range from 0 (no charge) to 1 (charge with full load). `ac_charge` multiplied by the maximum charge power of the battery results in the planned charging power (see the worked example below).
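A worked example of that relationship, using the sample values from this page:

```python
# ac_charge entries are fractions of the battery's maximum charge power.
max_charge_power_w = 5700   # from the pv_akku section of the sample request
ac_charge = 0.625           # first slot of the sample response

planned_charge_w = ac_charge * max_charge_power_w
print(planned_charge_w)     # 3562.5 W planned grid-charging power for that hour
```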
#### EV Charging

- `eautocharge_hours_float`: EV charging schedule (0-1)

#### Results

The `result` object contains detailed information about the optimization outcome. The length of each array is between 25 and 48; it starts at the current hour and ends at 23:00 tomorrow.

- `Last_Wh_pro_Stunde`: Array of hourly load values in Wh
  - Shows the total energy consumption per hour
  - Includes household load, battery charging/discharging, and EV charging
- `EAuto_SoC_pro_Stunde`: Array of hourly EV state of charge values (%)
  - Shows the projected EV battery level throughout the optimization period
- `Einnahmen_Euro_pro_Stunde`: Array of hourly revenue values in Euro
- `Gesamt_Verluste`: Total energy losses in Wh
- `Gesamtbilanz_Euro`: Overall financial balance in Euro
- `Gesamteinnahmen_Euro`: Total revenue in Euro
- `Gesamtkosten_Euro`: Total costs in Euro
- `akku_soc_pro_stunde`: Array of hourly battery state of charge values (%)

## Timeframe overview

```{figure} ../_static/optimization_timeframes.png
:alt: Timeframe Overview

Timeframe Overview
```
**`docs/akkudoktoreos/prediction.md`** (filename inferred)

@@ -5,10 +5,10 @@

Predictions, along with simulations and measurements, form the foundation upon which energy optimization is executed. In EOS, a standard set of predictions is managed, including:

```diff
-- **Household Load Prediction**
-- **Electricity Price Prediction**
-- **PV Power Prediction**
-- **Weather Prediction**
+- Household Load Prediction
+- Electricity Price Prediction
+- PV Power Prediction
+- Weather Prediction
```

## Storing Predictions

@@ -60,13 +60,15 @@ A dictionary with the following structure:

#### 2. DateTimeDataFrame

A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) dataframe with a `DatetimeIndex`. Use [pandas.DataFrame.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_json.html#pandas.DataFrame.to_json). The column names of the data must be the same as the names of the `prediction key`s.

#### 3. DateTimeSeries

A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) series with a `DatetimeIndex`. Use [pandas.Series.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.Series.to_json.html#pandas.Series.to_json).
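A small construction sketch for both formats; the timestamps and values are hypothetical:

```python
import pandas as pd

# Hourly index; the series/column name must match the prediction key it feeds.
idx = pd.date_range("2025-03-10", periods=3, freq="h", tz="Europe/Berlin")

# DateTimeSeries: a JSON string from a pandas Series.
series = pd.Series(
    [0.0003784, 0.0003868, 0.0003410], index=idx, name="elecprice_marketprice_wh"
)
series_json = series.to_json(orient="index")

# DateTimeDataFrame: a JSON string from a pandas DataFrame.
frame_json = series.to_frame().to_json(orient="index")
```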
## Adjusted Predictions

@@ -141,9 +143,12 @@ The prediction key for the electricity price forecast data is:

- `elecprice_marketprice_wh`: Electricity market price per Wh (€/Wh).

The electricity price forecast data must be provided in one of the formats described in <project:#prediction-import-providers>. The data source can be given in the `import_file_path` or `import_json` configuration option.

The data may additionally or solely be provided by the **PUT** `/v1/prediction/import/ElecPriceImport` endpoint.

## Load Prediction

Prediction keys:

@@ -184,9 +189,12 @@ The prediction keys for the load forecast data are:

- `load_mean_adjusted`: Predicted load mean value adjusted by load measurement (W).

The load forecast data must be provided in one of the formats described in <project:#prediction-import-providers>. The data source can be given in the `loadimport_file_path` or `loadimport_json` configuration option.

The data may additionally or solely be provided by the **PUT** `/v1/prediction/import/LoadImport` endpoint.
## PV Power Prediction

Prediction keys:

@@ -209,13 +217,21 @@ Configuration options:

- `PVForecastImport`: Imports from a file or JSON string.

- `planes[].surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
- `planes[].surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
- `planes[].userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
- `planes[].peakpower`: Nominal power of PV system in kW.
- `planes[].pvtechchoice`: PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
- `planes[].mountingplace`: Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
- `planes[].loss`: Sum of PV system losses in percent.
- `planes[].trackingtype`: Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
- `planes[].optimal_surface_tilt`: Calculate the optimum tilt angle. Ignored for two-axis tracking.
- `planes[].optimalangles`: Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
- `planes[].albedo`: Proportion of the light hitting the ground that it reflects back.

@@ -229,37 +245,71 @@ Configuration options:

------

Some of the planes configuration options directly follow the [PVGIS](https://joint-research-centre.ec.europa.eu/photovoltaic-geographical-information-system-pvgis/getting-started-pvgis/pvgis-user-manual_en) nomenclature.

Detailed definitions taken from [PVGIS](https://joint-research-centre.ec.europa.eu/photovoltaic-geographical-information-system-pvgis/getting-started-pvgis/pvgis-user-manual_en):

- `pvtechchoice`

  The performance of PV modules depends on the temperature and on the solar irradiance, but the exact dependence varies between different types of PV modules. At the moment we can estimate the losses due to temperature and irradiance effects for the following types of modules: crystalline silicon cells; thin film modules made from CIS or CIGS and thin film modules made from Cadmium Telluride (CdTe).

  For other technologies (especially various amorphous technologies), this correction cannot be calculated here. If you choose one of the first three options here the calculation of performance will take into account the temperature dependence of the performance of the chosen technology. If you choose the other option (other/unknown), the calculation will assume a loss of 8% of power due to temperature effects (a generic value which has been found to be reasonable for temperate climates).

  PV power output also depends on the spectrum of the solar radiation. PVGIS can calculate how the variations of the spectrum of sunlight affect the overall energy production from a PV system. At the moment this calculation can be done for crystalline silicon and CdTe modules. Note that this calculation is not yet available when using the NSRDB solar radiation database.

- `peakpower`

  This is the power that the manufacturer declares that the PV array can produce under standard test conditions (STC), which are a constant 1000 W of solar irradiation per square meter in the plane of the array, at an array temperature of 25°C. The peak power should be entered in kilowatt-peak (kWp). If you do not know the declared peak power of your modules but instead know the area of the modules and the declared conversion efficiency (in percent), you can calculate the peak power as power = area * efficiency / 100.

  Bifacial modules: PVGIS doesn't make specific calculations for bifacial modules at present. Users who wish to explore the possible benefits of this technology can input the power value for Bifacial Nameplate Irradiance. This can also be estimated from the front side peak power P_STC value and the bifaciality factor, φ (if reported in the module data sheet) as: P_BNPI = P_STC * (1 + φ * 0.135). NB this bifacial approach is not appropriate for BAPV or BIPV installations or for modules mounted on a N-S axis, i.e. facing E-W.

- `loss`

  The estimated system losses are all the losses in the system which cause the power actually delivered to the electricity grid to be lower than the power produced by the PV modules. There are several causes for this loss, such as losses in cables, power inverters, dirt (sometimes snow) on the modules and so on. Over the years the modules also tend to lose a bit of their power, so the average yearly output over the lifetime of the system will be a few percent lower than the output in the first years.

  We have given a default value of 14% for the overall losses. If you have a good idea that your value will be different (maybe due to a really high-efficiency inverter) you may reduce this value a little.

- `mountingplace`

  For fixed (non-tracking) systems, the way the modules are mounted will have an influence on the temperature of the module, which in turn affects the efficiency. Experiments have shown that if the movement of air behind the modules is restricted, the modules can get considerably hotter (up to 15°C at 1000 W/m2 of sunlight).

  In PVGIS there are two possibilities: free-standing, meaning that the modules are mounted on a rack with air flowing freely behind the modules; and building-integrated, which means that the modules are completely built into the structure of the wall or roof of a building, with no air movement behind the modules.

  Some types of mounting are in between these two extremes, for instance if the modules are mounted on a roof with curved roof tiles, allowing air to move behind the modules. In such cases, the performance will be somewhere between the results of the two calculations that are possible here.

- `userhorizon`

@@ -273,7 +323,8 @@ degrees west of north.

------

Most of the configuration options are in line with the [PVLib](https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html) definition for PVGIS data.

Detailed definitions from **PVLib** for PVGIS data.

@@ -301,7 +352,8 @@ The following prediction configuration options of the PV system must be set:

For each plane of the PV system the following configuration options must be set (see the configuration sketch below):

- `pvforecast.planes[].surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
- `pvforecast.planes[].surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
- `pvforecast.planes[].userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
- `pvforecast.planes[].inverter_paco`: AC power rating of the inverter. [W]
- `pvforecast.planes[].peakpower`: Nominal power of PV system in kW.

@@ -362,9 +414,12 @@ The prediction keys for the PV forecast data are:

- `pvforecast_dc_power`: Total DC power (W).

The PV forecast data must be provided in one of the formats described in <project:#prediction-import-providers>. The data source can be given in the `import_file_path` or `import_json` configuration option.

The data may additionally or solely be provided by the **PUT** `/v1/prediction/import/PVForecastImport` endpoint.
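As a loosely hedged illustration of the plane options listed above, a single south-facing plane could be configured like this; the exact configuration path and payload shape are assumptions:

```python
import requests

BASE = "http://localhost:8503"  # assumption: local EOS instance

plane = {
    "surface_tilt": 30,       # degrees from horizontal
    "surface_azimuth": 180,   # south (north=0, east=90, south=180, west=270)
    "peakpower": 9.8,         # kWp, hypothetical value
    "pvtechchoice": "crystSi",
    "mountingplace": "free",
    "loss": 14,               # percent, the PVGIS default discussed above
    "inverter_paco": 10000,   # W, AC rating of the inverter
}
# Hypothetical write via the nested config API.
requests.put(f"{BASE}/v1/config/pvforecast/planes", json=[plane], timeout=10)
```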
## Weather Prediction

Prediction keys:

@@ -398,8 +453,8 @@ Configuration options:

- `provider`: Load provider id of provider to be used.

  - `BrightSky`: Retrieves from [BrightSky](https://api.brightsky.dev).
  - `ClearOutside`: Retrieves from [ClearOutside](https://clearoutside.com/forecast).
  - `LoadImport`: Imports from a file or JSON string.

- `provider_settings.import_file_path`: Path to the file to import weather forecast data from.

@@ -460,7 +515,7 @@

The `WeatherImport` provider is designed to import weather forecast data from a file or JSON string. An external entity should update the file or JSON string whenever new prediction data becomes available.

The prediction keys for the weather forecast data are:

- `weather_dew_point`: Dew Point (°C)
- `weather_dhi`: Diffuse Horizontal Irradiance (W/m2)

@@ -486,5 +541,8 @@ The prediction keys for the PV forecast data are:

- `weather_wind_speed`: Wind Speed (kmph)

The weather forecast data must be provided in one of the formats described in <project:#prediction-import-providers>. The data source can be given in the `import_file_path` or `import_json` configuration option.

The data may additionally or solely be provided by the **PUT** `/v1/prediction/import/WeatherImport` endpoint.
````diff
@@ -19,6 +19,7 @@ Install the dependencies in a virtual environment:

       python -m venv .venv
       .venv\Scripts\pip install -r requirements.txt
+      .venv\Scripts\pip install -e .

 .. tab:: Linux

@@ -26,6 +27,7 @@ Install the dependencies in a virtual environment:

       python -m venv .venv
       .venv/bin/pip install -r requirements.txt
+      .venv/bin/pip install -e .

 ```
````
@@ -73,37 +75,53 @@ This project uses the `EOS.config.json` file to manage configuration settings.

### Default Configuration

A default configuration file `default.config.json` is provided. This file contains all the necessary configuration keys with their default values.

### Custom Configuration

Users can specify a custom configuration directory by setting the environment variable `EOS_DIR`.

- If the directory specified by `EOS_DIR` contains an existing `EOS.config.json` file, the application will use this configuration file.
- If the `EOS.config.json` file does not exist in the specified directory, the `default.config.json` file will be copied to the directory as `EOS.config.json` (see the lookup sketch below).
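A hedged sketch of the lookup described above; the default directory is taken from the configuration examples earlier in this diff:

```python
import json
import os
from pathlib import Path

# Hypothetical reimplementation of the EOS_DIR lookup for illustration.
eos_dir = Path(os.environ.get("EOS_DIR", str(Path.home() / ".config" / "net.akkudoktoreos.net")))
config_file = eos_dir / "EOS.config.json"

if config_file.exists():
    config = json.loads(config_file.read_text())  # use the existing file
else:
    config = {}  # EOS would copy default.config.json here instead
```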
### Configuration Updates

If the configuration keys in the `EOS.config.json` file are missing or different from those in `default.config.json`, they will be automatically updated to match the default settings, ensuring that all required keys are present.

## Classes and Functionalities

This project uses various classes to simulate and optimize the components of an energy system. Each class represents a specific aspect of the system, as described below:

- `Battery`: Simulates a battery storage system, including capacity, state of charge, and now charge and discharge losses.
- `PVForecast`: Provides forecast data for photovoltaic generation, based on weather data and historical generation data.
- `Load`: Models the load requirements of a household or business, enabling the prediction of future energy demand.
- `Heatpump`: Simulates a heat pump, including its energy consumption and efficiency under various operating conditions.
- `Strompreis`: Provides information on electricity prices, enabling optimization of energy consumption and generation based on tariff information.
- `EMS`: The Energy Management System (EMS) coordinates the interaction between the various components, performs optimization, and simulates the operation of the entire energy system.

These classes work together to enable a detailed simulation and optimization of the energy system. For each class, specific parameters and settings can be adjusted to test different scenarios and strategies.

### Customization and Extension

Each class is designed to be easily customized and extended to integrate additional functions or improvements. For example, new methods can be added for more accurate modeling of PV system or battery behavior. Developers are invited to modify and extend the system according to their needs.
````diff
@@ -24,7 +24,7 @@ akkudoktoreos/serverapi.md
 akkudoktoreos/api.rst
 ```

-# Indices and tables
+## Indices and tables

 - {ref}`genindex`
 - {ref}`modindex`
````
**`docs/pymarkdown.json`** (new file, 20 lines)

```json
{
    "plugins": {
        "md007": {
            "enabled": true,
            "code_block_line_length": 160
        },
        "md013": {
            "enabled": true,
            "line_length": 120
        },
        "md041": {
            "enabled": false
        }
    },
    "extensions": {
        "front-matter": {
            "enabled": true
        }
    }
}
```
@@ -1,12 +1,12 @@
% SPDX-License-Identifier: Apache-2.0

# Welcome to the EOS documentation!
# Welcome to the EOS documentation

This documentation is continuously written. It is edited via text files in the
[Markdown/ Markedly Structured Text](https://myst-parser.readthedocs.io/en/latest/index.html)
markup language and then compiled into a static website/ offline document using the open source tool
[Sphinx](https://www.sphinx-doc.org) and will someday land on
[Read the Docs](https://akkudoktoreos.readthedocs.io/en/latest/index.html).
[Sphinx](https://www.sphinx-doc.org) and is available on
[Read the Docs](https://akkudoktor-eos.readthedocs.io/en/latest/).

You can contribute to EOS's documentation by opening
[GitHub issues](https://github.com/Akkudoktor-EOS/EOS/issues)
1037
openapi.json
@@ -7,7 +7,7 @@ authors = [
description = "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period."
readme = "README.md"
license = {file = "LICENSE"}
requires-python = ">=3.10"
requires-python = ">=3.11"
classifiers = [
    "Development Status :: 3 - Alpha",
    "Programming Language :: Python :: 3",
@@ -1,14 +1,13 @@
-r requirements.txt
gitpython==3.1.44
linkify-it-py==2.0.3
myst-parser==4.0.0
sphinx==8.1.3
myst-parser==4.0.1
sphinx==8.2.3
sphinx_rtd_theme==3.0.2
sphinx-tabs==3.4.7
pytest==8.3.4
pytest==8.3.5
pytest-cov==6.0.0
pytest-xprocess==1.0.2
pre-commit
mypy==1.13.0
types-requests==2.32.0.20241016
pandas-stubs==2.2.3.241126
mypy==1.15.0
types-requests==2.32.0.20250306
pandas-stubs==2.2.3.250308
@@ -1,8 +1,13 @@
numpy==2.2.2
numpydantic==1.6.7
matplotlib==3.10.0
fastapi[standard]==0.115.7
python-fasthtml==0.12.0
cachebox==4.4.2
numpy==2.2.4
numpydantic==1.6.8
matplotlib==3.10.1
fastapi[standard]==0.115.11
python-fasthtml==0.12.4
MonsterUI==0.0.29
markdown-it-py==3.0.0
mdit-py-plugins==0.4.2
bokeh==3.6.3
uvicorn==0.34.0
scikit-learn==1.6.1
timezonefinder==6.5.8
@@ -10,8 +15,10 @@ deap==1.4.2
requests==2.32.3
pandas==2.2.3
pendulum==3.0.0
platformdirs==4.3.6
pvlib==0.11.2
platformdirs==4.3.7
psutil==6.1.1
pvlib==0.12.0
pydantic==2.10.6
statsmodels==0.14.4
pydantic-settings==2.7.0
linkify-it-py==2.0.3
@@ -150,7 +150,7 @@ def main():
    try:
        if args.input_file:
            with open(args.input_file, "r", encoding="utf8") as f:
            with open(args.input_file, "r", encoding="utf-8", newline=None) as f:
                content = f.read()
        elif args.input:
            content = args.input
@@ -164,7 +164,7 @@ def main():
        )
    if args.output_file:
        # Write to file
        with open(args.output_file, "w", encoding="utf8") as f:
        with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
            f.write(extracted_content)
    else:
        # Write to std output
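The same change recurs across the generator scripts below: reads open with `encoding="utf-8", newline=None` (universal newlines), writes with `newline="\n"`, so generated files come out byte-identical on Windows and Linux. A minimal sketch of the pattern; the file name is a placeholder, not a file from the repository:

```python
# Sketch of the cross-platform I/O convention used in these hunks.
# "example.txt" is a hypothetical placeholder.

# Read: universal-newline mode translates \r\n and \r to \n on input.
with open("example.txt", "r", encoding="utf-8", newline=None) as f:
    content = f.read()

# Write: newline="\n" suppresses platform newline translation on output,
# so the output file is byte-identical regardless of the operating system.
with open("example.txt", "w", encoding="utf-8", newline="\n") as f:
    f.write(content)
```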
@@ -3,6 +3,7 @@

import argparse
import json
import os
import sys
import textwrap
from pathlib import Path
@@ -86,7 +87,7 @@ def get_default_value(field_info: Union[FieldInfo, ComputedFieldInfo], regular_f


def get_type_name(field_type: type) -> str:
    type_name = str(field_type).replace("typing.", "")
    type_name = str(field_type).replace("typing.", "").replace("pathlib._local", "pathlib")
    if type_name.startswith("<class"):
        type_name = field_type.__name__
    return type_name
@@ -296,9 +297,11 @@ def main():
    try:
        config_md = generate_config_md(config_eos)
        if os.name == "nt":
            config_md = config_md.replace("127.0.0.1", "0.0.0.0").replace("\\\\", "/")
        if args.output_file:
            # Write to file
            with open(args.output_file, "w", encoding="utf8") as f:
            with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
                f.write(config_md)
        else:
            # Write to std output
@@ -16,6 +16,7 @@ Example:

import argparse
import json
import os
import sys

from fastapi.openapi.utils import get_openapi
@@ -57,9 +58,11 @@ def main():
    try:
        openapi_spec = generate_openapi()
        openapi_spec_str = json.dumps(openapi_spec, indent=2)
        if os.name == "nt":
            openapi_spec_str = openapi_spec_str.replace("127.0.0.1", "0.0.0.0")
        if args.output_file:
            # Write to file
            with open(args.output_file, "w", encoding="utf8") as f:
            with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
                f.write(openapi_spec_str)
        else:
            # Write to std output
@@ -3,6 +3,7 @@

import argparse
import json
import os
import sys

import git
@@ -284,9 +285,11 @@ def main():
    try:
        openapi_md = generate_openapi_md()
        if os.name == "nt":
            openapi_md = openapi_md.replace("127.0.0.1", "0.0.0.0")
        if args.output_file:
            # Write to file
            with open(args.output_file, "w", encoding="utf8") as f:
            with open(args.output_file, "w", encoding="utf-8", newline="\n") as f:
                f.write(openapi_md)
        else:
            # Write to std output
@@ -121,30 +121,40 @@ def run_prediction(provider_id: str, verbose: bool = False) -> str:
    # Initialize the prediction
    config_eos = get_config()
    prediction_eos = get_prediction()
    if verbose:
        print(f"\nProvider ID: {provider_id}")
    if provider_id in ("PVForecastAkkudoktor",):
        settings = config_pvforecast()
        settings["pvforecast"]["provider"] = provider_id
        forecast = "pvforecast"
    elif provider_id in ("BrightSky", "ClearOutside"):
        settings = config_weather()
        settings["weather"]["provider"] = provider_id
        forecast = "weather"
    elif provider_id in ("ElecPriceAkkudoktor",):
        settings = config_elecprice()
        settings["elecprice"]["provider"] = provider_id
        forecast = "elecprice"
    elif provider_id in ("LoadAkkudoktor",):
        settings = config_elecprice()
        forecast = "load"
        settings["load"]["loadakkudoktor_year_energy"] = 1000
        settings["load"]["provider"] = provider_id
    else:
        raise ValueError(f"Unknown provider '{provider_id}'.")
    settings[forecast]["provider"] = provider_id
    config_eos.merge_settings_from_dict(settings)

    provider = prediction_eos.provider_by_id(provider_id)

    prediction_eos.update_data()

    # Return result of prediction
    provider = prediction_eos.provider_by_id(provider_id)
    if verbose:
        print(f"\nProvider ID: {provider.provider_id()}")
        print("----------")
        print("\nSettings\n----------")
        print(settings)
        print("\nProvider\n----------")
        print(f"elecprice.provider: {config_eos.elecprice.provider}")
        print(f"load.provider: {config_eos.load.provider}")
        print(f"pvforecast.provider: {config_eos.pvforecast.provider}")
        print(f"weather.provider: {config_eos.weather.provider}")
        print(f"enabled: {provider.enabled()}")
        for key in provider.record_keys:
            print(f"\n{key}\n----------")
            print(f"Array: {provider.key_to_array(key)}")
@@ -22,15 +22,16 @@ from pydantic_settings import (
    PydanticBaseSettingsSource,
    SettingsConfigDict,
)
from pydantic_settings.sources import ConfigFileSourceMixin

# settings
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.cachesettings import CacheCommonSettings
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.core.decorators import classproperty
from akkudoktoreos.core.emsettings import EnergyManagementCommonSettings
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.logsettings import LoggingCommonSettings
from akkudoktoreos.core.pydantic import merge_models
from akkudoktoreos.core.pydantic import access_nested_value, merge_models
from akkudoktoreos.devices.settings import DevicesCommonSettings
from akkudoktoreos.measurement.measurement import MeasurementCommonSettings
from akkudoktoreos.optimization.optimization import OptimizationCommonSettings
@@ -96,10 +97,6 @@ class GeneralSettings(SettingsBaseModel):
        default="output", description="Sub-path for the EOS output data directory."
    )

    data_cache_subpath: Optional[Path] = Field(
        default="cache", description="Sub-path for the EOS cache data directory."
    )

    latitude: Optional[float] = Field(
        default=52.52,
        ge=-90.0,
@@ -128,12 +125,6 @@ class GeneralSettings(SettingsBaseModel):
        """Compute data_output_path based on data_folder_path."""
        return get_absolute_path(self.data_folder_path, self.data_output_subpath)

    @computed_field  # type: ignore[prop-decorator]
    @property
    def data_cache_path(self) -> Optional[Path]:
        """Compute data_cache_path based on data_folder_path."""
        return get_absolute_path(self.data_folder_path, self.data_cache_subpath)

    @computed_field  # type: ignore[prop-decorator]
    @property
    def config_folder_path(self) -> Optional[Path]:
@@ -153,21 +144,68 @@ class SettingsEOS(BaseSettings):
    Used by updating the configuration with specific settings only.
    """

    general: Optional[GeneralSettings] = None
    logging: Optional[LoggingCommonSettings] = None
    devices: Optional[DevicesCommonSettings] = None
    measurement: Optional[MeasurementCommonSettings] = None
    optimization: Optional[OptimizationCommonSettings] = None
    prediction: Optional[PredictionCommonSettings] = None
    elecprice: Optional[ElecPriceCommonSettings] = None
    load: Optional[LoadCommonSettings] = None
    pvforecast: Optional[PVForecastCommonSettings] = None
    weather: Optional[WeatherCommonSettings] = None
    server: Optional[ServerCommonSettings] = None
    utils: Optional[UtilsCommonSettings] = None
    general: Optional[GeneralSettings] = Field(
        default=None,
        description="General Settings",
    )
    cache: Optional[CacheCommonSettings] = Field(
        default=None,
        description="Cache Settings",
    )
    ems: Optional[EnergyManagementCommonSettings] = Field(
        default=None,
        description="Energy Management Settings",
    )
    logging: Optional[LoggingCommonSettings] = Field(
        default=None,
        description="Logging Settings",
    )
    devices: Optional[DevicesCommonSettings] = Field(
        default=None,
        description="Devices Settings",
    )
    measurement: Optional[MeasurementCommonSettings] = Field(
        default=None,
        description="Measurement Settings",
    )
    optimization: Optional[OptimizationCommonSettings] = Field(
        default=None,
        description="Optimization Settings",
    )
    prediction: Optional[PredictionCommonSettings] = Field(
        default=None,
        description="Prediction Settings",
    )
    elecprice: Optional[ElecPriceCommonSettings] = Field(
        default=None,
        description="Electricity Price Settings",
    )
    load: Optional[LoadCommonSettings] = Field(
        default=None,
        description="Load Settings",
    )
    pvforecast: Optional[PVForecastCommonSettings] = Field(
        default=None,
        description="PV Forecast Settings",
    )
    weather: Optional[WeatherCommonSettings] = Field(
        default=None,
        description="Weather Settings",
    )
    server: Optional[ServerCommonSettings] = Field(
        default=None,
        description="Server Settings",
    )
    utils: Optional[UtilsCommonSettings] = Field(
        default=None,
        description="Utilities Settings",
    )

    model_config = SettingsConfigDict(
        env_nested_delimiter="__", nested_model_default_partial_update=True, env_prefix="EOS_"
        env_nested_delimiter="__",
        nested_model_default_partial_update=True,
        env_prefix="EOS_",
        ignored_types=(classproperty,),
    )
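Given the `SettingsConfigDict` above (`env_prefix="EOS_"`, `env_nested_delimiter="__"`), nested settings can be overridden from the environment. A small sketch; the field chosen is illustrative:

```python
import os

# With env_prefix="EOS_" and env_nested_delimiter="__", the nested field
# general.latitude maps to the environment variable EOS_GENERAL__LATITUDE.
os.environ["EOS_GENERAL__LATITUDE"] = "48.14"

# On the next (re-)load of the settings sources, env_settings picks this
# value up, and it takes precedence over the JSON config file sources.
```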
@@ -178,6 +216,8 @@ class SettingsEOSDefaults(SettingsEOS):
    """

    general: GeneralSettings = GeneralSettings()
    cache: CacheCommonSettings = CacheCommonSettings()
    ems: EnergyManagementCommonSettings = EnergyManagementCommonSettings()
    logging: LoggingCommonSettings = LoggingCommonSettings()
    devices: DevicesCommonSettings = DevicesCommonSettings()
    measurement: MeasurementCommonSettings = MeasurementCommonSettings()
@@ -281,7 +321,13 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
        - This method logs a warning if the default configuration file cannot be copied.
        - It ensures that a fallback to the default configuration file is always possible.
        """
        file_settings: Optional[ConfigFileSourceMixin] = None
        setting_sources = [
            init_settings,
            env_settings,
            dotenv_settings,
        ]

        file_settings: Optional[JsonConfigSettingsSource] = None
        config_file, exists = cls._get_config_file_path()
        config_dir = config_file.parent
        if not exists:
@@ -292,28 +338,27 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
                logger.warning(f"Could not copy default config: {exc}. Using default config...")
                config_file = cls.config_default_file_path
                config_dir = config_file.parent
        try:
            file_settings = JsonConfigSettingsSource(settings_cls, json_file=config_file)
            setting_sources.append(file_settings)
        except Exception as e:
            logger.error(
                f"Error reading config file '{config_file}' (falling back to default config): {e}"
            )
        default_settings = JsonConfigSettingsSource(
            settings_cls, json_file=cls.config_default_file_path
        )
        GeneralSettings._config_folder_path = config_dir
        GeneralSettings._config_file_path = config_file

        return (
            init_settings,
            env_settings,
            dotenv_settings,
            file_settings,
            default_settings,
        )
        setting_sources.append(default_settings)
        return tuple(setting_sources)

    @classmethod
    @classproperty
    def config_default_file_path(cls) -> Path:
        """Compute the default config file path."""
        return cls.package_root_path.joinpath("data/default.config.json")

    @classmethod
    @classproperty
    def package_root_path(cls) -> Path:
        """Compute the package root path."""
@@ -327,13 +372,15 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
        """
        if hasattr(self, "_initialized"):
            return
        super().__init__(*args, **kwargs)
        self._create_initial_config_file()
        self._update_data_folder_path()
        self._setup(self, *args, **kwargs)

    def _setup(self, *args: Any, **kwargs: Any) -> None:
        """Re-initialize global settings."""
        # Assure settings base knows EOS configuration
        SettingsBaseModel.config = self
        # (Re-)load settings
        SettingsEOSDefaults.__init__(self, *args, **kwargs)
        # Init config file and data folder paths
        self._create_initial_config_file()
        self._update_data_folder_path()
@@ -379,11 +426,37 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
        """
        self._setup()

    def set_config_value(self, path: str, value: Any) -> None:
        """Set a configuration value based on the provided path.

        Supports string paths (with '/' separators) or sequence paths (list/tuple).
        Trims leading and trailing '/' from string paths.

        Args:
            path (str): The path to the configuration key (e.g., "key1/key2/key3" or "key1/key2/0").
            value (Any): The value to set.
        """
        access_nested_value(self, path, True, value)

    def get_config_value(self, path: str) -> Any:
        """Get a configuration value based on the provided path.

        Supports string paths (with '/' separators) or sequence paths (list/tuple).
        Trims leading and trailing '/' from string paths.

        Args:
            path (str): The path to the configuration key (e.g., "key1/key2/key3" or "key1/key2/0").

        Returns:
            Any: The retrieved value.
        """
        return access_nested_value(self, path, False)

    def _create_initial_config_file(self) -> None:
        if self.general.config_file_path and not self.general.config_file_path.exists():
            self.general.config_file_path.parent.mkdir(parents=True, exist_ok=True)
            try:
                with open(self.general.config_file_path, "w") as f:
                with self.general.config_file_path.open("w", encoding="utf-8", newline="\n") as f:
                    f.write(self.model_dump_json(indent=4))
            except Exception as e:
                logger.error(
@@ -436,7 +509,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
        logger.debug(f"Environment config dir: '{env_dir}'")
        if env_dir is not None:
            config_dirs.append(env_dir.resolve())
        config_dirs.append(Path(user_config_dir(cls.APP_NAME)))
        config_dirs.append(Path(user_config_dir(cls.APP_NAME, cls.APP_AUTHOR)))
        config_dirs.append(Path.cwd())
        for cdir in config_dirs:
            cfile = cdir.joinpath(cls.CONFIG_FILE_NAME)
@@ -455,7 +528,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
        """
        if not self.general.config_file_path:
            raise ValueError("Configuration file path unknown.")
        with self.general.config_file_path.open("w", encoding=self.ENCODING) as f_out:
        with self.general.config_file_path.open("w", encoding="utf-8", newline="\n") as f_out:
            json_str = super().model_dump_json()
            f_out.write(json_str)
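A short usage sketch for the `set_config_value`/`get_config_value` accessors added above; the import path and field paths are illustrative assumptions:

```python
from akkudoktoreos.config.config import get_config  # assumed import path

config = get_config()

# Paths use '/' separators; list elements are addressed by integer index.
config.set_config_value("general/latitude", 48.14)
assert config.get_config_value("general/latitude") == 48.14
```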
@@ -1,9 +1,12 @@
"""Abstract and base classes for configuration."""

from typing import Any, ClassVar

from akkudoktoreos.core.pydantic import PydanticBaseModel


class SettingsBaseModel(PydanticBaseModel):
    """Base model class for all settings configurations."""

    pass
    # EOS configuration - set by ConfigEOS
    config: ClassVar[Any] = None
@@ -1,32 +1,14 @@
"""Class for in-memory managing of cache files.
"""In-memory and file caching.

The `CacheFileStore` class is a singleton-based, thread-safe key-value store for managing
temporary file objects, allowing the creation, retrieval, and management of cache files.

Classes:
--------
- CacheFileStore: A thread-safe, singleton class for in-memory managing of file-like cache objects.
- CacheFileStoreMeta: Metaclass for enforcing the singleton behavior in `CacheFileStore`.

Example usage:
--------------
    # CacheFileStore usage
    >>> cache_store = CacheFileStore()
    >>> cache_store.create('example_key')
    >>> cache_file = cache_store.get('example_key')
    >>> cache_file.write('Some data')
    >>> cache_file.seek(0)
    >>> print(cache_file.read())  # Output: 'Some data'

Notes:
------
- Cache files are automatically associated with the current date unless specified.
Decorators and classes for caching results of computations,
both in memory (using an LRU cache) and in temporary files. It also includes
mechanisms for managing cache file expiration and retrieval.
"""

from __future__ import annotations

import functools
import hashlib
import inspect
import json
import os
import pickle
import tempfile
@@ -35,8 +17,8 @@ from typing import (
    IO,
    Any,
    Callable,
    ClassVar,
    Dict,
    Generic,
    List,
    Literal,
    Optional,
@@ -44,29 +26,226 @@ from typing import (
    TypeVar,
)

import cachebox
from pendulum import DateTime, Duration
from pydantic import BaseModel, ConfigDict, Field
from pydantic import Field

from akkudoktoreos.core.coreabc import ConfigMixin
from akkudoktoreos.core.coreabc import ConfigMixin, SingletonMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration

logger = get_logger(__name__)


T = TypeVar("T")
# ---------------------------------
# In-Memory Caching Functionality
# ---------------------------------

# Define a type variable for methods and functions
TCallable = TypeVar("TCallable", bound=Callable[..., Any])
def cache_until_update_store_callback(event: int, key: Any, value: Any) -> None:
    """Callback function for CacheUntilUpdateStore."""
    CacheUntilUpdateStore.last_event = event
    CacheUntilUpdateStore.last_key = key
    CacheUntilUpdateStore.last_value = value
    if event == cachebox.EVENT_MISS:
        CacheUntilUpdateStore.miss_count += 1
    elif event == cachebox.EVENT_HIT:
        CacheUntilUpdateStore.hit_count += 1
    else:
        # unreachable code
        raise NotImplementedError
class CacheUntilUpdateStore(SingletonMixin):
    """Singleton-based in-memory LRU (Least Recently Used) cache.

    This cache is shared across the application to store results of decorated
    methods or functions until the next EMS (Energy Management System) update.

    The cache uses an LRU eviction strategy, storing up to 100 items, with the oldest
    items being evicted once the cache reaches its capacity.
    """

    cache: ClassVar[cachebox.LRUCache] = cachebox.LRUCache(maxsize=100, iterable=None, capacity=100)
    last_event: ClassVar[Optional[int]] = None
    last_key: ClassVar[Any] = None
    last_value: ClassVar[Any] = None
    hit_count: ClassVar[int] = 0
    miss_count: ClassVar[int] = 0

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initializes the `CacheUntilUpdateStore` instance with default parameters.

        The cache uses an LRU eviction strategy with a maximum size of 100 items.
        This cache is a singleton, meaning only one instance will exist throughout
        the application lifecycle.

        Example:
            >>> cache = CacheUntilUpdateStore()
        """
        if hasattr(self, "_initialized"):
            return
        super().__init__(*args, **kwargs)

    def __getattr__(self, name: str) -> Any:
        """Propagates method calls to the cache object.

        This method allows you to call methods on the underlying cache object,
        and it will delegate the call to the cache's corresponding method.

        Args:
            name (str): The name of the method being called.

        Returns:
            Callable: A method bound to the cache object.

        Raises:
            AttributeError: If the cache object does not have the requested method.

        Example:
            >>> result = cache.get("key")
        """
        # This will return an attribute or method of the target cache, or raise an AttributeError
        target_attr = getattr(self.cache, name)
        return target_attr

    def __getitem__(self, key: Any) -> Any:
        """Retrieves an item from the cache by its key.

        Args:
            key (Any): The key used for subscripting to retrieve an item.

        Returns:
            Any: The value corresponding to the key in the cache.

        Raises:
            KeyError: If the key does not exist in the cache.

        Example:
            >>> value = cache["user_data"]
        """
        return CacheUntilUpdateStore.cache[key]

    def __setitem__(self, key: Any, value: Any) -> None:
        """Stores an item in the cache.

        Args:
            key (Any): The key used to store the item in the cache.
            value (Any): The value to store.

        Example:
            >>> cache["user_data"] = {"name": "Alice", "age": 30}
        """
        CacheUntilUpdateStore.cache[key] = value

    def __len__(self) -> int:
        """Returns the number of items in the cache."""
        return len(CacheUntilUpdateStore.cache)

    def __repr__(self) -> str:
        """Provides a string representation of the CacheUntilUpdateStore object."""
        return repr(CacheUntilUpdateStore.cache)

    def clear(self) -> None:
        """Clears the cache, removing all stored items.

        This method propagates the `clear` method call to the underlying cache object,
        ensuring that the cache is emptied when necessary (e.g., at the end of the energy
        management system run).

        Example:
            >>> cache.clear()
        """
        if hasattr(self.cache, "clear") and callable(getattr(self.cache, "clear")):
            CacheUntilUpdateStore.cache.clear()
            CacheUntilUpdateStore.last_event = None
            CacheUntilUpdateStore.last_key = None
            CacheUntilUpdateStore.last_value = None
            CacheUntilUpdateStore.miss_count = 0
            CacheUntilUpdateStore.hit_count = 0
        else:
            raise AttributeError(f"'{self.cache.__class__.__name__}' object has no method 'clear'")
def cachemethod_until_update(method: TCallable) -> TCallable:
    """Decorator for in memory caching the result of an instance method.

    This decorator caches the method's result in `CacheUntilUpdateStore`, ensuring
    that subsequent calls with the same arguments return the cached result until the
    next EMS update cycle.

    Args:
        method (Callable): The instance method to be decorated.

    Returns:
        Callable: The wrapped method with caching functionality.

    Example:
        >>> class MyClass:
        >>>     @cachemethod_until_update
        >>>     def expensive_method(self, param: str) -> str:
        >>>         # Perform expensive computation
        >>>         return f"Computed {param}"
    """

    @cachebox.cachedmethod(
        cache=CacheUntilUpdateStore().cache, callback=cache_until_update_store_callback
    )
    @functools.wraps(method)
    def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
        result = method(self, *args, **kwargs)
        return result

    return wrapper


def cache_until_update(func: TCallable) -> TCallable:
    """Decorator for in memory caching the result of a standalone function.

    This decorator caches the function's result in `CacheUntilUpdateStore`, ensuring
    that subsequent calls with the same arguments return the cached result until the
    next EMS update cycle.

    Args:
        func (Callable): The function to be decorated.

    Returns:
        Callable: The wrapped function with caching functionality.

    Example:
        >>> @cache_until_update
        >>> def expensive_function(param: str) -> str:
        >>>     # Perform expensive computation
        >>>     return f"Computed {param}"
    """

    @cachebox.cached(
        cache=CacheUntilUpdateStore().cache, callback=cache_until_update_store_callback
    )
    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        result = func(*args, **kwargs)
        return result

    return wrapper
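A usage sketch for the two decorators above; `expensive_function` is illustrative, and the import path follows the one used elsewhere in this change set:

```python
from akkudoktoreos.core.cache import CacheUntilUpdateStore, cache_until_update


@cache_until_update
def expensive_function(param: str) -> str:
    # Stand-in for an expensive computation.
    return f"Computed {param}"


expensive_function("x")  # first call: computed, recorded as a miss
expensive_function("x")  # second call: served from the LRU store, recorded as a hit
print(CacheUntilUpdateStore.miss_count, CacheUntilUpdateStore.hit_count)  # 1 1

# EnergyManagement.run() clears the store at the start of each cycle, after
# which the next call recomputes:
CacheUntilUpdateStore().clear()
```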
# ---------------------------------
# Cache File Management
# ---------------------------------

Param = ParamSpec("Param")
RetType = TypeVar("RetType")
class CacheFileRecord(BaseModel):
    # Enable custom serialization globally in config
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        use_enum_values=True,
        validate_assignment=True,
    )

class CacheFileRecord(PydanticBaseModel):
    cache_file: Any = Field(..., description="File descriptor of the cache file.")
    until_datetime: DateTime = Field(..., description="Datetime until the cache file is valid.")
    ttl_duration: Optional[Duration] = Field(
@@ -74,24 +253,7 @@ class CacheFileRecord(BaseModel):
    )
class CacheFileStoreMeta(type, Generic[T]):
    """A thread-safe implementation of CacheFileStore."""

    _instances: dict[CacheFileStoreMeta[T], T] = {}

    _lock: threading.Lock = threading.Lock()
    """Lock object to synchronize threads on first access to CacheFileStore."""

    def __call__(cls) -> T:
        """Return CacheFileStore instance."""
        with cls._lock:
            if cls not in cls._instances:
                instance = super().__call__()
                cls._instances[cls] = instance
        return cls._instances[cls]


class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
class CacheFileStore(ConfigMixin, SingletonMixin):
    """A key-value store that manages file-like tempfile objects to be used as cache files.

    Cache files are associated with a date. If no date is specified, the cache files are
@@ -105,7 +267,7 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
        store (dict): A dictionary that holds the in-memory cache file objects
            with their associated keys and dates.

    Example usage:
    Example:
        >>> cache_store = CacheFileStore()
        >>> cache_store.create('example_file')
        >>> cache_file = cache_store.get('example_file')
@@ -114,14 +276,18 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
        >>> print(cache_file.read())  # Output: 'Some data'
    """

    def __init__(self) -> None:
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initializes the CacheFileStore instance.

        This constructor sets up an empty key-value store (a dictionary) where each key
        corresponds to a cache file that is associated with a given key and an optional date.
        """
        if hasattr(self, "_initialized"):
            return
        self._store: Dict[str, CacheFileRecord] = {}
        self._store_lock = threading.Lock()
        self._store_lock = threading.RLock()
        self._store_file = self.config.cache.path().joinpath("cachefilestore.json")
        super().__init__(*args, **kwargs)

    def _until_datetime_by_options(
        self,
@@ -329,9 +495,9 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
            # File already available
            cache_file_obj = cache_item.cache_file
        else:
            self.config.general.data_cache_path.mkdir(parents=True, exist_ok=True)
            self.config.cache.path().mkdir(parents=True, exist_ok=True)
            cache_file_obj = tempfile.NamedTemporaryFile(
                mode=mode, delete=delete, suffix=suffix, dir=self.config.general.data_cache_path
                mode=mode, delete=delete, suffix=suffix, dir=self.config.cache.path()
            )
            self._store[cache_file_key] = CacheFileRecord(
                cache_file=cache_file_obj,
@@ -502,7 +668,7 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
    def clear(
        self,
        clear_all: bool = False,
        clear_all: Optional[bool] = None,
        before_datetime: Optional[Any] = None,
    ) -> None:
        """Deletes all cache files or those expiring before `before_datetime`.
@@ -516,8 +682,6 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
        Raises:
            OSError: If there's an error during file deletion.
        """
        delete_keys = []  # List of keys to delete, prevent deleting when traversing the store

        # Some weird logic to prevent calling to_datetime on clear_all.
        # Clear_all may be set on __del__. At this time some info for to_datetime will
        # not be available anymore.
@@ -528,6 +692,8 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
            before_datetime = to_datetime(before_datetime)

        with self._store_lock:  # Synchronize access to _store
            delete_keys = []  # List of keys to delete, prevent deleting when traversing the store

            for cache_file_key, cache_item in self._store.items():
                # Some weird logic to prevent calling to_datetime on clear_all.
                # Clear_all may be set on __del__. At this time some info for to_datetime will
@@ -566,6 +732,89 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
            for delete_key in delete_keys:
                del self._store[delete_key]

    def current_store(self) -> dict:
        """Current state of the store.

        Returns:
            data (dict): current cache management data.
        """
        with self._store_lock:
            store_current = {}
            for key, record in self._store.items():
                ttl_duration = record.ttl_duration
                if ttl_duration:
                    ttl_duration = ttl_duration.total_seconds()
                store_current[key] = {
                    # Convert file-like objects to file paths for serialization
                    "cache_file": self._get_file_path(record.cache_file),
                    "mode": record.cache_file.mode,
                    "until_datetime": to_datetime(record.until_datetime, as_string=True),
                    "ttl_duration": ttl_duration,
                }
            return store_current

    def save_store(self) -> dict:
        """Saves the current state of the store to a file.

        Returns:
            data (dict): cache management data that was saved.
        """
        with self._store_lock:
            self._store_file.parent.mkdir(parents=True, exist_ok=True)
            store_to_save = self.current_store()
            with self._store_file.open("w", encoding="utf-8", newline="\n") as f:
                try:
                    json.dump(store_to_save, f, indent=4)
                except Exception as e:
                    logger.error(f"Error saving cache file store: {e}")
            return store_to_save

    def load_store(self) -> dict:
        """Loads the state of the store from a file.

        Returns:
            data (dict): cache management data that was loaded.
        """
        with self._store_lock:
            store_loaded = {}
            if self._store_file.exists():
                with self._store_file.open("r", encoding="utf-8", newline=None) as f:
                    try:
                        store_to_load = json.load(f)
                    except Exception as e:
                        logger.error(
                            f"Error loading cache file store: {e}\n"
                            + f"Deleting the store file {self._store_file}."
                        )
                        self._store_file.unlink()
                        return {}
                for key, record in store_to_load.items():
                    if record is None:
                        continue
                    if key in self._store.keys():
                        # Already available - do not overwrite by record from file
                        continue
                    try:
                        cache_file_obj = open(
                            record["cache_file"], "rb+" if "b" in record["mode"] else "r+"
                        )
                    except Exception as e:
                        cache_file_record = record["cache_file"]
                        logger.warning(f"Can not open cache file '{cache_file_record}': {e}")
                        continue
                    ttl_duration = record["ttl_duration"]
                    if ttl_duration:
                        ttl_duration = to_duration(float(record["ttl_duration"]))
                    self._store[key] = CacheFileRecord(
                        cache_file=cache_file_obj,
                        until_datetime=record["until_datetime"],
                        ttl_duration=ttl_duration,
                    )
                    cache_file_obj.seek(0)
                    # Remember newly loaded
                    store_loaded[key] = record
            return store_loaded

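The pair of methods above lets the file cache survive server restarts. A hedged sketch of the intended round trip; exactly when the server calls these is an assumption here:

```python
from akkudoktoreos.core.cache import CacheFileStore

store = CacheFileStore()  # singleton

# On shutdown: persist key -> (path, mode, expiry) to cachefilestore.json.
store.save_store()

# On startup: re-open any still-existing cache files. Entries already in
# memory are not overwritten; unreadable files are skipped with a warning.
store.load_store()
```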

def cache_in_file(
    ignore_params: List[str] = [],
32
src/akkudoktoreos/core/cachesettings.py
Normal file
@@ -0,0 +1,32 @@
"""Settings for caching.

Kept in an extra module to avoid cyclic dependencies on package import.
"""

from pathlib import Path
from typing import Optional

from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel


class CacheCommonSettings(SettingsBaseModel):
    """Cache Configuration."""

    subpath: Optional[Path] = Field(
        default="cache", description="Sub-path for the EOS cache data directory."
    )

    cleanup_interval: float = Field(
        default=5 * 60, description="Interval in seconds for EOS file cache cleanup."
    )

    # Do not make this a pydantic computed field. The pydantic model must be fully initialized
    # to have access to config.general, which may not be the case if it is a computed field.
    def path(self) -> Optional[Path]:
        """Compute cache path based on general.data_folder_path."""
        data_cache_path = self.config.general.data_folder_path
        if data_cache_path is None or self.subpath is None:
            return None
        return data_cache_path.joinpath(self.subpath)
@@ -265,10 +265,12 @@ class SingletonMixin:
        class MySingletonModel(SingletonMixin, PydanticBaseModel):
            name: str

            # implement __init__ to avoid re-initialization of parent class PydanticBaseModel:
            # implement __init__ to avoid re-initialization of parent classes:
            def __init__(self, *args: Any, **kwargs: Any) -> None:
                if hasattr(self, "_initialized"):
                    return
                # Your initialisation here
                ...
                super().__init__(*args, **kwargs)

        instance1 = MySingletonModel(name="Instance 1")
@@ -811,7 +811,8 @@ class DataSequence(DataBase, MutableSequence):
        dates, values = self.key_to_lists(
            key=key, start_datetime=start_datetime, end_datetime=end_datetime, dropna=dropna
        )
        return pd.Series(data=values, index=pd.DatetimeIndex(dates), name=key)
        series = pd.Series(data=values, index=pd.DatetimeIndex(dates), name=key)
        return series

    def key_from_series(self, key: str, series: pd.Series) -> None:
        """Update the DataSequence from a Pandas Series.
@@ -953,6 +954,44 @@ class DataSequence(DataBase, MutableSequence):
            array = resampled.values
        return array

    def to_dataframe(
        self,
        start_datetime: Optional[DateTime] = None,
        end_datetime: Optional[DateTime] = None,
    ) -> pd.DataFrame:
        """Converts the sequence of DataRecord instances into a Pandas DataFrame.

        Args:
            start_datetime (Optional[datetime]): The lower bound for filtering (inclusive).
                Defaults to the earliest possible datetime if None.
            end_datetime (Optional[datetime]): The upper bound for filtering (exclusive).
                Defaults to the latest possible datetime if None.

        Returns:
            pd.DataFrame: A DataFrame containing the filtered data from all records.
        """
        if not self.records:
            return pd.DataFrame()  # Return empty DataFrame if no records exist

        # Use filter_by_datetime to get filtered records
        filtered_records = self.filter_by_datetime(start_datetime, end_datetime)

        # Convert filtered records to a dictionary list
        data = [record.model_dump() for record in filtered_records]

        # Convert to DataFrame
        df = pd.DataFrame(data)
        if df.empty:
            return df

        # Ensure `date_time` column exists and use it for the index
        if "date_time" not in df.columns:
            error_msg = f"Cannot create dataframe: no `date_time` column in `{df}`."
            logger.error(error_msg)
            raise TypeError(error_msg)
        df.index = pd.DatetimeIndex(df["date_time"])
        return df

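A brief usage sketch for the new `to_dataframe` method; `sequence` stands in for a populated `DataSequence`, and the date range is illustrative:

```python
# Hedged sketch: assume `sequence` is an existing, populated DataSequence.
import pendulum

start = pendulum.datetime(2025, 3, 1, tz="Europe/Berlin")
end = start.add(days=1)

# One row per DataRecord in [start, end), indexed by each record's date_time.
df = sequence.to_dataframe(start_datetime=start, end_datetime=end)
print(df.index.min(), df.index.max())
```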
    def sort_by_datetime(self, reverse: bool = False) -> None:
        """Sort the DataRecords in the sequence by their date_time attribute.

@@ -1229,14 +1268,14 @@ class DataImportMixin:
                # We jump back by 1 hour
                # Repeat the value(s) (reuse value index)
                for i in range(interval_steps_per_hour):
                    logger.debug(f"{i+1}: Repeat at {next_time} with index {value_index}")
                    logger.debug(f"{i + 1}: Repeat at {next_time} with index {value_index}")
                    timestamps_with_indices.append((next_time, value_index))
                    next_time = next_time.add(seconds=interval.total_seconds())
            else:
                # We jump forward by 1 hour
                # Drop the value(s)
                logger.debug(
                    f"{i+1}: Skip {interval_steps_per_hour} at {next_time} with index {value_index}"
                    f"{i + 1}: Skip {interval_steps_per_hour} at {next_time} with index {value_index}"
                )
                value_index += interval_steps_per_hour
@@ -1465,7 +1504,7 @@ class DataImportMixin:
            error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
            logger.debug(f"PydanticDateTimeDataFrame import: {error_msg}")

        # Try dictionary with special keys start_datetime and intervall
        # Try dictionary with special keys start_datetime and interval
        try:
            import_data = PydanticDateTimeData.model_validate_json(json_str)
            self.import_from_dict(import_data.to_dict())
@@ -1525,7 +1564,7 @@ class DataImportMixin:
        and `key_prefix = "load"`, only the "load_mean" key will be processed even though
        both keys are in the record.
        """
        with import_file_path.open("r") as import_file:
        with import_file_path.open("r", encoding="utf-8", newline=None) as import_file:
            import_str = import_file.read()
        self.import_from_json(
            import_str, key_prefix=key_prefix, start_datetime=start_datetime, interval=interval
@@ -1807,6 +1846,88 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
        return array

    def keys_to_dataframe(
        self,
        keys: list[str],
        start_datetime: Optional[DateTime] = None,
        end_datetime: Optional[DateTime] = None,
        interval: Optional[Any] = None,  # Duration assumed
        fill_method: Optional[str] = None,
    ) -> pd.DataFrame:
        """Retrieve a dataframe indexed by fixed time intervals for specified keys from the data in each DataProvider.

        Generates a pandas DataFrame using the NumPy arrays for each specified key, ensuring a common time index.

        Args:
            keys (list[str]): A list of field names to retrieve.
            start_datetime (datetime, optional): Start date for filtering records (inclusive).
            end_datetime (datetime, optional): End date for filtering records (exclusive).
            interval (duration, optional): The fixed time interval. Defaults to 1 hour.
            fill_method (str, optional): Method to handle missing values during resampling.
                - 'linear': Linearly interpolate missing values (for numeric data only).
                - 'ffill': Forward fill missing values.
                - 'bfill': Backward fill missing values.
                - 'none': Defaults to 'linear' for numeric values, otherwise 'ffill'.

        Returns:
            pd.DataFrame: A DataFrame where each column represents a key's array with a common time index.

        Raises:
            KeyError: If no valid data is found for any of the requested keys.
            ValueError: If any retrieved array has a different time index than the first one.
        """
        # Ensure datetime objects are normalized
        start_datetime = to_datetime(start_datetime, to_maxtime=False) if start_datetime else None
        end_datetime = to_datetime(end_datetime, to_maxtime=False) if end_datetime else None
        if interval is None:
            interval = to_duration("1 hour")
        if start_datetime is None:
            # Take earliest datetime of all providers that are enabled
            for provider in self.enabled_providers:
                if start_datetime is None:
                    start_datetime = provider.min_datetime
                elif (
                    provider.min_datetime
                    and compare_datetimes(provider.min_datetime, start_datetime).lt
                ):
                    start_datetime = provider.min_datetime
        if end_datetime is None:
            # Take latest datetime of all providers that are enabled
            for provider in self.enabled_providers:
                if end_datetime is None:
                    end_datetime = provider.max_datetime
                elif (
                    provider.max_datetime
                    and compare_datetimes(provider.max_datetime, end_datetime).gt
                ):
                    end_datetime = provider.max_datetime
            if end_datetime:
                end_datetime = end_datetime.add(seconds=1)

        # Create a DatetimeIndex based on start, end, and interval
        reference_index = pd.date_range(
            start=start_datetime, end=end_datetime, freq=interval, inclusive="left"
        )

        data = {}
        for key in keys:
            try:
                array = self.key_to_array(key, start_datetime, end_datetime, interval, fill_method)

                if len(array) != len(reference_index):
                    raise ValueError(
                        f"Array length mismatch for key '{key}' (expected {len(reference_index)}, got {len(array)})"
                    )

                data[key] = array
            except KeyError as e:
                raise KeyError(f"Failed to retrieve data for key '{key}': {e}")

        if not data:
            raise KeyError(f"No valid data found for the requested keys {keys}.")

        return pd.DataFrame(data, index=reference_index)

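A usage sketch for this container-level helper; `get_prediction` is used elsewhere in this change set, while the key names here are illustrative assumptions:

```python
from akkudoktoreos.prediction.prediction import get_prediction  # assumed path

prediction = get_prediction()  # a DataContainer of prediction providers

# Both columns are resampled onto one hourly DatetimeIndex; a mismatching
# length raises ValueError, an unknown key raises KeyError.
df = prediction.keys_to_dataframe(
    keys=["load_mean", "elecprice_marketprice_wh"],  # illustrative keys
    fill_method="linear",
)
```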
    def provider_by_id(self, provider_id: str) -> DataProvider:
        """Retrieves a data provider by its unique identifier.

@@ -1,3 +1,4 @@
from collections.abc import Callable
from typing import Any, Optional

from akkudoktoreos.core.logging import get_logger
@@ -5,13 +6,14 @@ from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)


class classproperty(property):
class classproperty:
    """A decorator to define a read-only property at the class level.

    This class extends the built-in `property` to allow a method to be accessed
    as a property on the class itself, rather than an instance. This is useful
    when you want a property-like syntax for methods that depend on the class
    rather than any instance of the class.
    This class replaces the built-in `property` which is no longer available in
    combination with @classmethod since Python 3.13 to allow a method to be
    accessed as a property on the class itself, rather than an instance. This
    is useful when you want a property-like syntax for methods that depend on
    the class rather than any instance of the class.

    Example:
        class MyClass:
@@ -28,13 +30,16 @@ class classproperty(property):
        decorated method on the class.

    Parameters:
        fget (Callable[[type], Any]): A method that takes the class as an
        fget (Callable[[Any], Any]): A method that takes the class as an
            argument and returns a value.

    Raises:
        AssertionError: If `fget` is not defined when `__get__` is called.
    """

    def __init__(self, fget: Callable[[Any], Any]) -> None:
        self.fget = fget

    def __get__(self, _: Any, owner_cls: Optional[type[Any]] = None) -> Any:
        if owner_cls is None:
            return self
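Since Python 3.13 removed support for stacking `@classmethod` with `@property`, this replacement keeps the old access syntax working. A short sketch, assuming the elided remainder of `__get__` calls `fget(owner_cls)` as the docstring describes:

```python
from akkudoktoreos.core.decorators import classproperty


class AppInfo:
    _name = "EOS"

    @classproperty
    def name(cls) -> str:  # accessed on the class, no instance needed
        return cls._name


# Class-level access triggers the descriptor's __get__ with owner_cls=AppInfo,
# so no parentheses are needed:
print(AppInfo.name)  # -> "EOS"
```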
@@ -6,19 +6,20 @@ from pendulum import DateTime
from pydantic import ConfigDict, Field, computed_field, field_validator, model_validator
from typing_extensions import Self

from akkudoktoreos.core.cache import CacheUntilUpdateStore
from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin, SingletonMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import ParametersBaseModel, PydanticBaseModel
from akkudoktoreos.devices.battery import Battery
from akkudoktoreos.devices.generic import HomeAppliance
from akkudoktoreos.devices.inverter import Inverter
from akkudoktoreos.utils.datetimeutil import to_datetime
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
from akkudoktoreos.utils.utils import NumpyEncoder

logger = get_logger(__name__)


class EnergieManagementSystemParameters(ParametersBaseModel):
class EnergyManagementParameters(ParametersBaseModel):
    pv_prognose_wh: list[float] = Field(
        description="An array of floats representing the forecasted photovoltaic output in watts for different time intervals."
    )
@@ -107,7 +108,7 @@ class SimulationResult(ParametersBaseModel):
        return NumpyEncoder.convert_numpy(field)[0]


class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBaseModel):
class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBaseModel):
    # Disable validation on assignment to speed up simulation runs.
    model_config = ConfigDict(
        validate_assignment=False,
@@ -116,16 +117,33 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
    # Start datetime.
    _start_datetime: ClassVar[Optional[DateTime]] = None

    # last run datetime. Used by energy management task
    _last_datetime: ClassVar[Optional[DateTime]] = None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def start_datetime(self) -> DateTime:
        """The starting datetime of the current or latest energy management."""
        if EnergieManagementSystem._start_datetime is None:
            EnergieManagementSystem.set_start_datetime()
        return EnergieManagementSystem._start_datetime
        if EnergyManagement._start_datetime is None:
            EnergyManagement.set_start_datetime()
        return EnergyManagement._start_datetime

    @classmethod
    def set_start_datetime(cls, start_datetime: Optional[DateTime] = None) -> DateTime:
        """Set the start datetime for the next energy management cycle.

        If no datetime is provided, the current datetime is used.

        The start datetime is always rounded down to the nearest hour
        (i.e., setting minutes, seconds, and microseconds to zero).

        Args:
            start_datetime (Optional[DateTime]): The datetime to set as the start.
                If None, the current datetime is used.

        Returns:
            DateTime: The adjusted start datetime.
        """
        if start_datetime is None:
            start_datetime = to_datetime()
        cls._start_datetime = start_datetime.set(minute=0, second=0, microsecond=0)
@@ -176,7 +194,7 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
    def set_parameters(
        self,
        parameters: EnergieManagementSystemParameters,
        parameters: EnergyManagementParameters,
        ev: Optional[Battery] = None,
        home_appliance: Optional[HomeAppliance] = None,
        inverter: Optional[Inverter] = None,
@@ -243,8 +261,9 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
            is mostly relevant to prediction providers.
            force_update (bool, optional): If True, forces to update the data even if still cached.
        """
        # Throw away any cached results of the last run.
        CacheUntilUpdateStore().clear()
        self.set_start_hour(start_hour=start_hour)
        self.config.update()

        # Check for run definitions
        if self.start_datetime is None:
@@ -255,14 +274,70 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
            error_msg = "Prediction hours unknown."
            logger.error(error_msg)
            raise ValueError(error_msg)
        if self.config.prediction.optimisation_hours is None:
            error_msg = "Optimisation hours unknown."
        if self.config.optimization.hours is None:
            error_msg = "Optimization hours unknown."
            logger.error(error_msg)
            raise ValueError(error_msg)

        self.prediction.update_data(force_enable=force_enable, force_update=force_update)
        # TODO: Create optimisation problem that calls into devices.update_data() for simulations.

    def manage_energy(self) -> None:
        """Repeating task for managing energy.

        This task should be executed by the server regularly (e.g., every 10 seconds)
        to ensure proper energy management. Configuration changes to the energy management interval
        will only take effect if this task is executed.

        - Initializes and runs the energy management for the first time if it has never been run
          before.
        - If the energy management interval is not configured or invalid (NaN), the task will not
          trigger any repeated energy management runs.
        - Compares the current time with the last run time and runs the energy management if the
          interval has elapsed.
        - Logs any exceptions that occur during the initialization or execution of the energy
          management.

        Note: The task maintains the interval even if some intervals are missed.
        """
        current_datetime = to_datetime()

        if EnergyManagement._last_datetime is None:
            # Never run before
            try:
                # Try to run a first energy management. May fail due to config incomplete.
                self.run()
                # Remember energy run datetime.
                EnergyManagement._last_datetime = current_datetime
            except Exception as e:
                message = f"EOS init: {e}"
                logger.error(message)
            return

        if self.config.ems.interval is None or self.config.ems.interval == float("nan"):
            # No Repetition
            return

        if (
            compare_datetimes(current_datetime, self._last_datetime).time_diff
            < self.config.ems.interval
        ):
            # Wait for next run
            return

        try:
            self.run()
        except Exception as e:
            message = f"EOS run: {e}"
            logger.error(message)

        # Remember the energy management run - keep on interval even if we missed some intervals
        while (
            compare_datetimes(current_datetime, EnergyManagement._last_datetime).time_diff
            >= self.config.ems.interval
        ):
            EnergyManagement._last_datetime = EnergyManagement._last_datetime.add(
                seconds=self.config.ems.interval
            )

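The docstring above expects the server to call this task on a short tick. A hedged sketch of such a driver loop; the scheduling code and module path are illustrative, not the actual EOS server:

```python
import threading

from akkudoktoreos.core.ems import get_ems  # assumed module path


def energy_management_tick() -> None:
    # manage_energy() itself decides whether config.ems.interval has elapsed,
    # so a fixed 10-second tick is safe to run unconditionally.
    get_ems().manage_energy()
    threading.Timer(10.0, energy_management_tick).start()


energy_management_tick()
```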
    def set_start_hour(self, start_hour: Optional[int] = None) -> None:
        """Sets start datetime to given hour.

@@ -440,9 +515,9 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda

# Initialize the Energy Management System, it is a singleton.
ems = EnergieManagementSystem()
ems = EnergyManagement()


def get_ems() -> EnergieManagementSystem:
def get_ems() -> EnergyManagement:
    """Gets the EOS Energy Management System."""
    return ems
26
src/akkudoktoreos/core/emsettings.py
Normal file
@@ -0,0 +1,26 @@
"""Settings for energy management.

Kept in an extra module to avoid cyclic dependencies on package import.
"""

from typing import Optional

from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel


class EnergyManagementCommonSettings(SettingsBaseModel):
    """Energy Management Configuration."""

    startup_delay: float = Field(
        default=5,
        ge=1,
        description="Startup delay in seconds for EOS energy management runs.",
    )

    interval: Optional[float] = Field(
        default=None,
        description="Interval in seconds between EOS energy management runs.",
        examples=["300"],
    )
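As a sketch, the new settings can be activated through the EOS REST API (the `PUT /v1/config` endpoint appears elsewhere in this diff; host and port are assumptions matching the development defaults):

```python
import requests

# Run energy management every 5 minutes, starting 5 s after server startup.
settings = {"ems": {"startup_delay": 5, "interval": 300}}
result = requests.put("http://localhost:8503/v1/config", json=settings)
result.raise_for_status()
```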
@@ -52,6 +52,10 @@ def get_logger(
    # Create a logger with the specified name
    logger = pylogging.getLogger(name)
    logger.propagate = True
    # Level setting is already supported by pydantic-settings in LoggingCommonSettings.
    # Set it here manually as well, in case loading the config itself fails and to set
    # the level before the config is loaded.
    if logging_level is None and (env_level := os.getenv("EOS_LOGGING__LEVEL")) is not None:
        logging_level = env_level
    if logging_level is not None:
        level = logging_str_to_level(logging_level)
        logger.setLevel(level)
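For illustration, the environment override can be exercised before any configuration is loaded (variable name taken from the hunk above; setting it in the shell is equivalent):

```python
import os

# Must be set before get_logger() runs, e.g. in the shell:
#   EOS_LOGGING__LEVEL=DEBUG python -m akkudoktoreos.server.eos
os.environ["EOS_LOGGING__LEVEL"] = "DEBUG"
```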
@@ -51,6 +51,70 @@ def merge_models(source: BaseModel, update_dict: dict[str, Any]) -> dict[str, An
    return merged_dict


def access_nested_value(
    model: BaseModel, path: str, setter: bool, value: Optional[Any] = None
) -> Any:
    """Get or set a nested model value based on the provided path.

    Supports string paths with '/' separators, including numeric indices into lists.
    Trims leading and trailing '/' from the path.

    Args:
        model (BaseModel): The model object for partial assignment.
        path (str): The path to the model key (e.g., "key1/key2/key3" or "key1/key2/0").
        setter (bool): True to set value at path, False to return value at path.
        value (Optional[Any]): The value to set.

    Returns:
        Any: The retrieved value if acting as a getter, or None if setting a value.
    """
    path_elements = path.strip("/").split("/")

    cfg: Any = model
    parent: BaseModel = model
    model_key: str = ""

    for i, key in enumerate(path_elements):
        is_final_key = i == len(path_elements) - 1

        if isinstance(cfg, list):
            try:
                idx = int(key)
                if is_final_key:
                    if not setter:  # Getter
                        return cfg[idx]
                    else:  # Setter
                        new_list = list(cfg)
                        new_list[idx] = value
                        # Trigger validation
                        setattr(parent, model_key, new_list)
                else:
                    cfg = cfg[idx]
            except ValidationError as e:
                raise ValueError(f"Error updating model: {e}") from e
            except (ValueError, IndexError) as e:
                raise IndexError(f"Invalid list index at {path}: {key}") from e

        elif isinstance(cfg, BaseModel):
            parent = cfg
            model_key = key
            if is_final_key:
                if not setter:  # Getter
                    return getattr(cfg, key)
                else:  # Setter
                    try:
                        # Also verifies nested values, as opposed to a plain setattr.
                        # Will merge partial assignment.
                        cfg = cfg.__pydantic_validator__.validate_assignment(cfg, key, value)
                    except Exception as e:
                        raise ValueError(f"Error updating model: {e}") from e
            else:
                cfg = getattr(cfg, key)

        else:
            raise KeyError(f"Key '{key}' not found in model.")


class PydanticTypeAdapterDateTime(TypeAdapter[pendulum.DateTime]):
    """Custom type adapter for Pendulum DateTime fields."""

@@ -373,6 +437,10 @@ class PydanticDateTimeDataFrame(PydanticBaseModel):
        index = pd.Index([to_datetime(dt, in_timezone=self.tz) for dt in df.index])
        df.index = index

        # Check if 'date_time' column exists, if not, create it
        if "date_time" not in df.columns:
            df["date_time"] = df.index

        dtype_mapping = {
            "int": int,
            "float": float,
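A minimal usage sketch of `access_nested_value` (the import path assumes it lives in `akkudoktoreos.core.pydantic`, the file this hunk belongs to; the nested models are invented for illustration):

```python
from pydantic import BaseModel

from akkudoktoreos.core.pydantic import access_nested_value  # assumed module path


class Server(BaseModel):
    host: str = "127.0.0.1"
    port: int = 8503


class Settings(BaseModel):
    server: Server = Server()


settings = Settings()

# Getter: read a nested value via a '/'-separated path.
print(access_nested_value(settings, "server/port", setter=False))  # 8503

# Setter: assignment is validated by pydantic, so a bad type raises ValueError.
access_nested_value(settings, "server/port", setter=True, value=8504)
```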
@@ -1,6 +1,7 @@
import logging
from typing import List, Sequence

from akkudoktoreos.core.logging import get_logger


class Heatpump:
    MAX_HEAT_OUTPUT = 5000
@@ -21,7 +22,7 @@ class Heatpump:
    def __init__(self, max_heat_output: int, hours: int):
        self.max_heat_output = max_heat_output
        self.hours = hours
        self.log = logging.getLogger(__name__)
        self.log = get_logger(__name__)

    def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:
        """Check if temperature is in valid range between -100 and 100 degree Celsius.
@@ -1,4 +1,3 @@
import logging
import random
import time
from typing import Any, Optional
@@ -13,7 +12,7 @@ from akkudoktoreos.core.coreabc import (
    DevicesMixin,
    EnergyManagementSystemMixin,
)
from akkudoktoreos.core.ems import EnergieManagementSystemParameters, SimulationResult
from akkudoktoreos.core.ems import EnergyManagementParameters, SimulationResult
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import ParametersBaseModel
from akkudoktoreos.devices.battery import (
@@ -30,7 +29,7 @@ logger = get_logger(__name__)


class OptimizationParameters(ParametersBaseModel):
    ems: EnergieManagementSystemParameters
    ems: EnergyManagementParameters
    pv_akku: Optional[SolarPanelBatteryParameters]
    inverter: Optional[InverterParameters]
    eauto: Optional[ElectricVehicleParameters]
@@ -121,7 +120,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
        # Set a fixed seed for random operations if provided or in debug mode
        if self.fix_seed is not None:
            random.seed(self.fix_seed)
        elif logger.level == logging.DEBUG:
        elif logger.level == "DEBUG":
            self.fix_seed = random.randint(1, 100000000000)
            random.seed(self.fix_seed)
@@ -14,10 +14,10 @@ import requests
from pydantic import ValidationError
from statsmodels.tsa.holtwinters import ExponentialSmoothing

from akkudoktoreos.core.cache import cache_in_file
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration

logger = get_logger(__name__)
@@ -63,6 +63,9 @@ class ElecPriceImport(ElecPriceProvider, PredictionImportProvider):
        return "ElecPriceImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        if self.config.elecprice.provider_settings is None:
            logger.debug(f"{self.provider_id()} data update without provider settings.")
            return
        if self.config.elecprice.provider_settings.import_file_path:
            self.import_from_file(
                self.config.elecprice.provider_settings.import_file_path,
@@ -62,6 +62,9 @@ class LoadImport(LoadProvider, PredictionImportProvider):
        return "LoadImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        if self.config.load.provider_settings is None:
            logger.debug(f"{self.provider_id()} data update without provider settings.")
            return
        if self.config.load.provider_settings.import_file_path:
            self.import_from_file(self.config.load.provider_settings.import_file_path, key_prefix="load")
        if self.config.load.provider_settings.import_json:
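The import providers changed above all follow one convention: if the provider settings carry an `import_file_path` or `import_json`, data is loaded from there under a provider-specific key prefix. A hedged sketch of selecting such a provider via settings (field names taken from this diff; the full settings model is not shown here):

```python
settings = {
    "load": {
        "provider": "LoadImport",
        "provider_settings": {
            # Either a file to import from ...
            "import_file_path": "/opt/eos/data/load.json",
            # ... or inline JSON; one of the two is typically enough.
            "import_json": None,
        },
    },
}
```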
@@ -206,9 +206,6 @@ class PredictionProvider(PredictionStartEndKeepMixin, DataProvider):
            force_enable (bool, optional): If True, forces the update even if the provider is disabled.
            force_update (bool, optional): If True, forces the provider to update the data even if still cached.
        """
        # Update prediction configuration
        self.config.update()

        # Check after configuration is updated.
        if not force_enable and not self.enabled():
            return
@@ -80,13 +80,13 @@ from typing import Any, List, Optional, Union
import requests
from pydantic import Field, ValidationError, computed_field

from akkudoktoreos.core.cache import cache_in_file
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.prediction.pvforecastabc import (
    PVForecastDataRecord,
    PVForecastProvider,
)
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime

logger = get_logger(__name__)
@@ -267,7 +267,7 @@ class PVForecastAkkudoktor(PVForecastProvider):
        logger.debug(f"Response from {self._url()}: {response}")
        akkudoktor_data = self._validate_data(response.content)
        # We are working on fresh data (no cache), report update time
        self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)

        return akkudoktor_data

    def _update_data(self, force_update: Optional[bool] = False) -> None:
@@ -7,23 +7,23 @@ format, enabling consistent access to forecasted and historical weather attribut
"""

import json
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Optional, Tuple, Union

import pandas as pd
import pvlib
import requests

from akkudoktoreos.core.cache import cache_in_file
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.weatherabc import WeatherDataRecord, WeatherProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration

logger = get_logger(__name__)


WheaterDataBrightSkyMapping: List[Tuple[str, Optional[str], Optional[float]]] = [
WheaterDataBrightSkyMapping: List[Tuple[str, Optional[str], Optional[Union[str, float]]]] = [
    # brightsky_key, description, corr_factor
    ("timestamp", "DateTime", None),
    ("timestamp", "DateTime", "to datetime in timezone"),
    ("precipitation", "Precipitation Amount (mm)", 1),
    ("pressure_msl", "Pressure (mb)", 1),
    ("sunshine", None, None),
@@ -96,8 +96,8 @@ class WeatherBrightSky(WeatherProvider):
            ValueError: If the API response does not include expected `weather` data.
        """
        source = "https://api.brightsky.dev"
        date = to_datetime(self.start_datetime, as_string="YYYY-MM-DD")
        last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
        date = to_datetime(self.start_datetime, as_string=True)
        last_date = to_datetime(self.end_datetime, as_string=True)
        response = requests.get(
            f"{source}/weather?lat={self.config.general.latitude}&lon={self.config.general.longitude}&date={date}&last_date={last_date}&tz={self.config.general.timezone}"
        )
@@ -133,7 +133,8 @@ class WeatherBrightSky(WeatherProvider):
            error_msg = f"No WeatherDataRecord key for '{description}'"
            logger.error(error_msg)
            raise ValueError(error_msg)
        return self.key_to_series(key)
        series = self.key_to_series(key)
        return series

    def _description_from_series(self, description: str, data: pd.Series) -> None:
        """Update weather data with a pandas Series based on its description.
@@ -170,7 +171,7 @@ class WeatherBrightSky(WeatherProvider):
        brightsky_data = self._request_forecast(force_update=force_update)  # type: ignore

        # Get key mapping from description
        brightsky_key_mapping: Dict[str, Tuple[Optional[str], Optional[float]]] = {}
        brightsky_key_mapping: Dict[str, Tuple[Optional[str], Optional[Union[str, float]]]] = {}
        for brightsky_key, description, corr_factor in WheaterDataBrightSkyMapping:
            if description is None:
                brightsky_key_mapping[brightsky_key] = (None, None)
@@ -192,6 +193,9 @@ class WeatherBrightSky(WeatherProvider):
                value = brightsky_record[brightsky_key]
                corr_factor = item[1]
                if value and corr_factor:
                    if corr_factor == "to datetime in timezone":
                        value = to_datetime(value, in_timezone=self.config.general.timezone)
                    else:
                        value = value * corr_factor
                setattr(weather_record, key, value)
            self.insert_by_datetime(weather_record)
@@ -216,14 +220,30 @@ class WeatherBrightSky(WeatherProvider):
        self._description_from_series(description, dhi)

        # Add Precipitable Water (PWAT) with a PVLib method.
        description = "Temperature (°C)"
        temperature = self._description_to_series(description)

        description = "Relative Humidity (%)"
        humidity = self._description_to_series(description)

        key = WeatherDataRecord.key_from_description("Temperature (°C)")
        assert key
        temperature = self.key_to_array(
            key=key,
            start_datetime=self.start_datetime,
            end_datetime=self.end_datetime,
            interval=to_duration("1 hour"),
        )
        key = WeatherDataRecord.key_from_description("Relative Humidity (%)")
        assert key
        humidity = self.key_to_array(
            key=key,
            start_datetime=self.start_datetime,
            end_datetime=self.end_datetime,
            interval=to_duration("1 hour"),
        )
        data = pvlib.atmosphere.gueymard94_pw(temperature, humidity)
        pwat = pd.Series(
            data=pvlib.atmosphere.gueymard94_pw(temperature, humidity), index=temperature.index
            data=data,
            index=pd.DatetimeIndex(
                pd.date_range(
                    start=self.start_datetime, end=self.end_datetime, freq="1h", inclusive="left"
                )
            ),
        )
        description = "Preciptable Water (cm)"
        self._description_from_series(description, pwat)
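For reference, `pvlib.atmosphere.gueymard94_pw` estimates precipitable water (in cm) from air temperature (°C) and relative humidity (%); a minimal standalone check:

```python
import numpy as np
import pvlib

temp_air = np.array([10.0, 15.0, 20.0])          # °C
relative_humidity = np.array([60.0, 70.0, 80.0])  # %

# Returns precipitable water in cm, one value per input sample.
pwat = pvlib.atmosphere.gueymard94_pw(temp_air, relative_humidity)
print(pwat)
```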
@@ -19,9 +19,9 @@ import pandas as pd
import requests
from bs4 import BeautifulSoup

from akkudoktoreos.core.cache import cache_in_file
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.weatherabc import WeatherDataRecord, WeatherProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration, to_timezone

logger = get_logger(__name__)
@@ -63,6 +63,9 @@ class WeatherImport(WeatherProvider, PredictionImportProvider):
        return "WeatherImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        if self.config.weather.provider_settings is None:
            logger.debug(f"{self.provider_id()} data update without provider settings.")
            return
        if self.config.weather.provider_settings.import_file_path:
            self.import_from_file(
                self.config.weather.provider_settings.import_file_path, key_prefix="weather"
0
src/akkudoktoreos/server/dash/__init__.py
Normal file
(three additional image assets added: 22 KiB, 112 KiB, 20 KiB)
BIN
src/akkudoktoreos/server/dash/assets/favicon/favicon-16x16.png
Normal file (724 B)
BIN
src/akkudoktoreos/server/dash/assets/favicon/favicon-32x32.png
Normal file (1.6 KiB)
BIN
src/akkudoktoreos/server/dash/assets/favicon/favicon.ico
Normal file (15 KiB)
@@ -0,0 +1 @@
{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
BIN
src/akkudoktoreos/server/dash/assets/icon.png
Normal file (7.5 KiB)
BIN
src/akkudoktoreos/server/dash/assets/logo.png
Normal file (12 KiB)
38
src/akkudoktoreos/server/dash/bokeh.py
Normal file
@@ -0,0 +1,38 @@
# Module taken from https://github.com/koaning/fh-altair
# MIT license

from typing import Optional

from bokeh.embed import components
from bokeh.models import Plot
from monsterui.franken import H4, Card, NotStr, Script

BokehJS = [
    Script(src="https://cdn.bokeh.org/bokeh/release/bokeh-3.6.3.min.js", crossorigin="anonymous"),
    Script(
        src="https://cdn.bokeh.org/bokeh/release/bokeh-widgets-3.6.3.min.js",
        crossorigin="anonymous",
    ),
    Script(
        src="https://cdn.bokeh.org/bokeh/release/bokeh-tables-3.6.3.min.js", crossorigin="anonymous"
    ),
    Script(
        src="https://cdn.bokeh.org/bokeh/release/bokeh-gl-3.6.3.min.js", crossorigin="anonymous"
    ),
    Script(
        src="https://cdn.bokeh.org/bokeh/release/bokeh-mathjax-3.6.3.min.js",
        crossorigin="anonymous",
    ),
]


def Bokeh(plot: Plot, header: Optional[str] = None) -> Card:
    """Converts a Bokeh plot to a FastHTML FT component."""
    script, div = components(plot)
    if header:
        header = H4(header, cls="mt-2")
    return Card(
        NotStr(div),
        NotStr(script),
        header=header,
    )
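A minimal usage sketch of the `Bokeh` wrapper (the figure is illustrative; note that `BokehJS` still has to be included in the page head so the embedded script can render):

```python
from bokeh.plotting import figure

from akkudoktoreos.server.dash.bokeh import Bokeh

plot = figure(title="Example", x_axis_label="x", y_axis_label="y", height=300)
plot.line([1, 2, 3], [4, 6, 5], legend_label="demo", color="blue")

# Wraps the plot into a MonsterUI Card that FastHTML can serve directly.
card = Bokeh(plot, header="Example plot")
```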
224
src/akkudoktoreos/server/dash/components.py
Normal file
@@ -0,0 +1,224 @@
from typing import Any, Optional, Union

from fasthtml.common import H1, Div, Li

# from mdit_py_plugins import plugin1, plugin2
from monsterui.foundations import stringify
from monsterui.franken import (
    Button,
    ButtonT,
    Card,
    Container,
    ContainerT,
    Details,
    DivLAligned,
    DivRAligned,
    Grid,
    Input,
    P,
    Summary,
    TabContainer,
    UkIcon,
)

scrollbar_viewport_styles = (
    "scrollbar-width: none; -ms-overflow-style: none; -webkit-overflow-scrolling: touch;"
)

scrollbar_cls = "flex touch-none select-none transition-colors p-[1px]"


def ScrollArea(
    *c: Any, cls: Optional[Union[str, tuple]] = None, orientation: str = "vertical", **kwargs: Any
) -> Div:
    """Creates a styled scroll area.

    Args:
        orientation (str): The orientation of the scroll area. Defaults to vertical.
    """
    new_cls = "relative overflow-hidden"
    if cls:
        new_cls += f" {stringify(cls)}"
    kwargs["cls"] = new_cls

    content = Div(
        Div(*c, style="min-width:100%;display:table;"),
        style=f"overflow: {'hidden scroll' if orientation == 'vertical' else 'scroll'}; {scrollbar_viewport_styles}",
        cls="w-full h-full rounded-[inherit]",
        data_ref="viewport",
    )

    scrollbar = Div(
        Div(cls="bg-border rounded-full hidden relative flex-1", data_ref="thumb"),
        cls=f"{scrollbar_cls} flex-col h-2.5 w-full border-t border-t-transparent"
        if orientation == "horizontal"
        else f"{scrollbar_cls} w-2.5 h-full border-l border-l-transparent",
        data_ref="scrollbar",
        style=f"position: absolute;{'right:0; top:0;' if orientation == 'vertical' else 'bottom:0; left:0;'}",
    )

    return Div(
        content,
        scrollbar,
        role="region",
        tabindex="0",
        data_orientation=orientation,
        data_ref_scrollarea=True,
        aria_label="Scrollable content",
        **kwargs,
    )


def ConfigCard(
    config_name: str, config_type: str, read_only: str, value: str, default: str, description: str
) -> Card:
    return Card(
        Details(
            Summary(
                Grid(
                    Grid(
                        DivLAligned(
                            UkIcon(icon="play"),
                            P(config_name),
                        ),
                        DivRAligned(
                            P(read_only),
                        ),
                    ),
                    Input(value=value) if read_only == "rw" else P(value),
                ),
                # cls="flex cursor-pointer list-none items-center gap-4",
                cls="list-none",
            ),
            Grid(
                P(description),
                P(config_type),
            ),
            Grid(
                DivRAligned(
                    P("default") if read_only == "rw" else P(""),
                ),
                P(default) if read_only == "rw" else P(""),
            )
            if read_only == "rw"
            else None,
            cls="space-y-4 gap-4",
        ),
        cls="w-full",
    )


def DashboardHeader(title: Optional[str]) -> Div:
    """Creates a styled header with a title.

    Args:
        title (Optional[str]): The title text for the header.

    Returns:
        Div: A styled `Div` element containing the header.
    """
    if title is None:
        return Div("", cls="header")
    return Div(H1(title, cls="text-2xl font-bold mb-4"), cls="header")


def DashboardFooter(*c: Any, path: str) -> Card:
    """Creates a styled footer with the provided information.

    The footer content is reloaded every 5 seconds from path.

    Args:
        path (str): Path to reload footer content from.

    Returns:
        Card: A styled `Card` element containing the footer.
    """
    return Card(
        Container(*c, id="footer-content"),
        hx_get=f"{path}",
        hx_trigger="every 5s",
        hx_target="#footer-content",
        hx_swap="innerHTML",
    )


def DashboardTrigger(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> Button:
    """Creates a styled button for the dashboard trigger.

    Args:
        *c: Positional arguments to pass to the button.
        cls (Optional[str]): Additional CSS classes for styling. Defaults to None.
        **kwargs: Additional keyword arguments for the button.

    Returns:
        Button: A styled `Button` component.
    """
    new_cls = f"{ButtonT.primary}"
    if cls:
        new_cls += f" {stringify(cls)}"
    kwargs["cls"] = new_cls
    return Button(*c, submit=False, **kwargs)


def DashboardTabs(dashboard_items: dict[str, str]) -> Card:
    """Creates a dashboard tab with dynamic dashboard items.

    Args:
        dashboard_items (dict[str, str]): A dictionary of dashboard items where keys are item names
            and values are paths for navigation.

    Returns:
        Card: A styled `Card` component containing the dashboard tabs.
    """
    dash_items = [
        Li(
            DashboardTrigger(
                menu,
                hx_get=f"{path}",
                hx_target="#page-content",
                hx_swap="innerHTML",
            ),
        )
        for menu, path in dashboard_items.items()
    ]
    return Card(TabContainer(*dash_items, cls="gap-4"), alt=True)


def DashboardContent(content: Any) -> Card:
    """Creates a content section within a styled card.

    Args:
        content (Any): The content to display.

    Returns:
        Card: A styled `Card` element containing the content.
    """
    return Card(ScrollArea(Container(content, id="page-content"), cls="h-[75vh] w-full rounded-md"))


def Page(
    title: Optional[str],
    dashboard_items: dict[str, str],
    content: Any,
    footer_content: Any,
    footer_path: str,
) -> Div:
    """Generates a full-page layout with a header, dashboard items, content, and footer.

    Args:
        title (Optional[str]): The page title.
        dashboard_items (dict[str, str]): A dictionary of dashboard items.
        content (Any): The main content for the page.
        footer_content (Any): Footer content.
        footer_path (str): Path to reload footer content from.

    Returns:
        Div: A `Div` element representing the entire page layout.
    """
    return Container(
        DashboardHeader(title),
        DashboardTabs(dashboard_items),
        DashboardContent(content),
        DashboardFooter(footer_content, path=footer_path),
        cls=("bg-background text-foreground w-screen p-4 space-y-4", ContainerT.xl),
    )
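A hedged sketch of how these components compose into a full dashboard page (the paths and titles below are invented for illustration):

```python
from akkudoktoreos.server.dash.components import Page

page = Page(
    title="Akkudoktor EOSdash",
    dashboard_items={"Hello": "/eosdash/hello", "Configuration": "/eosdash/configuration"},
    content="Loading...",
    footer_content="EOS localhost:8503",
    footer_path="/eosdash/footer",
)
```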
275
src/akkudoktoreos/server/dash/configuration.py
Normal file
@@ -0,0 +1,275 @@
from typing import Any, Dict, List, Optional, Sequence, TypeVar, Union

import requests
from monsterui.franken import Div, DividerLine, P, Table, Tbody, Td, Th, Thead, Tr
from pydantic.fields import ComputedFieldInfo, FieldInfo
from pydantic_core import PydanticUndefined

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.server.dash.components import ConfigCard

logger = get_logger(__name__)
config_eos = get_config()

T = TypeVar("T")


def get_nested_value(
    dictionary: Union[Dict[str, Any], List[Any]],
    keys: Sequence[Union[str, int]],
    default: Optional[T] = None,
) -> Union[Any, T]:
    """Retrieve a nested value from a dictionary or list using a sequence of keys.

    Args:
        dictionary (Union[Dict[str, Any], List[Any]]): The nested dictionary or list to search.
        keys (Sequence[Union[str, int]]): A sequence of keys or indices representing the path to the desired value.
        default (Optional[T]): A value to return if the path is not found.

    Returns:
        Union[Any, T]: The value at the specified nested path, or the default value if not found.

    Raises:
        TypeError: If the input is not a dictionary or list, or if keys are not a sequence.
        KeyError: If a key is not found in a dictionary.
        IndexError: If an index is out of range in a list.
    """
    if not isinstance(dictionary, (dict, list)):
        raise TypeError("The first argument must be a dictionary or list")
    if not isinstance(keys, Sequence):
        raise TypeError("Keys must be provided as a sequence (e.g., list, tuple)")

    if not keys:
        return dictionary

    try:
        # Traverse the structure
        current = dictionary
        for key in keys:
            if isinstance(current, dict) and isinstance(key, str):
                current = current[key]
            elif isinstance(current, list) and isinstance(key, int):
                current = current[key]
            else:
                raise KeyError(f"Invalid key or index: {key}")
        return current
    except (KeyError, IndexError, TypeError):
        return default


def get_default_value(field_info: Union[FieldInfo, ComputedFieldInfo], regular_field: bool) -> Any:
    """Retrieve the default value of a field.

    Args:
        field_info (Union[FieldInfo, ComputedFieldInfo]): The field metadata from Pydantic.
        regular_field (bool): Indicates if the field is a regular field.

    Returns:
        Any: The default value of the field or "N/A" if not a regular field.
    """
    default_value = ""
    if regular_field:
        if (val := field_info.default) is not PydanticUndefined:
            default_value = val
    else:
        default_value = "N/A"
    return default_value


def resolve_nested_types(field_type: Any, parent_types: list[str]) -> list[tuple[Any, list[str]]]:
    """Resolve nested types within a field and return their structure.

    Args:
        field_type (Any): The type of the field to resolve.
        parent_types (List[str]): A list of parent type names.

    Returns:
        List[tuple[Any, List[str]]]: A list of tuples containing resolved types and their parent hierarchy.
    """
    resolved_types: list[tuple[Any, list[str]]] = []

    origin = getattr(field_type, "__origin__", field_type)
    if origin is Union:
        for arg in getattr(field_type, "__args__", []):
            if arg is not type(None):
                resolved_types.extend(resolve_nested_types(arg, parent_types))
    else:
        resolved_types.append((field_type, parent_types))

    return resolved_types


def configuration(values: dict) -> list[dict]:
    """Generate configuration details based on provided values and model metadata.

    Args:
        values (dict): A dictionary containing the current configuration values.

    Returns:
        List[dict]: A sorted list of configuration details, each represented as a dictionary.
    """
    configs = []
    inner_types: set[type[PydanticBaseModel]] = set()

    for field_name, field_info in list(config_eos.model_fields.items()) + list(
        config_eos.model_computed_fields.items()
    ):

        def extract_nested_models(
            subfield_info: Union[ComputedFieldInfo, FieldInfo], parent_types: list[str]
        ) -> None:
            regular_field = isinstance(subfield_info, FieldInfo)
            subtype = subfield_info.annotation if regular_field else subfield_info.return_type

            if subtype in inner_types:
                return

            nested_types = resolve_nested_types(subtype, [])
            found_basic = False
            for nested_type, nested_parent_types in nested_types:
                if not isinstance(nested_type, type) or not issubclass(
                    nested_type, PydanticBaseModel
                ):
                    if found_basic:
                        continue

                    config = {}
                    config["name"] = ".".join(parent_types)
                    config["value"] = str(get_nested_value(values, parent_types, "<unknown>"))
                    config["default"] = str(get_default_value(subfield_info, regular_field))
                    config["description"] = (
                        subfield_info.description if subfield_info.description else ""
                    )
                    if isinstance(subfield_info, ComputedFieldInfo):
                        config["read-only"] = "ro"
                        type_description = str(subfield_info.return_type)
                    else:
                        config["read-only"] = "rw"
                        type_description = str(subfield_info.annotation)
                    config["type"] = (
                        type_description.replace("typing.", "")
                        .replace("pathlib.", "")
                        .replace("[", "[ ")
                        .replace("NoneType", "None")
                    )
                    configs.append(config)
                    found_basic = True
                else:
                    new_parent_types = parent_types + nested_parent_types
                    inner_types.add(nested_type)
                    for nested_field_name, nested_field_info in list(
                        nested_type.model_fields.items()
                    ) + list(nested_type.model_computed_fields.items()):
                        extract_nested_models(
                            nested_field_info,
                            new_parent_types + [nested_field_name],
                        )

        extract_nested_models(field_info, [field_name])
    return sorted(configs, key=lambda x: x["name"])


def get_configuration(eos_host: Optional[str], eos_port: Optional[Union[str, int]]) -> list[dict]:
    """Fetch and process configuration data from the specified EOS server.

    Args:
        eos_host (Optional[str]): The hostname of the server.
        eos_port (Optional[Union[str, int]]): The port of the server.

    Returns:
        List[dict]: A list of processed configuration entries.
    """
    if eos_host is None:
        eos_host = config_eos.server.host
    if eos_port is None:
        eos_port = config_eos.server.port
    server = f"http://{eos_host}:{eos_port}"

    # Get current configuration from server
    try:
        result = requests.get(f"{server}/v1/config")
        result.raise_for_status()
    except requests.exceptions.HTTPError as e:
        detail = result.json()["detail"]
        warning_msg = f"Can not retrieve configuration from {server}: {e}, {detail}"
        logger.warning(warning_msg)
        return configuration({})
    config = result.json()

    return configuration(config)


def Configuration(eos_host: Optional[str], eos_port: Optional[Union[str, int]]) -> Div:
    """Create a visual representation of the configuration.

    Args:
        eos_host (Optional[str]): The hostname of the EOS server.
        eos_port (Optional[Union[str, int]]): The port of the EOS server.

    Returns:
        Div: A `Div` component displaying the configuration as a list of cards.
    """
    rows = []
    last_category = ""
    for config in get_configuration(eos_host, eos_port):
        category = config["name"].split(".")[0]
        if category != last_category:
            rows.append(P(category))
            rows.append(DividerLine())
            last_category = category
        rows.append(
            ConfigCard(
                config["name"],
                config["type"],
                config["read-only"],
                config["value"],
                config["default"],
                config["description"],
            )
        )
    return Div(*rows, cls="space-y-4")


def ConfigurationOrg(eos_host: Optional[str], eos_port: Optional[Union[str, int]]) -> Table:
    """Create a visual representation of the configuration.

    Args:
        eos_host (Optional[str]): The hostname of the EOS server.
        eos_port (Optional[Union[str, int]]): The port of the EOS server.

    Returns:
        Table: A `monsterui.franken.Table` component displaying configuration details.
    """
    flds = "Name", "Type", "RO/RW", "Value", "Default", "Description"
    rows = [
        Tr(
            Td(
                config["name"],
                cls="max-w-64 text-wrap break-all",
            ),
            Td(
                config["type"],
                cls="max-w-48 text-wrap break-all",
            ),
            Td(
                config["read-only"],
                cls="max-w-24 text-wrap break-all",
            ),
            Td(
                config["value"],
                cls="max-w-md text-wrap break-all",
            ),
            Td(config["default"], cls="max-w-48 text-wrap break-all"),
            Td(
                config["description"],
                cls="max-w-prose text-wrap",
            ),
            cls="",
        )
        for config in get_configuration(eos_host, eos_port)
    ]
    head = Thead(*map(Th, flds), cls="text-left")
    return Table(head, Tbody(*rows), cls="w-full uk-table uk-table-divider uk-table-striped")
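`get_nested_value` mirrors `access_nested_value`, but for the plain dicts and lists returned by `/v1/config`; a quick illustration with made-up values:

```python
values = {
    "server": {"host": "0.0.0.0", "port": 8503},
    "pvforecast": {"planes": [{"peakpower": 5.0}]},
}

get_nested_value(values, ["server", "port"])                           # 8503
get_nested_value(values, ["pvforecast", "planes", 0, "peakpower"])     # 5.0
get_nested_value(values, ["server", "missing"], default="<unknown>")  # "<unknown>"
```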
86
src/akkudoktoreos/server/dash/data/democonfig.json
Normal file
@@ -0,0 +1,86 @@
{
    "elecprice": {
        "charges_kwh": 0.21,
        "provider": "ElecPriceAkkudoktor"
    },
    "general": {
        "latitude": 52.5,
        "longitude": 13.4
    },
    "prediction": {
        "historic_hours": 48,
        "hours": 48
    },
    "load": {
        "provider": "LoadAkkudoktor",
        "provider_settings": {
            "loadakkudoktor_year_energy": 20000
        }
    },
    "optimization": {
        "hours": 48
    },
    "pvforecast": {
        "planes": [
            {
                "peakpower": 5.0,
                "surface_azimuth": -10,
                "surface_tilt": 7,
                "userhorizon": [
                    20,
                    27,
                    22,
                    20
                ],
                "inverter_paco": 10000
            },
            {
                "peakpower": 4.8,
                "surface_azimuth": -90,
                "surface_tilt": 7,
                "userhorizon": [
                    30,
                    30,
                    30,
                    50
                ],
                "inverter_paco": 10000
            },
            {
                "peakpower": 1.4,
                "surface_azimuth": -40,
                "surface_tilt": 60,
                "userhorizon": [
                    60,
                    30,
                    0,
                    30
                ],
                "inverter_paco": 2000
            },
            {
                "peakpower": 1.6,
                "surface_azimuth": 5,
                "surface_tilt": 45,
                "userhorizon": [
                    45,
                    25,
                    30,
                    60
                ],
                "inverter_paco": 1400
            }
        ],
        "provider": "PVForecastAkkudoktor"
    },
    "server": {
        "startup_eosdash": true,
        "host": "0.0.0.0",
        "port": 8503,
        "eosdash_host": "0.0.0.0",
        "eosdash_port": 8504
    },
    "weather": {
        "provider": "BrightSky"
    }
}
267
src/akkudoktoreos/server/dash/demo.py
Normal file
@@ -0,0 +1,267 @@
import json
from pathlib import Path
from typing import Union

import pandas as pd
import requests
from bokeh.models import ColumnDataSource, LinearAxis, Range1d
from bokeh.plotting import figure
from monsterui.franken import FT, Grid, P

from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticDateTimeDataFrame
from akkudoktoreos.server.dash.bokeh import Bokeh

DIR_DEMODATA = Path(__file__).absolute().parent.joinpath("data")
FILE_DEMOCONFIG = DIR_DEMODATA.joinpath("democonfig.json")
if not FILE_DEMOCONFIG.exists():
    raise ValueError(f"File does not exist: {FILE_DEMOCONFIG}")

logger = get_logger(__name__)

# bar width for 1 hour bars (time given in milliseconds)
BAR_WIDTH_1HOUR = 1000 * 60 * 60


def DemoPVForecast(predictions: pd.DataFrame, config: dict) -> FT:
    source = ColumnDataSource(predictions)
    provider = config["pvforecast"]["provider"]

    plot = figure(
        x_axis_type="datetime",
        title=f"PV Power Prediction ({provider})",
        x_axis_label="Datetime",
        y_axis_label="Power [W]",
        sizing_mode="stretch_width",
        height=400,
    )

    plot.vbar(
        x="date_time",
        top="pvforecast_ac_power",
        source=source,
        width=BAR_WIDTH_1HOUR * 0.8,
        legend_label="AC Power",
        color="lightblue",
    )

    return Bokeh(plot)


def DemoElectricityPriceForecast(predictions: pd.DataFrame, config: dict) -> FT:
    source = ColumnDataSource(predictions)
    provider = config["elecprice"]["provider"]

    plot = figure(
        x_axis_type="datetime",
        y_range=Range1d(
            predictions["elecprice_marketprice_kwh"].min() - 0.1,
            predictions["elecprice_marketprice_kwh"].max() + 0.1,
        ),
        title=f"Electricity Price Prediction ({provider})",
        x_axis_label="Datetime",
        y_axis_label="Price [€/kWh]",
        sizing_mode="stretch_width",
        height=400,
    )
    plot.vbar(
        x="date_time",
        top="elecprice_marketprice_kwh",
        source=source,
        width=BAR_WIDTH_1HOUR * 0.8,
        legend_label="Market Price",
        color="lightblue",
    )

    return Bokeh(plot)


def DemoWeatherTempAir(predictions: pd.DataFrame, config: dict) -> FT:
    source = ColumnDataSource(predictions)
    provider = config["weather"]["provider"]

    plot = figure(
        x_axis_type="datetime",
        y_range=Range1d(
            predictions["weather_temp_air"].min() - 1.0, predictions["weather_temp_air"].max() + 1.0
        ),
        title=f"Air Temperature Prediction ({provider})",
        x_axis_label="Datetime",
        y_axis_label="Temperature [°C]",
        sizing_mode="stretch_width",
        height=400,
    )
    plot.line(
        "date_time", "weather_temp_air", source=source, legend_label="Air Temperature", color="blue"
    )

    return Bokeh(plot)


def DemoWeatherIrradiance(predictions: pd.DataFrame, config: dict) -> FT:
    source = ColumnDataSource(predictions)
    provider = config["weather"]["provider"]

    plot = figure(
        x_axis_type="datetime",
        title=f"Irradiance Prediction ({provider})",
        x_axis_label="Datetime",
        y_axis_label="Irradiance [W/m2]",
        sizing_mode="stretch_width",
        height=400,
    )
    plot.line(
        "date_time",
        "weather_ghi",
        source=source,
        legend_label="Global Horizontal Irradiance",
        color="red",
    )
    plot.line(
        "date_time",
        "weather_dni",
        source=source,
        legend_label="Direct Normal Irradiance",
        color="green",
    )
    plot.line(
        "date_time",
        "weather_dhi",
        source=source,
        legend_label="Diffuse Horizontal Irradiance",
        color="blue",
    )

    return Bokeh(plot)


def DemoLoad(predictions: pd.DataFrame, config: dict) -> FT:
    source = ColumnDataSource(predictions)
    provider = config["load"]["provider"]
    if provider == "LoadAkkudoktor":
        year_energy = config["load"]["provider_settings"]["loadakkudoktor_year_energy"]
        provider = f"{provider}, {year_energy} kWh"

    plot = figure(
        x_axis_type="datetime",
        title=f"Load Prediction ({provider})",
        x_axis_label="Datetime",
        y_axis_label="Load [W]",
        sizing_mode="stretch_width",
        height=400,
    )
    plot.extra_y_ranges["stddev"] = Range1d(0, 1000)
    y2_axis = LinearAxis(y_range_name="stddev", axis_label="Load Standard Deviation [W]")
    y2_axis.axis_label_text_color = "green"
    plot.add_layout(y2_axis, "left")

    plot.line(
        "date_time",
        "load_mean",
        source=source,
        legend_label="Load mean value",
        color="red",
    )
    plot.line(
        "date_time",
        "load_mean_adjusted",
        source=source,
        legend_label="Load adjusted by measurement",
        color="blue",
    )
    plot.line(
        "date_time",
        "load_std",
        source=source,
        legend_label="Load standard deviation",
        color="green",
        y_range_name="stddev",
    )

    return Bokeh(plot)


def Demo(eos_host: str, eos_port: Union[str, int]) -> str:
    server = f"http://{eos_host}:{eos_port}"

    # Get current configuration from server
    try:
        result = requests.get(f"{server}/v1/config")
        result.raise_for_status()
    except requests.exceptions.HTTPError as err:
        detail = result.json()["detail"]
        return P(
            f"Can not retrieve configuration from {server}: {err}, {detail}",
            cls="text-center",
        )
    config = result.json()

    # Set demo configuration
    with FILE_DEMOCONFIG.open("r", encoding="utf-8") as fd:
        democonfig = json.load(fd)
    try:
        result = requests.put(f"{server}/v1/config", json=democonfig)
        result.raise_for_status()
    except requests.exceptions.HTTPError as err:
        detail = result.json()["detail"]
        # Try to reset to original config
        requests.put(f"{server}/v1/config", json=config)
        return P(
            f"Can not set demo configuration on {server}: {err}, {detail}",
            cls="text-center",
        )

    # Update all predictions
    try:
        result = requests.post(f"{server}/v1/prediction/update")
        result.raise_for_status()
    except requests.exceptions.HTTPError as err:
        detail = result.json()["detail"]
        # Try to reset to original config
        requests.put(f"{server}/v1/config", json=config)
        return P(
            f"Can not update predictions on {server}: {err}, {detail}",
            cls="text-center",
        )

    # Get Forecasts
    try:
        params = {
            "keys": [
                "pvforecast_ac_power",
                "elecprice_marketprice_kwh",
                "weather_temp_air",
                "weather_ghi",
                "weather_dni",
                "weather_dhi",
                "load_mean",
                "load_std",
                "load_mean_adjusted",
            ],
        }
        result = requests.get(f"{server}/v1/prediction/dataframe", params=params)
        result.raise_for_status()
        predictions = PydanticDateTimeDataFrame(**result.json()).to_dataframe()
    except requests.exceptions.HTTPError as err:
        detail = result.json()["detail"]
        return P(
            f"Can not retrieve predictions from {server}: {err}, {detail}",
            cls="text-center",
        )
    except Exception as err:
        return P(
            f"Can not retrieve predictions from {server}: {err}",
            cls="text-center",
        )

    # Reset to original config
    requests.put(f"{server}/v1/config", json=config)

    return Grid(
        DemoPVForecast(predictions, democonfig),
        DemoElectricityPriceForecast(predictions, democonfig),
        DemoWeatherTempAir(predictions, democonfig),
        DemoWeatherIrradiance(predictions, democonfig),
        DemoLoad(predictions, democonfig),
        cols_max=2,
    )
92
src/akkudoktoreos/server/dash/footer.py
Normal file
@@ -0,0 +1,92 @@
from typing import Optional, Union

import requests
from monsterui.daisy import Loading, LoadingT
from monsterui.franken import A, ButtonT, DivFullySpaced, P
from requests.exceptions import RequestException

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.logging import get_logger

logger = get_logger(__name__)
config_eos = get_config()


def get_alive(eos_host: str, eos_port: Union[str, int]) -> str:
    """Fetch alive information from the specified EOS server.

    Args:
        eos_host (str): The hostname of the server.
        eos_port (Union[str, int]): The port of the server.

    Returns:
        str: Alive data.
    """
    result = requests.Response()
    try:
        result = requests.get(f"http://{eos_host}:{eos_port}/v1/health")
        if result.status_code == 200:
            alive = result.json()["status"]
        else:
            alive = f"Server responded with status code: {result.status_code}"
    except RequestException as e:
        warning_msg = f"{e}"
        logger.warning(warning_msg)
        alive = warning_msg

    return alive


def Footer(eos_host: Optional[str], eos_port: Optional[Union[str, int]]) -> str:
    if eos_host is None:
        eos_host = config_eos.server.host
    if eos_port is None:
        eos_port = config_eos.server.port
    alive_icon = None
    if eos_host is None or eos_port is None:
        alive = f"EOS server not given: {eos_host}:{eos_port}"
    else:
        alive = get_alive(eos_host, eos_port)
        if alive == "alive":
            alive_icon = Loading(
                cls=(
                    LoadingT.ring,
                    LoadingT.sm,
                ),
            )
            alive = f"EOS {eos_host}:{eos_port}"
    if alive_icon:
        alive_cls = f"{ButtonT.primary} uk-link rounded-md"
    else:
        alive_cls = f"{ButtonT.secondary} uk-link rounded-md"
    return DivFullySpaced(
        P(
            alive_icon,
            A(alive, href=f"http://{eos_host}:{eos_port}/docs", target="_blank", cls=alive_cls),
        ),
        P(
            A(
                "Documentation",
                href="https://akkudoktor-eos.readthedocs.io/en/latest/",
                target="_blank",
                cls="uk-link",
            ),
        ),
        P(
            A(
                "Issues",
                href="https://github.com/Akkudoktor-EOS/EOS/issues",
                target="_blank",
                cls="uk-link",
            ),
        ),
        P(
            A(
                "GitHub",
                href="https://github.com/Akkudoktor-EOS/EOS/",
                target="_blank",
                cls="uk-link",
            ),
        ),
        cls="uk-padding-remove-top uk-padding-remove-bottom",
    )
24
src/akkudoktoreos/server/dash/hello.py
Normal file
@@ -0,0 +1,24 @@
from typing import Any

from fasthtml.common import Div

from akkudoktoreos.server.dash.markdown import Markdown

hello_md = """

# Akkudoktor EOSdash

The dashboard for Akkudoktor EOS.

EOS provides a comprehensive solution for simulating and optimizing an energy system based
on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries),
load management (consumer requirements), heat pumps, electric vehicles, and consideration of
electricity price data, this system enables forecasting and optimization of energy flow and costs
over a specified period.

Documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedocs.io/en/latest/).
"""


def Hello(**kwargs: Any) -> Div:
    return Markdown(hello_md, **kwargs)
136
src/akkudoktoreos/server/dash/markdown.py
Normal file
@@ -0,0 +1,136 @@
"""Markdown rendering with MonsterUI HTML classes."""

from typing import Any, List, Optional, Union

from fasthtml.common import FT, Div, NotStr
from markdown_it import MarkdownIt
from markdown_it.renderer import RendererHTML
from markdown_it.token import Token
from monsterui.foundations import stringify


def render_heading(
    self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
) -> str:
    """Custom renderer for Markdown headings.

    Adds specific CSS classes based on the heading level.

    Parameters:
        self: The renderer instance.
        tokens: List of tokens to be rendered.
        idx: Index of the current token.
        options: Rendering options.
        env: Environment sandbox for plugins.

    Returns:
        The rendered token as a string.
    """
    if tokens[idx].markup == "#":
        tokens[idx].attrSet("class", "uk-heading-divider uk-h1 uk-margin")
    elif tokens[idx].markup == "##":
        tokens[idx].attrSet("class", "uk-heading-divider uk-h2 uk-margin")
    elif tokens[idx].markup == "###":
        tokens[idx].attrSet("class", "uk-heading-divider uk-h3 uk-margin")
    elif tokens[idx].markup == "####":
        tokens[idx].attrSet("class", "uk-heading-divider uk-h4 uk-margin")

    # pass token to default renderer.
    return self.renderToken(tokens, idx, options, env)


def render_paragraph(
    self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
) -> str:
    """Custom renderer for Markdown paragraphs.

    Adds specific CSS classes.

    Parameters:
        self: The renderer instance.
        tokens: List of tokens to be rendered.
        idx: Index of the current token.
        options: Rendering options.
        env: Environment sandbox for plugins.

    Returns:
        The rendered token as a string.
    """
    tokens[idx].attrSet("class", "uk-paragraph")

    # pass token to default renderer.
    return self.renderToken(tokens, idx, options, env)


def render_blockquote(
    self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
) -> str:
    """Custom renderer for Markdown blockquotes.

    Adds specific CSS classes.

    Parameters:
        self: The renderer instance.
        tokens: List of tokens to be rendered.
        idx: Index of the current token.
        options: Rendering options.
        env: Environment sandbox for plugins.

    Returns:
        The rendered token as a string.
    """
    tokens[idx].attrSet("class", "uk-blockquote")

    # pass token to default renderer.
    return self.renderToken(tokens, idx, options, env)


def render_link(self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict) -> str:
    """Custom renderer for Markdown links.

    Adds the target attribute to open links in a new tab.

    Parameters:
        self: The renderer instance.
        tokens: List of tokens to be rendered.
        idx: Index of the current token.
        options: Rendering options.
        env: Environment sandbox for plugins.

    Returns:
        The rendered token as a string.
    """
    tokens[idx].attrSet("class", "uk-link")
    tokens[idx].attrSet("target", "_blank")

    # pass token to default renderer.
    return self.renderToken(tokens, idx, options, env)


markdown = MarkdownIt("gfm-like")
markdown.add_render_rule("heading_open", render_heading)
markdown.add_render_rule("paragraph_open", render_paragraph)
markdown.add_render_rule("blockquote_open", render_blockquote)
markdown.add_render_rule("link_open", render_link)


markdown_cls = "bg-background text-lg ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"


def Markdown(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> FT:
    """Component to render Markdown content with custom styling.

    Parameters:
        c: Markdown content to be rendered.
        cls: Optional additional CSS classes to be added.
        kwargs: Additional keyword arguments for the Div component.

    Returns:
        An FT object representing the rendered HTML content wrapped in a Div component.
    """
    new_cls = markdown_cls
    if cls:
        new_cls += f" {stringify(cls)}"
    kwargs["cls"] = new_cls
    md_html = markdown.render(*c)
    return Div(NotStr(md_html), **kwargs)
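A one-liner shows the intended use of the `Markdown` component (the content string is arbitrary):

```python
from akkudoktoreos.server.dash.markdown import Markdown

# Renders GFM-style markdown into a Div with the UIkit/MonsterUI classes applied.
component = Markdown("# Title\n\nSome **bold** text and a [link](https://example.com).")
```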
@@ -1,20 +1,33 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
from typing import Annotated, Any, AsyncGenerator, Dict, List, Optional, Union
|
||||
|
||||
import httpx
|
||||
import psutil
|
||||
import uvicorn
|
||||
from fastapi import FastAPI, Query, Request
|
||||
from fastapi import Body, FastAPI
|
||||
from fastapi import Path as FastapiPath
|
||||
from fastapi import Query, Request
|
||||
from fastapi.exceptions import HTTPException
|
||||
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse, Response
|
||||
from fastapi.responses import (
|
||||
FileResponse,
|
||||
HTMLResponse,
|
||||
JSONResponse,
|
||||
RedirectResponse,
|
||||
Response,
|
||||
)
|
||||
|
||||
from akkudoktoreos.config.config import ConfigEOS, SettingsEOS, get_config
|
||||
from akkudoktoreos.core.cache import CacheFileStore
|
||||
from akkudoktoreos.core.ems import get_ems
|
||||
from akkudoktoreos.core.logging import get_logger
|
||||
from akkudoktoreos.core.pydantic import (
|
||||
@@ -34,6 +47,9 @@ from akkudoktoreos.prediction.load import LoadCommonSettings
|
||||
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
||||
from akkudoktoreos.prediction.prediction import PredictionCommonSettings, get_prediction
|
||||
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||
from akkudoktoreos.server.rest.error import create_error_page
|
||||
from akkudoktoreos.server.rest.tasks import repeat_every
|
||||
from akkudoktoreos.server.server import get_default_host, wait_for_port_free
|
||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -45,133 +61,54 @@ ems_eos = get_ems()
# Command line arguments
args = None

ERROR_PAGE_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Energy Optimization System (EOS) Error</title>
    <style>
        body {
            font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, sans-serif;
            background-color: #f5f5f5;
            display: flex;
            justify-content: center;
            align-items: center;
            height: 100vh;
            margin: 0;
            padding: 20px;
            box-sizing: border-box;
        }
        .error-container {
            background: white;
            padding: 2rem;
            border-radius: 8px;
            box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
            max-width: 500px;
            width: 100%;
            text-align: center;
        }
        .error-code {
            font-size: 4rem;
            font-weight: bold;
            color: #e53e3e;
            margin: 0;
        }
        .error-title {
            font-size: 1.5rem;
            color: #2d3748;
            margin: 1rem 0;
        }
        .error-message {
            color: #4a5568;
            margin-bottom: 1.5rem;
        }
        .error-details {
            background: #f7fafc;
            padding: 1rem;
            border-radius: 4px;
            margin-bottom: 1.5rem;
            text-align: left;
            font-family: monospace;
            white-space: pre-wrap;
            word-break: break-word;
        }
        .back-button {
            background: #3182ce;
            color: white;
            border: none;
            padding: 0.75rem 1.5rem;
            border-radius: 4px;
            text-decoration: none;
            display: inline-block;
            transition: background-color 0.2s;
        }
        .back-button:hover {
            background: #2c5282;
        }
    </style>
</head>
<body>
    <div class="error-container">
        <h1 class="error-code">STATUS_CODE</h1>
        <h2 class="error-title">ERROR_TITLE</h2>
        <p class="error-message">ERROR_MESSAGE</p>
        <div class="error-details">ERROR_DETAILS</div>
        <a href="/docs" class="back-button">Back to Home</a>
    </div>
</body>
</html>
"""


def create_error_page(
    status_code: str, error_title: str, error_message: str, error_details: str
) -> str:
    """Create an error page by replacing placeholders in the template."""
    return (
        ERROR_PAGE_TEMPLATE.replace("STATUS_CODE", status_code)
        .replace("ERROR_TITLE", error_title)
        .replace("ERROR_MESSAGE", error_message)
        .replace("ERROR_DETAILS", error_details)
    )
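This block is being moved out of the server module; it reappears further down as the new module src/akkudoktoreos/server/rest/error.py. As a quick illustration of the helper, a minimal sketch assuming a FastAPI context (all values shown are placeholders):

    from fastapi.responses import HTMLResponse

    # Hypothetical usage: render a 404 page from the template helper.
    page = create_error_page(
        status_code="404",
        error_title="Page Not Found",
        error_message="The requested resource does not exist.",
        error_details="No further details.",
    )
    response = HTMLResponse(content=page, status_code=404)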
# ----------------------
# EOSdash server startup
# ----------------------


def start_eosdash() -> subprocess.Popen:
def start_eosdash(
    host: str,
    port: int,
    eos_host: str,
    eos_port: int,
    log_level: str,
    access_log: bool,
    reload: bool,
    eos_dir: str,
    eos_config_dir: str,
) -> subprocess.Popen:
    """Start the EOSdash server as a subprocess.

    This function starts the EOSdash server by launching it as a subprocess. It checks whether
    the server is already running on the specified port and either returns the existing process
    or starts a new one.

    Args:
        host (str): The hostname for the EOSdash server.
        port (int): The port for the EOSdash server.
        eos_host (str): The hostname for the EOS server.
        eos_port (int): The port for the EOS server.
        log_level (str): The logging level for the EOSdash server.
        access_log (bool): Flag to enable or disable access logging.
        reload (bool): Flag to enable or disable auto-reloading.
        eos_dir (str): Path to the EOS data directory.
        eos_config_dir (str): Path to the EOS configuration directory.

    Returns:
        server_process: The process of the EOSdash server
        subprocess.Popen: The process of the EOSdash server.

    Raises:
        RuntimeError: If the EOSdash server fails to start.
    """
    eosdash_path = Path(__file__).parent.resolve().joinpath("eosdash.py")

    if args is None:
        # No command line arguments
        host = config_eos.server.eosdash_host
        port = config_eos.server.eosdash_port
        eos_host = config_eos.server.host
        eos_port = config_eos.server.port
        log_level = "info"
        access_log = False
        reload = False
    else:
        host = args.host
        port = config_eos.server.eosdash_port if config_eos.server.eosdash_port else (args.port + 1)
        eos_host = args.host
        eos_port = args.port
        log_level = args.log_level
        access_log = args.access_log
        reload = args.reload
    # Do a one-time check that the port is free, to generate warnings if it is not
    wait_for_port_free(port, timeout=0, waiting_app_name="EOSdash")

    cmd = [
        sys.executable,
        str(eosdash_path),
        "-m",
        "akkudoktoreos.server.eosdash",
        "--host",
        str(host),
        "--port",
@@ -187,11 +124,23 @@ def start_eosdash() -> subprocess.Popen:
        "--reload",
        str(reload),
    ]
    # Set environment before any subprocess run, to keep custom config dir
    env = os.environ.copy()
    env["EOS_DIR"] = eos_dir
    env["EOS_CONFIG_DIR"] = eos_config_dir

    try:
        server_process = subprocess.Popen(
            cmd,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            start_new_session=True,
        )
    except subprocess.CalledProcessError as ex:
        error_msg = f"Could not start EOSdash: {ex}"
        logger.error(error_msg)
        raise RuntimeError(error_msg)

    return server_process
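A minimal sketch of calling the reworked signature; the host, port, and directory values below are placeholders, not defaults taken from this diff:

    process = start_eosdash(
        host="127.0.0.1",
        port=8504,
        eos_host="127.0.0.1",
        eos_port=8503,
        log_level="info",
        access_log=False,
        reload=False,
        eos_dir="/tmp/eos",                # placeholder data directory
        eos_config_dir="/tmp/eos/config",  # placeholder config directory
    )
    print(f"EOSdash started, PID {process.pid}")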
@@ -201,20 +150,130 @@ def start_eosdash() -> subprocess.Popen:
# ----------------------


def cache_clear(clear_all: Optional[bool] = None) -> None:
    """Clean up expired cache files."""
    if clear_all:
        CacheFileStore().clear(clear_all=True)
    else:
        CacheFileStore().clear(before_datetime=to_datetime())


def cache_load() -> dict:
    """Load cache from cachefilestore.json."""
    return CacheFileStore().load_store()


def cache_save() -> dict:
    """Save cache to cachefilestore.json."""
    return CacheFileStore().save_store()


@repeat_every(seconds=float(config_eos.cache.cleanup_interval))
def cache_cleanup_task() -> None:
    """Repeating task to clear expired cache files."""
    cache_clear()


@repeat_every(
    seconds=10,
    wait_first=config_eos.ems.startup_delay,
)
def energy_management_task() -> None:
    """Repeating task for energy management."""
    ems_eos.manage_energy()


async def server_shutdown_task() -> None:
    """One-shot task for shutting down the EOS server.

    This coroutine performs the following actions:
    1. Ensures the cache is saved by calling the cache_save function.
    2. Waits for 5 seconds to allow the EOS server to complete any ongoing tasks.
    3. Gracefully shuts down the current process by sending the appropriate signal.

    If running on Windows, the CTRL_C_EVENT signal is sent to terminate the process.
    On other operating systems, the SIGTERM signal is used.

    Finally, logs a message indicating that the EOS server has been terminated.
    """
    # Assure cache is saved
    cache_save()

    # Give EOS time to finish some work
    await asyncio.sleep(5)

    # Gracefully shut down this process.
    pid = psutil.Process().pid
    if os.name == "nt":
        os.kill(pid, signal.CTRL_C_EVENT)  # type: ignore[attr-defined,unused-ignore]
    else:
        os.kill(pid, signal.SIGTERM)  # type: ignore[attr-defined,unused-ignore]

    logger.info(f"🚀 EOS terminated, PID {pid}")
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Lifespan manager for the app."""
    # On startup
    if config_eos.server.startup_eosdash:
        try:
            eosdash_process = start_eosdash()
            if args is None:
                # No command line arguments
                host = config_eos.server.eosdash_host
                port = config_eos.server.eosdash_port
                eos_host = config_eos.server.host
                eos_port = config_eos.server.port
                log_level = "info"
                access_log = False
                reload = False
            else:
                host = args.host
                port = (
                    config_eos.server.eosdash_port
                    if config_eos.server.eosdash_port
                    else (args.port + 1)
                )
                eos_host = args.host
                eos_port = args.port
                log_level = args.log_level
                access_log = args.access_log
                reload = args.reload

            host = host if host else get_default_host()
            port = port if port else 8504
            eos_host = eos_host if eos_host else get_default_host()
            eos_port = eos_port if eos_port else 8503

            eos_dir = str(config_eos.general.data_folder_path)
            eos_config_dir = str(config_eos.general.config_folder_path)

            eosdash_process = start_eosdash(
                host=host,
                port=port,
                eos_host=eos_host,
                eos_port=eos_port,
                log_level=log_level,
                access_log=access_log,
                reload=reload,
                eos_dir=eos_dir,
                eos_config_dir=eos_config_dir,
            )
        except Exception as e:
            logger.error(f"Failed to start EOSdash server. Error: {e}")
            sys.exit(1)
    cache_load()
    if config_eos.cache.cleanup_interval is None:
        logger.warning("Cache file cleanup disabled. Set cache.cleanup_interval.")
    else:
        await cache_cleanup_task()
    await energy_management_task()

    # Handover to application
    yield

    # On shutdown
    # nothing to do
    cache_save()


app = FastAPI(
@@ -227,18 +286,138 @@ app = FastAPI(
        "url": "https://www.apache.org/licenses/LICENSE-2.0.html",
    },
    lifespan=lifespan,
    root_path=str(Path(__file__).parent),
)

server_dir = Path(__file__).parent.resolve()


class PdfResponse(FileResponse):
    media_type = "application/pdf"
@app.put("/v1/config/reset", tags=["config"])
def fastapi_config_update_post() -> ConfigEOS:
@app.post("/v1/admin/cache/clear", tags=["admin"])
def fastapi_admin_cache_clear_post(clear_all: Optional[bool] = None) -> dict:
    """Clear the cache of expired data.

    Deletes expired cache files.

    Args:
        clear_all (Optional[bool]): Delete all cached files. Default is False.

    Returns:
        data (dict): The management data after cleanup.
    """
    try:
        cache_clear(clear_all=clear_all)
        data = CacheFileStore().current_store()
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error on cache clear: {e}")
    return data


@app.post("/v1/admin/cache/save", tags=["admin"])
def fastapi_admin_cache_save_post() -> dict:
    """Save the current cache management data.

    Returns:
        data (dict): The management data that was saved.
    """
    try:
        data = cache_save()
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error on cache save: {e}")
    return data


@app.post("/v1/admin/cache/load", tags=["admin"])
def fastapi_admin_cache_load_post() -> dict:
    """Load cache management data.

    Returns:
        data (dict): The management data that was loaded.
    """
    try:
        data = cache_save()
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error on cache load: {e}")
    return data


@app.get("/v1/admin/cache", tags=["admin"])
def fastapi_admin_cache_get() -> dict:
    """Current cache management data.

    Returns:
        data (dict): The management data.
    """
    try:
        data = CacheFileStore().current_store()
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error on cache data retrieval: {e}")
    return data
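Assuming a default EOS instance on localhost:8503, the new admin cache endpoints could be exercised like this (a sketch, not part of the diff; host and port are assumptions):

    import requests

    base = "http://localhost:8503"
    print(requests.get(f"{base}/v1/admin/cache").json())                                     # inspect store
    print(requests.post(f"{base}/v1/admin/cache/clear", params={"clear_all": True}).json())  # drop all entries
    print(requests.post(f"{base}/v1/admin/cache/save").json())                               # persist store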
@app.post("/v1/admin/server/restart", tags=["admin"])
async def fastapi_admin_server_restart_post() -> dict:
    """Restart the server.

    Restart EOS properly by starting a new instance before exiting the old one.
    """
    logger.info("🔄 Restarting EOS...")

    # Start a new EOS (Uvicorn) process
    # Force a new process group to make the new process easily distinguishable from the current one
    # Set environment before any subprocess run, to keep custom config dir
    env = os.environ.copy()
    env["EOS_DIR"] = str(config_eos.general.data_folder_path)
    env["EOS_CONFIG_DIR"] = str(config_eos.general.config_folder_path)

    new_process = subprocess.Popen(
        [
            sys.executable,
        ]
        + sys.argv,
        env=env,
        start_new_session=True,
    )
    logger.info(f"🚀 EOS restarted, PID {new_process.pid}")

    # Gracefully shut down this process.
    asyncio.create_task(server_shutdown_task())

    # Will be executed because shutdown is delegated to async coroutine
    return {
        "message": "Restarting EOS...",
        "pid": new_process.pid,
    }


@app.post("/v1/admin/server/shutdown", tags=["admin"])
async def fastapi_admin_server_shutdown_post() -> dict:
    """Shutdown the server."""
    logger.info("🔄 Stopping EOS...")

    # Gracefully shut down this process.
    asyncio.create_task(server_shutdown_task())

    # Will be executed because shutdown is delegated to async coroutine
    return {
        "message": "Stopping EOS...",
        "pid": psutil.Process().pid,
    }


@app.get("/v1/health")
def fastapi_health_get():  # type: ignore
    """Health check endpoint to verify that the EOS server is alive."""
    return JSONResponse(
        {
            "status": "alive",
            "pid": psutil.Process().pid,
        }
    )
@app.post("/v1/config/reset", tags=["config"])
def fastapi_config_reset_post() -> ConfigEOS:
    """Reset the configuration to the EOS configuration file.

    Returns:
@@ -249,7 +428,7 @@ def fastapi_config_update_post() -> ConfigEOS:
    except Exception as e:
        raise HTTPException(
            status_code=404,
            detail=f"Cannot update configuration from file '{config_eos.config_file_path}': {e}",
            detail=f"Cannot reset configuration: {e}",
        )
    return config_eos

@@ -302,6 +481,58 @@ def fastapi_config_put(settings: SettingsEOS) -> ConfigEOS:
    return config_eos


@app.put("/v1/config/{path:path}", tags=["config"])
def fastapi_config_put_key(
    path: str = FastapiPath(
        ..., description="The nested path to the configuration key (e.g., general/latitude)."
    ),
    value: Any = Body(..., description="The value to assign to the specified configuration path."),
) -> ConfigEOS:
    """Update a nested key or index in the config model.

    Args:
        path (str): The nested path to the key (e.g., "general/latitude" or "optimize/nested_list/0").
        value (Any): The new value to assign to the key or index at path.

    Returns:
        configuration (ConfigEOS): The current configuration after the update.
    """
    try:
        config_eos.set_config_value(path, value)
    except IndexError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except KeyError as e:
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))

    return config_eos


@app.get("/v1/config/{path:path}", tags=["config"])
def fastapi_config_get_key(
    path: str = FastapiPath(
        ..., description="The nested path to the configuration key (e.g., general/latitude)."
    ),
) -> Response:
    """Get the value of a nested key or index in the config model.

    Args:
        path (str): The nested path to the key (e.g., "general/latitude" or "optimize/nested_list/0").

    Returns:
        value (Any): The value of the selected nested key.
    """
    try:
        return config_eos.get_config_value(path)
    except IndexError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except KeyError as e:
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))
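The nested-path endpoints make single-key updates scriptable. A sketch, assuming EOS on localhost:8503 (host, port, and the latitude value are placeholders):

    import requests

    base = "http://localhost:8503"
    # Set one nested key, then read it back.
    requests.put(f"{base}/v1/config/general/latitude", json=52.52)
    print(requests.get(f"{base}/v1/config/general/latitude").json())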
@app.get("/v1/measurement/keys", tags=["measurement"])
def fastapi_measurement_keys_get() -> list[str]:
    """Get a list of available measurement keys."""
@@ -476,6 +707,49 @@ def fastapi_prediction_series_get(
    return PydanticDateTimeSeries.from_series(pdseries)


@app.get("/v1/prediction/dataframe", tags=["prediction"])
def fastapi_prediction_dataframe_get(
    keys: Annotated[list[str], Query(description="Prediction keys.")],
    start_datetime: Annotated[
        Optional[str],
        Query(description="Starting datetime (inclusive)."),
    ] = None,
    end_datetime: Annotated[
        Optional[str],
        Query(description="Ending datetime (exclusive)."),
    ] = None,
    interval: Annotated[
        Optional[str],
        Query(description="Time duration for each interval. Defaults to 1 hour."),
    ] = None,
) -> PydanticDateTimeDataFrame:
    """Get prediction for the given keys within the given date range as a dataframe.

    Args:
        keys (list[str]): Prediction keys.
        start_datetime (Optional[str]): Starting datetime (inclusive).
            Defaults to start datetime of latest prediction.
        end_datetime (Optional[str]): Ending datetime (exclusive).
            Defaults to end datetime of latest prediction.
    """
    for key in keys:
        if key not in prediction_eos.record_keys:
            raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
    if start_datetime is None:
        start_datetime = prediction_eos.start_datetime
    else:
        start_datetime = to_datetime(start_datetime)
    if end_datetime is None:
        end_datetime = prediction_eos.end_datetime
    else:
        end_datetime = to_datetime(end_datetime)
    df = prediction_eos.keys_to_dataframe(
        keys=keys, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
    )
    return PydanticDateTimeDataFrame.from_dataframe(df, tz=config_eos.general.timezone)
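A sketch of fetching the new dataframe endpoint, assuming EOS on localhost:8503; the key name "load_mean" and the interval string are examples, actual keys depend on the enabled providers:

    import requests

    base = "http://localhost:8503"
    resp = requests.get(
        f"{base}/v1/prediction/dataframe",
        params={"keys": ["load_mean"], "interval": "1 hour"},
    )
    print(resp.json())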
@app.get("/v1/prediction/list", tags=["prediction"])
def fastapi_prediction_list_get(
    key: Annotated[str, Query(description="Prediction key.")],
@@ -489,7 +763,7 @@ def fastapi_prediction_list_get(
    ] = None,
    interval: Annotated[
        Optional[str],
        Query(description="Time duration for each interval."),
        Query(description="Time duration for each interval. Defaults to 1 hour."),
    ] = None,
) -> List[Any]:
    """Get prediction for given key within given date range as value list.
@@ -526,8 +800,40 @@ def fastapi_prediction_list_get(
    return prediction_list


@app.put("/v1/prediction/import/{provider_id}", tags=["prediction"])
def fastapi_prediction_import_provider(
    provider_id: str = FastapiPath(..., description="Provider ID."),
    data: Optional[Union[PydanticDateTimeDataFrame, PydanticDateTimeData, dict]] = None,
    force_enable: Optional[bool] = None,
) -> Response:
    """Import prediction for given provider ID.

    Args:
        provider_id: ID of provider to update.
        data: Prediction data.
        force_enable: Update data even if provider is disabled.
            Defaults to False.
    """
    try:
        provider = prediction_eos.provider_by_id(provider_id)
    except ValueError:
        raise HTTPException(status_code=404, detail=f"Provider '{provider_id}' not found.")
    if not provider.enabled() and not force_enable:
        raise HTTPException(status_code=404, detail=f"Provider '{provider_id}' not enabled.")
    try:
        provider.import_from_json(json_str=json.dumps(data))
        provider.update_datetime = to_datetime(in_timezone=config_eos.general.timezone)
    except Exception as e:
        raise HTTPException(
            status_code=400, detail=f"Error on import for provider '{provider_id}': {e}"
        )
    return Response()


@app.post("/v1/prediction/update", tags=["prediction"])
def fastapi_prediction_update(force_update: bool = False, force_enable: bool = False) -> Response:
def fastapi_prediction_update(
    force_update: Optional[bool] = False, force_enable: Optional[bool] = False
) -> Response:
    """Update predictions for all providers.

    Args:
@@ -539,8 +845,11 @@ def fastapi_prediction_update(force_update: bool = False, force_enable: bool = F
    try:
        prediction_eos.update_data(force_update=force_update, force_enable=force_enable)
    except Exception as e:
        raise e
        # raise HTTPException(status_code=400, detail=f"Error on update of provider: {e}")
        trace = "".join(traceback.TracebackException.from_exception(e).format())
        raise HTTPException(
            status_code=400,
            detail=f"Error on prediction update: {e}{trace}",
        )
    return Response()


@@ -564,7 +873,9 @@ def fastapi_prediction_update_provider(
    try:
        provider.update_data(force_update=force_update, force_enable=force_enable)
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error on update of provider: {e}")
        raise HTTPException(
            status_code=400, detail=f"Error on update of provider '{provider_id}': {e}"
        )
    return Response()
@@ -856,74 +1167,66 @@ def site_map() -> RedirectResponse:
    return RedirectResponse(url="/docs")


# Keep the proxy last to handle all requests that are not taken by the Rest API.

if config_eos.server.startup_eosdash:

    @app.delete("/{path:path}", include_in_schema=False)
    async def proxy_delete(request: Request, path: str) -> Response:
        return await proxy(request, path)

    @app.get("/{path:path}", include_in_schema=False)
    async def proxy_get(request: Request, path: str) -> Response:
        return await proxy(request, path)

    @app.post("/{path:path}", include_in_schema=False)
    async def proxy_post(request: Request, path: str) -> Response:
        return await proxy(request, path)

    @app.put("/{path:path}", include_in_schema=False)
    async def proxy_put(request: Request, path: str) -> Response:
        return await proxy(request, path)
else:

    @app.get("/", include_in_schema=False)
    def root() -> RedirectResponse:
        return RedirectResponse(url="/docs")
# Keep the redirect last to handle all requests that are not taken by the Rest API.


async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse | HTMLResponse]:
    if config_eos.server.eosdash_host and config_eos.server.eosdash_port:
        # Proxy to EOSdash server
        url = f"http://{config_eos.server.eosdash_host}:{config_eos.server.eosdash_port}/{path}"
        headers = dict(request.headers)
@app.delete("/{path:path}", include_in_schema=False)
async def redirect_delete(request: Request, path: str) -> Response:
    return redirect(request, path)

        data = await request.body()

        try:
            async with httpx.AsyncClient() as client:
                if request.method == "GET":
                    response = await client.get(url, headers=headers)
                elif request.method == "POST":
                    response = await client.post(url, headers=headers, content=data)
                elif request.method == "PUT":
                    response = await client.put(url, headers=headers, content=data)
                elif request.method == "DELETE":
                    response = await client.delete(url, headers=headers, content=data)
        except Exception as e:
@app.get("/{path:path}", include_in_schema=False)
async def redirect_get(request: Request, path: str) -> Response:
    return redirect(request, path)


@app.post("/{path:path}", include_in_schema=False)
async def redirect_post(request: Request, path: str) -> Response:
    return redirect(request, path)


@app.put("/{path:path}", include_in_schema=False)
async def redirect_put(request: Request, path: str) -> Response:
    return redirect(request, path)


def redirect(request: Request, path: str) -> Union[HTMLResponse, RedirectResponse]:
    # Path is not for EOSdash
    if not (path.startswith("eosdash") or path == ""):
        host = config_eos.server.eosdash_host
        if host is None:
            host = config_eos.server.host
        host = str(host)
        port = config_eos.server.eosdash_port
        if port is None:
            port = 8504
        # Make hostname Windows friendly
        if host == "0.0.0.0" and os.name == "nt":
            host = "localhost"
        url = f"http://{host}:{port}/"
        error_page = create_error_page(
            status_code="404",
            error_title="Page Not Found",
            error_message=f"""<pre>
EOSdash server not reachable: '{url}'
Did you start the EOSdash server
or set 'startup_eosdash'?
If there is no application server intended please
set 'eosdash_host' or 'eosdash_port' to None.
URL is unknown: '{request.url}'
Did you want to connect to <a href="{url}" class="back-button">EOSdash</a>?
</pre>
""",
            error_details=f"{e}",
            error_details="Unknown URL",
        )
        return HTMLResponse(content=error_page, status_code=404)

        return Response(
            content=response.content,
            status_code=response.status_code,
            headers=dict(response.headers),
        )
    else:
        # Make hostname Windows friendly
        host = str(config_eos.server.eosdash_host)
        if host == "0.0.0.0" and os.name == "nt":
            host = "localhost"
        if host and config_eos.server.eosdash_port:
            # Redirect to EOSdash server
            url = f"http://{host}:{config_eos.server.eosdash_port}/{path}"
            return RedirectResponse(url=url, status_code=303)

    # Redirect the root URL to the site map
    return RedirectResponse(url="/docs")
    return RedirectResponse(url="/docs", status_code=303)


def run_eos(host: str, port: int, log_level: str, access_log: bool, reload: bool) -> None:
@@ -950,6 +1253,10 @@ def run_eos(host: str, port: int, log_level: str, access_log: bool, reload: bool
    # Make hostname Windows friendly
    if host == "0.0.0.0" and os.name == "nt":
        host = "localhost"

    # Wait for EOS port to be free - e.g. in case of restart
    wait_for_port_free(port, timeout=120, waiting_app_name="EOS")

    try:
        uvicorn.run(
            "akkudoktoreos.server.eos:app",
@@ -1017,8 +1324,11 @@ def main() -> None:

    args = parser.parse_args()

    host = args.host if args.host else get_default_host()
    port = args.port if args.port else 8503

    try:
        run_eos(args.host, args.port, args.log_level, args.access_log, args.reload)
        run_eos(host, port, args.log_level, args.access_log, args.reload)
    except:
        sys.exit(1)
@@ -1,127 +1,165 @@
import argparse
import os
import sys
from functools import reduce
from typing import Any, Union
import traceback
from pathlib import Path
from typing import Optional

import psutil
import uvicorn
from fasthtml.common import H1, Table, Td, Th, Thead, Titled, Tr, fast_app
from pydantic.fields import ComputedFieldInfo, FieldInfo
from pydantic_core import PydanticUndefined
from fasthtml.common import FileResponse, JSONResponse
from monsterui.core import FastHTML, Theme

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.server.dash.bokeh import BokehJS
from akkudoktoreos.server.dash.components import Page

# Pages
from akkudoktoreos.server.dash.configuration import Configuration
from akkudoktoreos.server.dash.demo import Demo
from akkudoktoreos.server.dash.footer import Footer
from akkudoktoreos.server.dash.hello import Hello
from akkudoktoreos.server.server import get_default_host, wait_for_port_free

# from akkudoktoreos.server.dash.altair import AltairJS

logger = get_logger(__name__)

config_eos = get_config()

# The favicon for EOSdash
favicon_filepath = Path(__file__).parent.joinpath("dash/assets/favicon/favicon.ico")
if not favicon_filepath.exists():
    raise ValueError(f"Does not exist {favicon_filepath}")

# Command line arguments
args = None
args: Optional[argparse.Namespace] = None


def get_default_value(field_info: Union[FieldInfo, ComputedFieldInfo], regular_field: bool) -> Any:
    default_value = ""
    if regular_field:
        if (val := field_info.default) is not PydanticUndefined:
            default_value = val
        else:
            default_value = "N/A"
    return default_value
# Get frankenui and tailwind headers via CDN using Theme.green.headers()
# Add altair headers
# hdrs=(Theme.green.headers(highlightjs=True), AltairJS,)
hdrs = (
    Theme.green.headers(highlightjs=True),
    BokehJS,
)


def resolve_nested_types(field_type: Any, parent_types: list[str]) -> list[tuple[Any, list[str]]]:
    resolved_types: list[tuple[Any, list[str]]] = []

    origin = getattr(field_type, "__origin__", field_type)
    if origin is Union:
        for arg in getattr(field_type, "__args__", []):
            if arg is not type(None):
                resolved_types.extend(resolve_nested_types(arg, parent_types))
    else:
        resolved_types.append((field_type, parent_types))

    return resolved_types


configs = []
inner_types: set[type[PydanticBaseModel]] = set()
for field_name, field_info in list(config_eos.model_fields.items()) + list(
    config_eos.model_computed_fields.items()
):

    def extract_nested_models(
        subfield_info: Union[ComputedFieldInfo, FieldInfo], parent_types: list[str]
    ) -> None:
        regular_field = isinstance(subfield_info, FieldInfo)
        subtype = subfield_info.annotation if regular_field else subfield_info.return_type

        if subtype in inner_types:
            return

        nested_types = resolve_nested_types(subtype, [])
        found_basic = False
        for nested_type, nested_parent_types in nested_types:
            if not isinstance(nested_type, type) or not issubclass(nested_type, PydanticBaseModel):
                if found_basic:
                    continue

                config = {}
                config["name"] = ".".join(parent_types)
                try:
                    config["value"] = reduce(getattr, [config_eos] + parent_types)
                except AttributeError:
                    # Parent value(s) are not set in current config
                    config["value"] = ""
                config["default"] = get_default_value(subfield_info, regular_field)
                config["description"] = (
                    subfield_info.description if subfield_info.description else ""
                )
                configs.append(config)
                found_basic = True
            else:
                new_parent_types = parent_types + nested_parent_types
                inner_types.add(nested_type)
                for nested_field_name, nested_field_info in list(
                    nested_type.model_fields.items()
                ) + list(nested_type.model_computed_fields.items()):
                    extract_nested_models(
                        nested_field_info,
                        new_parent_types + [nested_field_name],
                    )

    extract_nested_models(field_info, [field_name])
configs = sorted(configs, key=lambda x: x["name"])


app, rt = fast_app(
# The EOSdash application
app: FastHTML = FastHTML(
    title="EOSdash",
    hdrs=hdrs,
    secret_key=os.getenv("EOS_SERVER__EOSDASH_SESSKEY"),
)


def config_table() -> Table:
    rows = [
        Tr(
            Td(config["name"]),
            Td(config["value"]),
            Td(config["default"]),
            Td(config["description"]),
            cls="even:bg-purple/5",
def eos_server() -> tuple[str, int]:
    """Retrieves the EOS server host and port configuration.

    If `args` is provided, it uses the `eos_host` and `eos_port` from `args`.
    Otherwise, it falls back to the values from `config_eos.server`.

    Returns:
        tuple[str, int]: A tuple containing:
            - `eos_host` (str): The EOS server hostname or IP.
            - `eos_port` (int): The EOS server port.
    """
    if args is None:
        eos_host = str(config_eos.server.host)
        eos_port = config_eos.server.port
    else:
        eos_host = args.eos_host
        eos_port = args.eos_port
    eos_host = eos_host if eos_host else get_default_host()
    eos_port = eos_port if eos_port else 8503

    return eos_host, eos_port
@app.get("/favicon.ico")
def get_eosdash_favicon():  # type: ignore
    """Get favicon."""
    return FileResponse(path=favicon_filepath)


@app.get("/")
def get_eosdash():  # type: ignore
    """Serves the main EOSdash page.

    Returns:
        Page: The main dashboard page with navigation links and footer.
    """
    return Page(
        None,
        {
            "EOSdash": "/eosdash/hello",
            "Config": "/eosdash/configuration",
            "Demo": "/eosdash/demo",
        },
        Hello(),
        Footer(*eos_server()),
        "/eosdash/footer",
    )
        for config in configs
    ]
    flds = "Name", "Value", "Default", "Description"
    head = Thead(*map(Th, flds), cls="bg-purple/10")
    return Table(head, *rows, cls="w-full")


@rt("/")
def get():  # type: ignore
    return Titled("EOS Dashboard", H1("Configuration"), config_table())
@app.get("/eosdash/footer")
def get_eosdash_footer():  # type: ignore
    """Serves the EOSdash Footer information.

    Returns:
        Footer: The Footer component.
    """
    return Footer(*eos_server())


def run_eosdash(host: str, port: int, log_level: str, access_log: bool, reload: bool) -> None:
@app.get("/eosdash/hello")
def get_eosdash_hello():  # type: ignore
    """Serves the EOSdash Hello page.

    Returns:
        Hello: The Hello page component.
    """
    return Hello()


@app.get("/eosdash/configuration")
def get_eosdash_configuration():  # type: ignore
    """Serves the EOSdash Configuration page.

    Returns:
        Configuration: The Configuration page component.
    """
    return Configuration(*eos_server())


@app.get("/eosdash/demo")
def get_eosdash_demo():  # type: ignore
    """Serves the EOSdash Demo page.

    Returns:
        Demo: The Demo page component.
    """
    return Demo(*eos_server())


@app.get("/eosdash/health")
def get_eosdash_health():  # type: ignore
    """Health check endpoint to verify that the EOSdash server is alive."""
    return JSONResponse(
        {
            "status": "alive",
            "pid": psutil.Process().pid,
        }
    )


@app.get("/eosdash/assets/{fname:path}.{ext:static}")
def get_eosdash_assets(fname: str, ext: str):  # type: ignore
    """Get assets."""
    asset_filepath = Path(__file__).parent.joinpath(f"dash/assets/{fname}.{ext}")
    return FileResponse(path=asset_filepath)
def run_eosdash() -> None:
    """Run the EOSdash server with the specified configurations.

    This function starts the EOSdash server using the Uvicorn ASGI server. It accepts
@@ -131,31 +169,77 @@ def run_eosdash(host: str, port: int, log_level: str, access_log: bool, reload:
    server to the specified host and port, an error message is logged and the
    application exits.

    Parameters:
        host (str): The hostname to bind the server to.
        port (int): The port number to bind the server to.
        log_level (str): The log level for the server. Options include "critical", "error",
            "warning", "info", "debug", and "trace".
        access_log (bool): Whether to enable or disable the access log. Set to True to enable.
        reload (bool): Whether to enable or disable auto-reload. Set to True for development.

    Returns:
        None
    """
    # Setup parameters from args, config_eos and default
    # Remember parameters that are also in config
    # - EOS host
    if args and args.eos_host:
        eos_host = args.eos_host
    elif config_eos.server.host:
        eos_host = config_eos.server.host
    else:
        eos_host = get_default_host()
    config_eos.server.host = eos_host
    # - EOS port
    if args and args.eos_port:
        eos_port = args.eos_port
    elif config_eos.server.port:
        eos_port = config_eos.server.port
    else:
        eos_port = 8503
    config_eos.server.port = eos_port
    # - EOSdash host
    if args and args.host:
        eosdash_host = args.host
    elif config_eos.server.eosdash.host:
        eosdash_host = config_eos.server.eosdash_host
    else:
        eosdash_host = get_default_host()
    config_eos.server.eosdash_host = eosdash_host
    # - EOSdash port
    if args and args.port:
        eosdash_port = args.port
    elif config_eos.server.eosdash_port:
        eosdash_port = config_eos.server.eosdash_port
    else:
        eosdash_port = 8504
    config_eos.server.eosdash_port = eosdash_port
    # - log level
    if args and args.log_level:
        log_level = args.log_level
    else:
        log_level = "info"
    # - access log
    if args and args.access_log:
        access_log = args.access_log
    else:
        access_log = False
    # - reload
    if args and args.reload:
        reload = args.reload
    else:
        reload = False

    # Make hostname Windows friendly
    if host == "0.0.0.0" and os.name == "nt":
        host = "localhost"
    if eosdash_host == "0.0.0.0" and os.name == "nt":
        eosdash_host = "localhost"

    # Wait for EOSdash port to be free - e.g. in case of restart
    wait_for_port_free(eosdash_port, timeout=120, waiting_app_name="EOSdash")

    try:
        uvicorn.run(
            "akkudoktoreos.server.eosdash:app",
            host=host,
            port=port,
            log_level=log_level.lower(),  # Convert log_level to lowercase
            host=eosdash_host,
            port=eosdash_port,
            log_level=log_level.lower(),
            access_log=access_log,
            reload=reload,
        )
    except Exception as e:
        logger.error(f"Could not bind to host {host}:{port}. Error: {e}")
        logger.error(f"Could not bind to host {eosdash_host}:{eosdash_port}. Error: {e}")
        raise e


@@ -164,7 +248,7 @@ def main() -> None:

    This function sets up the argument parser to accept command-line arguments for
    host, port, log_level, access_log, and reload. It uses default values from the
    config_eos module if arguments are not provided. After parsing the arguments,
    config module if arguments are not provided. After parsing the arguments,
    it starts the EOSdash server with the specified configurations.

    Command-line Arguments:
@@ -178,7 +262,6 @@ def main() -> None:
    """
    parser = argparse.ArgumentParser(description="Start EOSdash server.")

    # Host and port arguments with defaults from config_eos
    parser.add_argument(
        "--host",
        type=str,
@@ -191,22 +274,18 @@ def main() -> None:
        default=config_eos.server.eosdash_port,
        help="Port for the EOSdash server (default: value from config)",
    )

    # EOS Host and port arguments with defaults from config_eos
    parser.add_argument(
        "--eos-host",
        type=str,
        default=str(config_eos.server.host),
        help="Host for the EOS server (default: value from config)",
        help="Host of the EOS server (default: value from config)",
    )
    parser.add_argument(
        "--eos-port",
        type=int,
        default=config_eos.server.port,
        help="Port for the EOS server (default: value from config)",
        help="Port of the EOS server (default: value from config)",
    )

    # Optional arguments for log_level, access_log, and reload
    parser.add_argument(
        "--log_level",
        type=str,
@@ -217,7 +296,7 @@ def main() -> None:
        "--access_log",
        type=bool,
        default=False,
        help="Enable or disable access log. Options: True or False (default: True)",
        help="Enable or disable access log. Options: True or False (default: False)",
    )
    parser.add_argument(
        "--reload",
@@ -226,11 +305,15 @@ def main() -> None:
        help="Enable or disable auto-reload. Useful for development. Options: True or False (default: False)",
    )

    global args
    args = parser.parse_args()

    try:
        run_eosdash(args.host, args.port, args.log_level, args.access_log, args.reload)
    except:
        run_eosdash()
    except Exception as ex:
        error_msg = f"Failed to run EOSdash: {ex}"
        logger.error(error_msg)
        traceback.print_exc()
        sys.exit(1)
0 src/akkudoktoreos/server/rest/__init__.py Normal file
91 src/akkudoktoreos/server/rest/error.py Normal file
@@ -0,0 +1,91 @@
ERROR_PAGE_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Energy Optimization System (EOS) Error</title>
    <style>
        body {
            font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, sans-serif;
            background-color: #f5f5f5;
            display: flex;
            justify-content: center;
            align-items: center;
            height: 100vh;
            margin: 0;
            padding: 20px;
            box-sizing: border-box;
        }
        .error-container {
            background: white;
            padding: 2rem;
            border-radius: 8px;
            box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
            max-width: 500px;
            width: 100%;
            text-align: center;
        }
        .error-code {
            font-size: 4rem;
            font-weight: bold;
            color: #e53e3e;
            margin: 0;
        }
        .error-title {
            font-size: 1.5rem;
            color: #2d3748;
            margin: 1rem 0;
        }
        .error-message {
            color: #4a5568;
            margin-bottom: 1.5rem;
        }
        .error-details {
            background: #f7fafc;
            padding: 1rem;
            border-radius: 4px;
            margin-bottom: 1.5rem;
            text-align: center;
            font-family: monospace;
            white-space: pre-wrap;
            word-break: break-word;
        }
        .back-button {
            background: #3182ce;
            color: white;
            border: none;
            padding: 0.75rem 1.5rem;
            border-radius: 4px;
            text-decoration: none;
            display: inline-block;
            transition: background-color 0.2s;
        }
        .back-button:hover {
            background: #2c5282;
        }
    </style>
</head>
<body>
    <div class="error-container">
        <h1 class="error-code">STATUS_CODE</h1>
        <h2 class="error-title">ERROR_TITLE</h2>
        <p class="error-message">ERROR_MESSAGE</p>
        <div class="error-details">ERROR_DETAILS</div>
        <a href="/docs" class="back-button">Back to Home</a>
    </div>
</body>
</html>
"""


def create_error_page(
    status_code: str, error_title: str, error_message: str, error_details: str
) -> str:
    """Create an error page by replacing placeholders in the template."""
    return (
        ERROR_PAGE_TEMPLATE.replace("STATUS_CODE", status_code)
        .replace("ERROR_TITLE", error_title)
        .replace("ERROR_MESSAGE", error_message)
        .replace("ERROR_DETAILS", error_details)
    )
92 src/akkudoktoreos/server/rest/tasks.py Normal file
@@ -0,0 +1,92 @@
"""Task handling taken from fastapi-utils/fastapi_utils/tasks.py."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from functools import wraps
|
||||
from typing import Any, Callable, Coroutine, Union
|
||||
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
NoArgsNoReturnFuncT = Callable[[], None]
|
||||
NoArgsNoReturnAsyncFuncT = Callable[[], Coroutine[Any, Any, None]]
|
||||
ExcArgNoReturnFuncT = Callable[[Exception], None]
|
||||
ExcArgNoReturnAsyncFuncT = Callable[[Exception], Coroutine[Any, Any, None]]
|
||||
NoArgsNoReturnAnyFuncT = Union[NoArgsNoReturnFuncT, NoArgsNoReturnAsyncFuncT]
|
||||
ExcArgNoReturnAnyFuncT = Union[ExcArgNoReturnFuncT, ExcArgNoReturnAsyncFuncT]
|
||||
NoArgsNoReturnDecorator = Callable[[NoArgsNoReturnAnyFuncT], NoArgsNoReturnAsyncFuncT]
|
||||
|
||||
|
||||
async def _handle_func(func: NoArgsNoReturnAnyFuncT) -> None:
|
||||
if asyncio.iscoroutinefunction(func):
|
||||
await func()
|
||||
else:
|
||||
await run_in_threadpool(func)
|
||||
|
||||
|
||||
async def _handle_exc(exc: Exception, on_exception: ExcArgNoReturnAnyFuncT | None) -> None:
|
||||
if on_exception:
|
||||
if asyncio.iscoroutinefunction(on_exception):
|
||||
await on_exception(exc)
|
||||
else:
|
||||
await run_in_threadpool(on_exception, exc)
|
||||
|
||||
|
||||
def repeat_every(
|
||||
*,
|
||||
seconds: float,
|
||||
wait_first: float | None = None,
|
||||
logger: logging.Logger | None = None,
|
||||
raise_exceptions: bool = False,
|
||||
max_repetitions: int | None = None,
|
||||
on_complete: NoArgsNoReturnAnyFuncT | None = None,
|
||||
on_exception: ExcArgNoReturnAnyFuncT | None = None,
|
||||
) -> NoArgsNoReturnDecorator:
|
||||
"""A decorator that modifies a function so it is periodically re-executed after its first call.
|
||||
|
||||
The function it decorates should accept no arguments and return nothing. If necessary, this can be accomplished
|
||||
by using `functools.partial` or otherwise wrapping the target function prior to decoration.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
seconds: float
|
||||
The number of seconds to wait between repeated calls
|
||||
wait_first: float (default None)
|
||||
If not None, the function will wait for the given duration before the first call
|
||||
max_repetitions: Optional[int] (default None)
|
||||
The maximum number of times to call the repeated function. If `None`, the function is repeated forever.
|
||||
on_complete: Optional[Callable[[], None]] (default None)
|
||||
A function to call after the final repetition of the decorated function.
|
||||
on_exception: Optional[Callable[[Exception], None]] (default None)
|
||||
A function to call when an exception is raised by the decorated function.
|
||||
"""
|
||||
|
||||
def decorator(func: NoArgsNoReturnAnyFuncT) -> NoArgsNoReturnAsyncFuncT:
|
||||
"""Converts the decorated function into a repeated, periodically-called version."""
|
||||
|
||||
@wraps(func)
|
||||
async def wrapped() -> None:
|
||||
async def loop() -> None:
|
||||
if wait_first is not None:
|
||||
await asyncio.sleep(wait_first)
|
||||
|
||||
repetitions = 0
|
||||
while max_repetitions is None or repetitions < max_repetitions:
|
||||
try:
|
||||
await _handle_func(func)
|
||||
|
||||
except Exception as exc:
|
||||
await _handle_exc(exc, on_exception)
|
||||
|
||||
repetitions += 1
|
||||
await asyncio.sleep(seconds)
|
||||
|
||||
if on_complete:
|
||||
await _handle_func(on_complete)
|
||||
|
||||
asyncio.ensure_future(loop())
|
||||
|
||||
return wrapped
|
||||
|
||||
return decorator
|
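A sketch of how the decorator is meant to be used; awaiting the wrapped function once (for example in a FastAPI lifespan, as the server module above does) schedules the background loop:

    @repeat_every(seconds=60.0, wait_first=10.0)
    def heartbeat() -> None:
        # Runs every 60 s after an initial 10 s delay.
        print("still alive")

    # Inside an async startup hook:
    #     await heartbeat()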
@@ -1,7 +1,10 @@
"""Server Module."""

from typing import Optional
import os
import time
from typing import Optional, Union

import psutil
from pydantic import Field, IPvAnyAddress, field_validator

from akkudoktoreos.config.configabc import SettingsBaseModel
@@ -10,24 +13,107 @@ from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)


class ServerCommonSettings(SettingsBaseModel):
    """Server Configuration.
def get_default_host() -> str:
    if os.name == "nt":
        return "127.0.0.1"
    return "0.0.0.0"

    Attributes:
        To be added

def wait_for_port_free(port: int, timeout: int = 0, waiting_app_name: str = "App") -> bool:
    """Wait for a network port to become free, with timeout.

    Checks if the port is currently in use and logs warnings with process details.
    Retries every 3 seconds until timeout is reached.

    Args:
        port: The network port number to check
        timeout: Maximum seconds to wait (0 means check once without waiting)
        waiting_app_name: Name of the application waiting for the port

    Returns:
        bool: True if port is free, False if port is still in use after timeout

    Raises:
        ValueError: If port number or timeout is invalid
        psutil.Error: If there are problems accessing process information
    """
    if not 0 <= port <= 65535:
        raise ValueError(f"Invalid port number: {port}")
    if timeout < 0:
        raise ValueError(f"Invalid timeout: {timeout}")

    host: Optional[IPvAnyAddress] = Field(default="0.0.0.0", description="EOS server IP address.")
    def get_processes_using_port() -> list[dict]:
        """Get info about processes using the specified port."""
        processes: list[dict] = []
        seen_pids: set[int] = set()

        try:
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == port and conn.pid not in seen_pids:
                    try:
                        process = psutil.Process(conn.pid)
                        seen_pids.add(conn.pid)
                        processes.append(process.as_dict(attrs=["pid", "cmdline"]))
                    except psutil.NoSuchProcess:
                        continue
        except psutil.Error as e:
            logger.error(f"Error checking port {port}: {e}")
            raise

        return processes

    retries = max(int(timeout / 3), 1) if timeout > 0 else 1

    for _ in range(retries):
        process_info = get_processes_using_port()

        if not process_info:
            return True

        if timeout <= 0:
            break

        logger.info(f"{waiting_app_name} waiting for port {port} to become free...")
        time.sleep(3)

    if process_info:
        logger.warning(
            f"{waiting_app_name} port {port} still in use after waiting {timeout} seconds."
        )
        for info in process_info:
            logger.warning(
                f"Process using port - PID: {info['pid']}, Command: {' '.join(info['cmdline'])}"
            )
        return False

    return True
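Usage sketch, mirroring how the servers call it on restart (the port value is an example):

    # Check once without waiting:
    if not wait_for_port_free(8503, timeout=0, waiting_app_name="EOS"):
        print("Port 8503 is still in use")

    # Or block for up to two minutes, e.g. while an old instance shuts down:
    wait_for_port_free(8503, timeout=120, waiting_app_name="EOS")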
class ServerCommonSettings(SettingsBaseModel):
    """Server Configuration."""

    host: Optional[IPvAnyAddress] = Field(
        default=get_default_host(), description="EOS server IP address."
    )
    port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
    verbose: Optional[bool] = Field(default=False, description="Enable debug output")
    startup_eosdash: Optional[bool] = Field(
        default=True, description="EOS server to start EOSdash server."
    )
    eosdash_host: Optional[IPvAnyAddress] = Field(
        default="0.0.0.0", description="EOSdash server IP address."
        default=get_default_host(), description="EOSdash server IP address."
    )
    eosdash_port: Optional[int] = Field(default=8504, description="EOSdash server IP port number.")

    @field_validator("host", "eosdash_host", mode="before")
    def validate_server_host(
        cls, value: Optional[Union[str, IPvAnyAddress]]
    ) -> Optional[Union[str, IPvAnyAddress]]:
        if isinstance(value, str):
            if value.lower() in ("localhost", "loopback"):
                value = "127.0.0.1"
        return value
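A sketch of the validator's effect, assuming the settings model can be instantiated standalone:

    # "localhost" and "loopback" are normalized to the loopback IP before validation.
    settings = ServerCommonSettings(host="localhost")
    assert str(settings.host) == "127.0.0.1"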
@field_validator("port", "eosdash_port")
|
||||
def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
|
||||
if value is not None and not (1024 <= value <= 49151):
|
||||
|
@@ -1,5 +1,4 @@
import json
import logging
import os
import textwrap
from collections.abc import Sequence
@@ -47,7 +46,7 @@ class VisualizationReport(ConfigMixin):
        """Add a chart function to the current group and save it as a PNG and SVG."""
        self.current_group.append(chart_func)
        if self.create_img and title:
            server_output_dir = self.config.general.data_cache_path
            server_output_dir = self.config.cache.path()
            server_output_dir.mkdir(parents=True, exist_ok=True)
            fig, ax = plt.subplots()
            chart_func()
@@ -615,7 +614,7 @@ def prepare_visualize(

    if filtered_balance.size > 0 or filtered_losses.size > 0:
        report.finalize_group()
    if logger.level == logging.DEBUG or results["fixed_seed"]:
    if logger.level == "DEBUG" or results["fixed_seed"]:
        report.create_line_chart(
            0,
            [
@@ -711,9 +710,9 @@ def generate_example_report(filename: str = "example_report.pdf") -> None:

    report.finalize_group()  # Finalize the third group of charts

    logger.setLevel(logging.DEBUG)  # set level for example report
    logger.setLevel("DEBUG")  # set level for example report

    if logger.level == logging.DEBUG:
    if logger.level == "DEBUG":
        report.create_line_chart(
            x_hours,
            [np.array([0.2, 0.25, 0.3, 0.35])],
@@ -1,18 +1,26 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
from http import HTTPStatus
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Generator, Optional, Union
|
||||
from unittest.mock import PropertyMock, patch
|
||||
|
||||
import pendulum
|
||||
import psutil
|
||||
import pytest
|
||||
from xprocess import ProcessStarter
|
||||
import requests
|
||||
from xprocess import ProcessStarter, XProcess
|
||||
|
||||
from akkudoktoreos.config.config import ConfigEOS, get_config
|
||||
from akkudoktoreos.core.logging import get_logger
|
||||
from akkudoktoreos.server.server import get_default_host
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
@@ -48,6 +56,12 @@ def pytest_addoption(parser):
        default=False,
        help="Verify that user config file is non-existent (will also fail if user config file exists before test run).",
    )
    parser.addoption(
        "--system-test",
        action="store_true",
        default=False,
        help="System test mode. Tests may access real resources, like prediction providers!",
    )


@pytest.fixture
@@ -64,6 +78,18 @@ def config_mixin(config_eos):
    yield config_mixin_patch


@pytest.fixture
def is_system_test(request):
    yield bool(request.config.getoption("--system-test"))


@pytest.fixture
def prediction_eos():
    from akkudoktoreos.prediction.prediction import get_prediction

    return get_prediction()


@pytest.fixture
def devices_eos(config_mixin):
    from akkudoktoreos.devices.devices import get_devices

@@ -87,13 +113,33 @@ def devices_mixin(devices_eos):
# Before activating, make sure that no user config file exists (e.g. ~/.config/net.akkudoktoreos.eos/EOS.config.json)
@pytest.fixture(autouse=True)
def cfg_non_existent(request):
    if not bool(request.config.getoption("--check-config-side-effect")):
        yield
    if bool(request.config.getoption("--check-config-side-effect")):
        return

    # Before test
    from platformdirs import user_config_dir

    user_dir = user_config_dir(ConfigEOS.APP_NAME)
    assert not Path(user_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME).exists()
    assert not Path.cwd().joinpath(ConfigEOS.CONFIG_FILE_NAME).exists()
    user_config_file = Path(user_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME)
    cwd_config_file = Path.cwd().joinpath(ConfigEOS.CONFIG_FILE_NAME)
    assert not user_config_file.exists(), (
        f"Config file {user_config_file} exists, please delete before test!"
    )
    assert not cwd_config_file.exists(), (
        f"Config file {cwd_config_file} exists, please delete before test!"
    )

    # Yield to test
    yield

    # After test
    assert not user_config_file.exists(), (
        f"Config file {user_config_file} created, please check test!"
    )
    assert not cwd_config_file.exists(), (
        f"Config file {cwd_config_file} created, please check test!"
    )


@pytest.fixture(autouse=True)
@@ -149,28 +195,30 @@ def config_eos(
    assert config_file.exists()
    assert not config_file_cwd.exists()
    assert config_default_dirs[-1] / "data" == config_eos.general.data_folder_path
    assert config_default_dirs[-1] / "data/cache" == config_eos.general.data_cache_path
    assert config_default_dirs[-1] / "data/cache" == config_eos.cache.path()
    assert config_default_dirs[-1] / "data/output" == config_eos.general.data_output_path
    return config_eos


@pytest.fixture
def config_default_dirs():
def config_default_dirs(tmpdir):
    """Fixture that provides a list of directories to be used as config dir."""
    with tempfile.TemporaryDirectory() as tmp_user_home_dir:
    tmp_user_home_dir = Path(tmpdir)

    # Default config directory from platform user config directory
    config_default_dir_user = Path(tmp_user_home_dir) / "config"
    config_default_dir_user = tmp_user_home_dir / "config"

    # Default config directory from current working directory
    config_default_dir_cwd = Path(tmp_user_home_dir) / "cwd"
    config_default_dir_cwd = tmp_user_home_dir / "cwd"
    config_default_dir_cwd.mkdir()

    # Default config directory from default config file
    config_default_dir_default = Path(__file__).parent.parent.joinpath("src/akkudoktoreos/data")

    # Default data directory from platform user data directory
    data_default_dir_user = Path(tmp_user_home_dir)
    yield (
    data_default_dir_user = tmp_user_home_dir

    return (
        config_default_dir_user,
        config_default_dir_cwd,
        config_default_dir_default,
@@ -178,23 +226,221 @@ def config_default_dirs():
    )

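Editorial note on the fixture change above: switching from a hand-rolled `tempfile.TemporaryDirectory()` context (with `yield`) to pytest's built-in `tmpdir` fixture lets pytest own the directory's lifetime, so the fixture can simply `return`. A minimal sketch of the same pattern with the modern `tmp_path` variant (`pathlib.Path`-based; fixture name is illustrative):

```python
from pathlib import Path

import pytest


@pytest.fixture
def work_dirs(tmp_path: Path) -> tuple[Path, Path]:
    """pytest creates and cleans up tmp_path per test; no yield/cleanup needed."""
    config_dir = tmp_path / "config"
    cwd_dir = tmp_path / "cwd"
    cwd_dir.mkdir()
    return config_dir, cwd_dir
```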
@contextmanager
def server_base(xprocess: XProcess) -> Generator[dict[str, Union[str, int]], None, None]:
    """Fixture to start the server with temporary EOS_DIR and default config.

    Args:
        xprocess (XProcess): The pytest-xprocess fixture to manage the server process.

    Yields:
        dict[str, Union[str, int]]: A dictionary containing:
            - "server" (str): URL of the server.
            - "eosdash_server" (str): URL of the EOSdash server.
            - "eos_dir" (str): Path to the temporary EOS_DIR.
            - "timeout" (int): Server startup/shutdown timeout in seconds.
    """
    host = get_default_host()
    port = 8503
    eosdash_port = 8504

    # The server port may still be blocked by a previous run even though that server has
    # already shut down. CLOSE_WAIT, TIME_WAIT may typically take up to 120 seconds.
    server_timeout = 120

    server = f"http://{host}:{port}"
    eosdash_server = f"http://{host}:{eosdash_port}"
    eos_tmp_dir = tempfile.TemporaryDirectory()
    eos_dir = str(eos_tmp_dir.name)

    class Starter(ProcessStarter):
        # assure the server package is installed
        try:
            project_dir = Path(__file__).parent.parent
            subprocess.run(
                [sys.executable, "-c", "import akkudoktoreos.server.eos"],
                check=True,
                env=os.environ,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=project_dir,
            )
        except subprocess.CalledProcessError:
            subprocess.run(
                [sys.executable, "-m", "pip", "install", "-e", str(project_dir)],
                env=os.environ,
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=project_dir,
            )

        # Set environment for server run
        env = os.environ.copy()
        env["EOS_DIR"] = eos_dir
        env["EOS_CONFIG_DIR"] = eos_dir

        # command to start server process
        args = [
            sys.executable,
            "-m",
            "akkudoktoreos.server.eos",
            "--host",
            host,
            "--port",
            str(port),
        ]

        # Will wait for 'server_timeout' seconds before timing out
        timeout = server_timeout

        # xprocess will now attempt to clean up upon interruptions
        terminate_on_interrupt = True

        # checks if our server is ready
        def startup_check(self):
            try:
                result = requests.get(f"{server}/v1/health", timeout=2)
                if result.status_code == 200:
                    return True
            except:
                pass
            return False

    def cleanup_eos_eosdash():
        # Cleanup any EOS process left.
        if os.name == "nt":
            # Windows does not provide SIGKILL
            sigkill = signal.SIGTERM
        else:
            sigkill = signal.SIGKILL  # type: ignore
        # - Use pid from EOS health endpoint
        try:
            result = requests.get(f"{server}/v1/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, sigkill)
                time.sleep(1)
                result = requests.get(f"{server}/v1/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except:
            pass
        # - Use pids from processes on EOS port
        for retries in range(int(server_timeout / 3)):
            pids: list[int] = []
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == port:
                    if conn.pid not in pids:
                        # Get fresh process info
                        try:
                            process = psutil.Process(conn.pid)
                            process_info = process.as_dict(attrs=["pid", "cmdline"])
                            if "akkudoktoreos.server.eos" in process_info["cmdline"]:
                                pids.append(conn.pid)
                        except:
                            # PID may already be dead
                            pass
            for pid in pids:
                os.kill(pid, sigkill)
            if len(pids) == 0:
                break
            time.sleep(3)
        assert len(pids) == 0
        # Cleanup any EOSdash processes left.
        # - Use pid from EOSdash health endpoint
        try:
            result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, sigkill)
                time.sleep(1)
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except:
            pass
        # - Use pids from processes on EOSdash port
        for retries in range(int(server_timeout / 3)):
            pids = []
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == eosdash_port:
                    if conn.pid not in pids:
                        # Get fresh process info
                        try:
                            process = psutil.Process(conn.pid)
                            process_info = process.as_dict(attrs=["pid", "cmdline"])
                            if "akkudoktoreos.server.eosdash" in process_info["cmdline"]:
                                pids.append(conn.pid)
                        except:
                            # PID may already be dead
                            pass
            for pid in pids:
                os.kill(pid, sigkill)
            if len(pids) == 0:
                break
            time.sleep(3)
        assert len(pids) == 0

    # Kill all running eos and eosdash processes - just to be sure
    cleanup_eos_eosdash()

    # Ensure there is an empty config file in the temporary EOS directory
    config_file_path = Path(eos_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME)
    with config_file_path.open(mode="w", encoding="utf-8", newline="\n") as fd:
        json.dump({}, fd)

    # ensure process is running and return its logfile
    pid, logfile = xprocess.ensure("eos", Starter)
    logger.info(f"Started EOS ({pid}). This may take very long (up to {server_timeout} seconds).")
    logger.info(f"View xprocess logfile at: {logfile}")

    yield {
        "server": server,
        "eosdash_server": eosdash_server,
        "eos_dir": eos_dir,
        "timeout": server_timeout,
    }

    # clean up whole process tree afterwards
    xprocess.getinfo("eos").terminate()

    # Cleanup any EOS process left.
    cleanup_eos_eosdash()

    # Remove temporary EOS_DIR
    eos_tmp_dir.cleanup()

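Editorial note: the 120-second `server_timeout` above exists because a TCP listening address can linger in CLOSE_WAIT/TIME_WAIT after the process exits, so an immediate restart may fail to bind. A minimal sketch of the underlying behavior and the usual mitigation (general socket code, not part of the EOS test suite):

```python
import socket

# A freshly closed listener can leave the address in TIME_WAIT; binding again
# may raise "Address already in use" until the kernel releases it.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# SO_REUSEADDR lets a new listener bind while the old address is in TIME_WAIT.
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("127.0.0.1", 8503))
sock.listen()
sock.close()
```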
@pytest.fixture(scope="class")
def server_setup_for_class(xprocess) -> Generator[dict[str, Union[str, int]], None, None]:
    """A fixture to start the server for a test class."""
    with server_base(xprocess) as result:
        yield result


@pytest.fixture(scope="function")
def server_setup_for_function(xprocess) -> Generator[dict[str, Union[str, int]], None, None]:
    """A fixture to start the server for a test function."""
    with server_base(xprocess) as result:
        yield result


@pytest.fixture
def server(xprocess, config_eos, config_default_dirs):
def server(xprocess, config_eos, config_default_dirs) -> Generator[str, None, None]:
    """Fixture to start the server.

    Provides URL of the server.
    """
    # create url/port info to the server
    url = "http://0.0.0.0:8503"

    class Starter(ProcessStarter):
        # Set environment before any subprocess run, to keep custom config dir
        env = os.environ.copy()
        env["EOS_DIR"] = str(config_default_dirs[-1])
        project_dir = config_eos.package_root_path
        project_dir = config_eos.package_root_path.parent.parent

        # assure the server package is installed
        try:
            subprocess.run(
                [sys.executable, "-c", "import akkudoktoreos.server.eos"],
                [sys.executable, "-c", "import", "akkudoktoreos.server.eos"],
                check=True,
                env=env,
                stdout=subprocess.PIPE,
@@ -203,7 +449,7 @@ def server(xprocess, config_eos, config_default_dirs):
            )
        except subprocess.CalledProcessError:
            subprocess.run(
                [sys.executable, "-m", "pip", "install", "-e", project_dir],
                [sys.executable, "-m", "pip", "install", "-e", str(project_dir)],
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
@@ -212,24 +458,26 @@ def server(xprocess, config_eos, config_default_dirs):
        # command to start server process
        args = [sys.executable, "-m", "akkudoktoreos.server.eos"]

        # startup pattern
        pattern = "Application startup complete."
        # search this number of lines for the startup pattern, if not found
        # a RuntimeError will be raised informing the user
        max_read_lines = 30

        # will wait for 30 seconds before timing out
        timeout = 30
        # will wait for 10 seconds before timing out
        timeout = 10

        # xprocess will now attempt to clean up upon interruptions
        terminate_on_interrupt = True

        # checks if our server is ready
        def startup_check(self):
            try:
                result = requests.get(f"{url}/v1/health")
                if result.status_code == 200:
                    return True
            except:
                pass
            return False

    # ensure process is running and return its logfile
    pid, logfile = xprocess.ensure("eos", Starter)
    print(f"View xprocess logfile at: {logfile}")

    # create url/port info to the server
    url = "http://127.0.0.1:8503"
    yield url

    # clean up whole process tree afterwards
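Editorial note: the fixture above combines a log `pattern` with a `startup_check` callback; with pytest-xprocess, polling a health endpoint is typically the more reliable readiness signal, because log wording can change between releases. A standalone sketch of such a poll loop (URL layout and timing values are illustrative):

```python
import time

import requests


def wait_for_health(url: str, timeout_s: float = 30.0, interval_s: float = 0.5) -> bool:
    """Poll a /v1/health endpoint until it answers 200 OK or the timeout expires."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        try:
            if requests.get(f"{url}/v1/health", timeout=2).status_code == 200:
                return True
        except requests.RequestException:
            pass  # server not accepting connections yet
        time.sleep(interval_s)
    return False
```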
@@ -115,9 +115,9 @@ def test_soc_limits(setup_pv_battery):

def test_max_charge_power_w(setup_pv_battery):
    battery = setup_pv_battery
    assert (
        battery.parameters.max_charge_power_w == 8000
    ), "Default max charge power should be 5000W, We ask for 8000W here"
    assert battery.parameters.max_charge_power_w == 8000, (
        "Default max charge power should be 5000W, We ask for 8000W here"
    )


def test_charge_energy_within_limits(setup_pv_battery):
@@ -139,9 +139,9 @@ def test_charge_energy_exceeds_capacity(setup_pv_battery):
    # Try to overcharge beyond max capacity
    charged_wh, losses_wh = battery.charge_energy(wh=20000, hour=2)

    assert (
        charged_wh + initial_soc_wh <= battery.max_soc_wh
    ), "Charging should not exceed max capacity"
    assert charged_wh + initial_soc_wh <= battery.max_soc_wh, (
        "Charging should not exceed max capacity"
    )
    assert losses_wh >= 0, "Losses should not be negative"
    assert battery.soc_wh == battery.max_soc_wh, "SOC should be at max after overcharge attempt"

@@ -169,9 +169,9 @@ def test_charge_energy_relative_power(setup_pv_battery):

    assert charged_wh > 0, "Charging should occur with relative power"
    assert losses_wh >= 0, "Losses should not be negative"
    assert (
        charged_wh <= battery.max_charge_power_w * relative_power
    ), "Charging should respect relative power limit"
    assert charged_wh <= battery.max_charge_power_w * relative_power, (
        "Charging should respect relative power limit"
    )
    assert battery.soc_wh > 0, "SOC should increase after charging"


@@ -200,19 +200,19 @@ def test_car_and_pv_battery_discharge_and_max_charge_power(setup_pv_battery, set
    # Test discharge for PV battery
    pv_discharged_wh, pv_loss_wh = pv_battery.discharge_energy(3000, 5)
    assert pv_discharged_wh > 0, "PV battery should discharge energy"
    assert (
        pv_battery.current_soc_percentage() >= pv_battery.parameters.min_soc_percentage
    ), "PV battery SOC should stay above min SOC"
    assert (
        pv_battery.parameters.max_charge_power_w == 8000
    ), "PV battery max charge power should remain as defined"
    assert pv_battery.current_soc_percentage() >= pv_battery.parameters.min_soc_percentage, (
        "PV battery SOC should stay above min SOC"
    )
    assert pv_battery.parameters.max_charge_power_w == 8000, (
        "PV battery max charge power should remain as defined"
    )

    # Test discharge for car battery
    car_discharged_wh, car_loss_wh = car_battery.discharge_energy(5000, 10)
    assert car_discharged_wh > 0, "Car battery should discharge energy"
    assert (
        car_battery.current_soc_percentage() >= car_battery.parameters.min_soc_percentage
    ), "Car battery SOC should stay above min SOC"
    assert (
        car_battery.parameters.max_charge_power_w == 7000
    ), "Car battery max charge power should remain as defined"
    assert car_battery.current_soc_percentage() >= car_battery.parameters.min_soc_percentage, (
        "Car battery SOC should stay above min SOC"
    )
    assert car_battery.parameters.max_charge_power_w == 7000, (
        "Car battery max charge power should remain as defined"
    )
tests/test_cache.py (new file, 694 lines)
@@ -0,0 +1,694 @@
import io
import json
import pickle
import tempfile
from datetime import date, datetime, timedelta
from pathlib import Path
from time import sleep
from unittest.mock import MagicMock, patch

import cachebox
import pytest

from akkudoktoreos.core.cache import (
    CacheFileRecord,
    CacheFileStore,
    CacheUntilUpdateStore,
    cache_in_file,
    cache_until_update,
    cachemethod_until_update,
)
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration

# ---------------------------------
# In-Memory Caching Functionality
# ---------------------------------


# Fixtures for testing
@pytest.fixture
def cache_until_update_store():
    """Ensures CacheUntilUpdateStore is reset between tests."""
    cache = CacheUntilUpdateStore()
    CacheUntilUpdateStore().clear()
    assert len(cache) == 0
    return cache


class TestCacheUntilUpdateStore:
    def test_cache_initialization(self, cache_until_update_store):
        """Test that CacheUntilUpdateStore initializes with the correct properties."""
        cache = CacheUntilUpdateStore()
        assert isinstance(cache.cache, cachebox.LRUCache)
        assert cache.maxsize == 100
        assert len(cache) == 0

    def test_singleton_behavior(self, cache_until_update_store):
        """Test that CacheUntilUpdateStore is a singleton."""
        cache1 = CacheUntilUpdateStore()
        cache2 = CacheUntilUpdateStore()
        assert cache1 is cache2

    def test_cache_storage(self, cache_until_update_store):
        """Test that items can be added and retrieved from the cache."""
        cache = CacheUntilUpdateStore()
        cache["key1"] = "value1"
        assert cache["key1"] == "value1"
        assert len(cache) == 1

    def test_cache_getattr_invalid_method(self, cache_until_update_store):
        """Test that accessing an invalid method raises an AttributeError."""
        with pytest.raises(AttributeError):
            CacheUntilUpdateStore().non_existent_method()  # This should raise AttributeError


class TestCacheUntilUpdateDecorators:
    def test_cachemethod_until_update(self, cache_until_update_store):
        """Test that cachemethod_until_update caches method results."""

        class MyClass:
            @cachemethod_until_update
            def compute(self, value: int) -> int:
                return value * 2

        obj = MyClass()

        # Call method and assert caching
        assert CacheUntilUpdateStore.miss_count == 0
        assert CacheUntilUpdateStore.hit_count == 0
        result1 = obj.compute(5)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 0
        result2 = obj.compute(5)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 1
        assert result1 == result2

    def test_cache_until_update(self, cache_until_update_store):
        """Test that cache_until_update caches function results."""

        @cache_until_update
        def compute(value: int) -> int:
            return value * 3

        # Call function and assert caching
        result1 = compute(4)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_MISS
        result2 = compute(4)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_HIT
        assert result1 == result2

    def test_cache_with_different_arguments(self, cache_until_update_store):
        """Test that caching works for different arguments."""

        class MyClass:
            @cachemethod_until_update
            def compute(self, value: int) -> int:
                return value * 2

        obj = MyClass()

        assert CacheUntilUpdateStore.miss_count == 0
        result1 = obj.compute(3)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_MISS
        assert CacheUntilUpdateStore.miss_count == 1
        result2 = obj.compute(5)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_MISS
        assert CacheUntilUpdateStore.miss_count == 2

        assert result1 == 6
        assert result2 == 10

    def test_cache_clearing(self, cache_until_update_store):
        """Test that cache is cleared between EMS update cycles."""

        class MyClass:
            @cachemethod_until_update
            def compute(self, value: int) -> int:
                return value * 2

        obj = MyClass()
        obj.compute(5)

        # Clear cache
        CacheUntilUpdateStore().clear()

        with pytest.raises(KeyError):
            _ = CacheUntilUpdateStore()["<invalid>"]

    def test_decorator_works_for_standalone_function(self, cache_until_update_store):
        """Test that cache_until_update works with standalone functions."""

        @cache_until_update
        def add(a: int, b: int) -> int:
            return a + b

        assert CacheUntilUpdateStore.miss_count == 0
        assert CacheUntilUpdateStore.hit_count == 0
        result1 = add(1, 2)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 0
        result2 = add(1, 2)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 1

        assert result1 == result2

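Editorial note: the decorators exercised above come from `akkudoktoreos.core.cache`. As a rough mental model only (not the project's actual implementation, which is backed by a cachebox LRU store), an "until update" cache boils down to memoization against a shared store that gets cleared at the start of each update cycle. A minimal self-contained sketch:

```python
import functools
from typing import Any, Callable

_STORE: dict[Any, Any] = {}  # stand-in for the singleton CacheUntilUpdateStore


def cache_until_update_sketch(func: Callable) -> Callable:
    """Memoize results until the store is cleared by the next update cycle."""

    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        key = (func.__qualname__, args, tuple(sorted(kwargs.items())))
        if key not in _STORE:  # miss: compute and remember
            _STORE[key] = func(*args, **kwargs)
        return _STORE[key]  # hit: reuse stored result

    return wrapper


@cache_until_update_sketch
def compute(value: int) -> int:
    return value * 2


assert compute(5) == 10  # miss
assert compute(5) == 10  # hit
_STORE.clear()  # what the start of an update cycle would do
```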
# -----------------------------
# CacheFileStore
# -----------------------------


@pytest.fixture
def temp_store_file():
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        yield Path(temp_file.file.name)
    # temp_file.unlink()


@pytest.fixture
def cache_file_store(temp_store_file):
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    cache = CacheFileStore()
    cache._store_file = temp_store_file
    cache.clear(clear_all=True)
    assert len(cache._store) == 0
    return cache


class TestCacheFileStore:
    def test_generate_cache_file_key(self, cache_file_store):
        """Test cache file key generation based on URL and date."""
        key = "http://example.com"

        # Provide until date - assure until_dt is used.
        until_dt = to_datetime("2024-10-01")
        cache_file_key, cache_file_until_dt, ttl_duration = (
            cache_file_store._generate_cache_file_key(key=key, until_datetime=until_dt)
        )
        assert cache_file_key is not None
        assert compare_datetimes(cache_file_until_dt, until_dt).equal

        # Provide until date again - assure same key is generated.
        cache_file_key1, cache_file_until_dt1, ttl_duration1 = (
            cache_file_store._generate_cache_file_key(key=key, until_datetime=until_dt)
        )
        assert cache_file_key1 == cache_file_key
        assert compare_datetimes(cache_file_until_dt1, until_dt).equal

        # Provide no until date - assure today EOD is used.
        no_until_dt = to_datetime().end_of("day")
        cache_file_key, cache_file_until_dt, ttl_duration = (
            cache_file_store._generate_cache_file_key(key)
        )
        assert cache_file_key is not None
        assert compare_datetimes(cache_file_until_dt, no_until_dt).equal

        # Provide with_ttl - assure until_dt is used.
        until_dt = to_datetime().add(hours=1)
        cache_file_key, cache_file_until_dt, ttl_duration = (
            cache_file_store._generate_cache_file_key(key, with_ttl="1 hour")
        )
        assert cache_file_key is not None
        assert compare_datetimes(cache_file_until_dt, until_dt).approximately_equal
        assert ttl_duration == to_duration("1 hour")

        # Provide with_ttl again - assure same key is generated.
        until_dt = to_datetime().add(hours=1)
        cache_file_key1, cache_file_until_dt1, ttl_duration1 = (
            cache_file_store._generate_cache_file_key(key=key, with_ttl="1 hour")
        )
        assert cache_file_key1 == cache_file_key
        assert compare_datetimes(cache_file_until_dt1, until_dt).approximately_equal
        assert ttl_duration1 == to_duration("1 hour")

        # Provide different with_ttl - assure different key is generated.
        until_dt = to_datetime().add(hours=1, minutes=1)
        cache_file_key2, cache_file_until_dt2, ttl_duration2 = (
            cache_file_store._generate_cache_file_key(key=key, with_ttl="1 hour 1 minute")
        )
        assert cache_file_key2 != cache_file_key
        assert compare_datetimes(cache_file_until_dt2, until_dt).approximately_equal
        assert ttl_duration2 == to_duration("1 hour 1 minute")

    def test_get_file_path(self, cache_file_store):
        """Test get file path from cache file object."""
        cache_file = cache_file_store.create("test_file", mode="w+", suffix=".txt")
        file_path = cache_file_store._get_file_path(cache_file)

        assert file_path is not None

    def test_until_datetime_by_options(self, cache_file_store):
        """Test until datetime calculation based on options."""
        now = to_datetime()

        # Test with until_datetime
        result, ttl_duration = cache_file_store._until_datetime_by_options(until_datetime=now)
        assert result == now
        assert ttl_duration is None

        # -- From now on we expect an until_datetime in one hour
        ttl_duration_expected = to_duration("1 hour")

        # Test with with_ttl as timedelta
        until_datetime_expected = to_datetime().add(hours=1)
        ttl = timedelta(hours=1)
        result, ttl_duration = cache_file_store._until_datetime_by_options(with_ttl=ttl)
        assert compare_datetimes(result, until_datetime_expected).approximately_equal
        assert ttl_duration == ttl_duration_expected

        # Test with with_ttl as int (seconds)
        until_datetime_expected = to_datetime().add(hours=1)
        ttl_seconds = 3600
        result, ttl_duration = cache_file_store._until_datetime_by_options(with_ttl=ttl_seconds)
        assert compare_datetimes(result, until_datetime_expected).approximately_equal
        assert ttl_duration == ttl_duration_expected

        # Test with with_ttl as string ("1 hour")
        until_datetime_expected = to_datetime().add(hours=1)
        ttl_string = "1 hour"
        result, ttl_duration = cache_file_store._until_datetime_by_options(with_ttl=ttl_string)
        assert compare_datetimes(result, until_datetime_expected).approximately_equal
        assert ttl_duration == ttl_duration_expected

        # -- From now on we expect an until_datetime today at end of day
        until_datetime_expected = to_datetime().end_of("day")
        ttl_duration_expected = None

        # Test default case (end of today)
        result, ttl_duration = cache_file_store._until_datetime_by_options()
        assert compare_datetimes(result, until_datetime_expected).equal
        assert ttl_duration == ttl_duration_expected

        # -- From now on we expect an until_datetime in one day at end of day
        until_datetime_expected = to_datetime().add(days=1).end_of("day")
        assert ttl_duration == ttl_duration_expected

        # Test with until_date as date
        until_date = date.today() + timedelta(days=1)
        result, ttl_duration = cache_file_store._until_datetime_by_options(until_date=until_date)
        assert compare_datetimes(result, until_datetime_expected).equal
        assert ttl_duration == ttl_duration_expected

        # -- Test with multiple options (until_datetime takes precedence)
        specific_datetime = to_datetime().add(days=2)
        result, ttl_duration = cache_file_store._until_datetime_by_options(
            until_date=to_datetime().add(days=1).date(),
            until_datetime=specific_datetime,
            with_ttl=ttl,
        )
        assert compare_datetimes(result, specific_datetime).equal
        assert ttl_duration is None

        # Test with invalid inputs
        with pytest.raises(ValueError):
            cache_file_store._until_datetime_by_options(until_date="invalid-date")
        with pytest.raises(ValueError):
            cache_file_store._until_datetime_by_options(with_ttl="invalid-ttl")
        with pytest.raises(ValueError):
            cache_file_store._until_datetime_by_options(until_datetime="invalid-datetime")

    def test_create_cache_file(self, cache_file_store):
        """Test the creation of a cache file and ensure it is stored correctly."""
        # Create a cache file for today's date
        cache_file = cache_file_store.create("test_file", mode="w+", suffix=".txt")

        # Check that the file exists in the store and is a file-like object
        assert cache_file is not None
        assert hasattr(cache_file, "name")
        assert cache_file.name.endswith(".txt")

        # Write some data to the file
        cache_file.seek(0)
        cache_file.write("Test data")
        cache_file.seek(0)  # Reset file pointer
        assert cache_file.read() == "Test data"

    def test_get_cache_file(self, cache_file_store):
        """Test retrieving an existing cache file by key."""
        # Create a cache file and write data to it
        cache_file = cache_file_store.create("test_file", mode="w+")
        cache_file.seek(0)
        cache_file.write("Test data")
        cache_file.seek(0)

        # Retrieve the cache file and verify the data
        retrieved_file = cache_file_store.get("test_file")
        assert retrieved_file is not None
        retrieved_file.seek(0)
        assert retrieved_file.read() == "Test data"

    def test_set_custom_file_object(self, cache_file_store):
        """Test setting a custom file-like object (BytesIO or StringIO) in the store."""
        # Create a BytesIO object and set it into the cache
        file_obj = io.BytesIO(b"Binary data")
        cache_file_store.set("binary_file", file_obj)

        # Retrieve the file from the store
        retrieved_file = cache_file_store.get("binary_file")
        assert isinstance(retrieved_file, io.BytesIO)
        retrieved_file.seek(0)
        assert retrieved_file.read() == b"Binary data"

    def test_delete_cache_file(self, cache_file_store):
        """Test deleting a cache file from the store."""
        # Create multiple cache files
        cache_file1 = cache_file_store.create("file1")
        assert hasattr(cache_file1, "name")
        cache_file2 = cache_file_store.create("file2")
        assert hasattr(cache_file2, "name")

        # Ensure the files are in the store
        assert cache_file_store.get("file1") is cache_file1
        assert cache_file_store.get("file2") is cache_file2

        # Delete cache files
        cache_file_store.delete("file1")
        cache_file_store.delete("file2")

        # Ensure the store is empty
        assert cache_file_store.get("file1") is None
        assert cache_file_store.get("file2") is None

    def test_clear_all_cache_files(self, cache_file_store):
        """Test clearing all cache files from the store."""
        # Create multiple cache files
        cache_file1 = cache_file_store.create("file1")
        assert hasattr(cache_file1, "name")
        cache_file2 = cache_file_store.create("file2")
        assert hasattr(cache_file2, "name")

        # Ensure the files are in the store
        assert cache_file_store.get("file1") is cache_file1
        assert cache_file_store.get("file2") is cache_file2

        current_store = cache_file_store.current_store()
        assert current_store != {}

        # Clear all cache files
        cache_file_store.clear(clear_all=True)

        # Ensure the store is empty
        assert cache_file_store.get("file1") is None
        assert cache_file_store.get("file2") is None

        current_store = cache_file_store.current_store()
        assert current_store == {}

    def test_clear_cache_files_by_date(self, cache_file_store):
        """Test clearing cache files from the store by date."""
        # Create multiple cache files
        cache_file1 = cache_file_store.create("file1")
        assert hasattr(cache_file1, "name")
        cache_file2 = cache_file_store.create("file2")
        assert hasattr(cache_file2, "name")

        # Ensure the files are in the store
        assert cache_file_store.get("file1") is cache_file1
        assert cache_file_store.get("file2") is cache_file2

        # Clear cache files that are older than today
        cache_file_store.clear(before_datetime=to_datetime().start_of("day"))

        # Ensure the files are still in the store
        assert cache_file_store.get("file1") is cache_file1
        assert cache_file_store.get("file2") is cache_file2

        # Clear cache files that are older than tomorrow
        cache_file_store.clear(before_datetime=datetime.now() + timedelta(days=1))

        # Ensure the store is empty
        assert cache_file_store.get("file1") is None
        assert cache_file_store.get("file2") is None

    def test_cache_file_with_date(self, cache_file_store):
        """Test creating and retrieving cache files with a specific date."""
        # Use a specific date for cache file creation
        specific_date = datetime(2023, 10, 10)
        cache_file = cache_file_store.create("dated_file", mode="w+", until_date=specific_date)

        # Write data to the cache file
        cache_file.write("Dated data")
        cache_file.seek(0)

        # Retrieve the cache file with the specific date
        retrieved_file = cache_file_store.get("dated_file", until_date=specific_date)
        assert retrieved_file is not None
        retrieved_file.seek(0)
        assert retrieved_file.read() == "Dated data"

    def test_recreate_existing_cache_file(self, cache_file_store):
        """Test creating a cache file with an existing key does not overwrite the existing file."""
        # Create a cache file
        cache_file = cache_file_store.create("test_file", mode="w+")
        cache_file.write("Original data")
        cache_file.seek(0)

        # Attempt to recreate the same file (should return the existing one)
        new_file = cache_file_store.create("test_file")
        assert new_file is cache_file  # Should be the same object
        new_file.seek(0)
        assert new_file.read() == "Original data"  # Data should be preserved

        # Assure cache file store is a singleton
        cache_file_store2 = CacheFileStore()
        new_file = cache_file_store2.get("test_file")
        assert new_file is cache_file  # Should be the same object

    def test_cache_file_store_is_singleton(self, cache_file_store):
        """Test re-creating a cache store provides the same store."""
        # Create a cache file
        cache_file = cache_file_store.create("test_file", mode="w+")
        cache_file.write("Original data")
        cache_file.seek(0)

        # Assure cache file store is a singleton
        cache_file_store2 = CacheFileStore()
        new_file = cache_file_store2.get("test_file")
        assert new_file is cache_file  # Should be the same object

    def test_cache_file_store_save_store(self, cache_file_store):
        # Create a sample cache record
        cache_file = MagicMock()
        cache_file.name = "cache_file_path"
        cache_file.mode = "wb+"
        cache_record = CacheFileRecord(
            cache_file=cache_file, until_datetime=to_datetime(), ttl_duration=None
        )
        cache_file_store._store = {"test_key": cache_record}

        # Save the store to the file
        cache_file_store.save_store()

        # Verify the file content
        with cache_file_store._store_file.open("r", encoding="utf-8", newline=None) as f:
            store_loaded = json.load(f)
            assert "test_key" in store_loaded
            assert store_loaded["test_key"]["cache_file"] == "cache_file_path"
            assert store_loaded["test_key"]["mode"] == "wb+"
            assert store_loaded["test_key"]["until_datetime"] == to_datetime(
                cache_record.until_datetime, as_string=True
            )
            assert store_loaded["test_key"]["ttl_duration"] is None

    def test_cache_file_store_load_store(self, cache_file_store):
        # Create a sample cache record and save it to the file
        cache_record = {
            "test_key": {
                "cache_file": "cache_file_path",
                "mode": "wb+",
                "until_datetime": to_datetime(as_string=True),
                "ttl_duration": None,
            }
        }
        with cache_file_store._store_file.open("w", encoding="utf-8", newline="\n") as f:
            json.dump(cache_record, f, indent=4)

        # Mock the open function to return a MagicMock for the cache file
        with patch("builtins.open", new_callable=MagicMock) as mock_open:
            mock_open.return_value.name = "cache_file_path"
            mock_open.return_value.mode = "wb+"

            # Load the store from the file
            cache_file_store.load_store()

            # Verify the loaded store
            assert "test_key" in cache_file_store._store
            loaded_record = cache_file_store._store["test_key"]
            assert loaded_record.cache_file.name == "cache_file_path"
            assert loaded_record.cache_file.mode == "wb+"
            assert loaded_record.until_datetime == to_datetime(
                cache_record["test_key"]["until_datetime"]
            )
            assert loaded_record.ttl_duration is None


class TestCacheFileDecorators:
    def test_cache_in_file_decorator_caches_function_result(self, cache_file_store):
        """Test that the cache_in_file decorator caches a function result."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function(until_date=None):
            return "Some expensive computation result"

        # Call the decorated function (should store result in cache)
        result = my_function(until_date=datetime.now() + timedelta(days=1))
        assert result == "Some expensive computation result"

        # Assert that the create method was called to store the result
        assert len(cache_file_store._store) == 1

        # Check if the result was written to the cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None

        # Assert correct content was written to the file
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == "Some expensive computation result"

    def test_cache_in_file_decorator_uses_cache(self, cache_file_store):
        """Test that the cache_in_file decorator reuses the cached file on subsequent calls."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function(until_date=None):
            return "New result"

        # Call the decorated function (should store result in cache)
        result = my_function(until_date=to_datetime().add(days=1))
        assert result == "New result"

        # Assert result was written to cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result

        # Modify cache file
        result2 = "Cached result"
        cache_file.seek(0)
        cache_file.write(result2)

        # Call the decorated function again (should get result from cache)
        result = my_function(until_date=to_datetime().add(days=1))
        assert result == result2

    def test_cache_in_file_decorator_forces_update_data(self, cache_file_store):
        """Test that the cache_in_file decorator updates the data when a force update is requested."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function(until_date=None):
            return "New result"

        until_date = to_datetime().add(days=1).date()

        # Call the decorated function (should store result in cache)
        result1 = "New result"
        result = my_function(until_date=until_date)
        assert result == result1

        # Assert result was written to cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result

        # Modify cache file
        result2 = "Cached result"
        cache_file.seek(0)
        cache_file.write(result2)
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result2

        # Call the decorated function again with force update (should get result from function)
        result = my_function(until_date=until_date, force_update=True)  # type: ignore[call-arg]
        assert result == result1

        # Assure result was written to the same cache file
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result1

    def test_cache_in_file_handles_ttl(self, cache_file_store):
        """Test that the cache_in_file decorator handles the with_ttl parameter."""

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function():
            return "New result"

        # Call the decorated function
        result1 = my_function(with_ttl="1 second")  # type: ignore[call-arg]
        assert result1 == "New result"
        assert len(cache_file_store._store) == 1
        key = list(cache_file_store._store.keys())[0]

        # Assert result was written to cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result1

        # Modify cache file
        result2 = "Cached result"
        cache_file.seek(0)
        cache_file.write(result2)
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result2

        # Call the decorated function again
        result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result2
        assert result == result2

        # Wait long enough to let the 1-second cache time out
        sleep(2)

        # Call again - cache should be timed out
        result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
        assert result == result1

    def test_cache_in_file_handles_bytes_return(self, cache_file_store):
        """Test that the cache_in_file decorator handles bytes returned from the function."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a function that returns bytes
        @cache_in_file()
        def my_function(until_date=None) -> bytes:
            return b"Some binary data"

        # Call the decorated function
        result = my_function(until_date=datetime.now() + timedelta(days=1))

        # Check if the binary data was written to the cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert len(cache_file_store._store) == 1
        assert cache_file is not None
        cache_file.seek(0)
        result1 = pickle.load(cache_file)
        assert result1 == result

        # Access cache
        result = my_function(until_date=datetime.now() + timedelta(days=1))
        assert len(cache_file_store._store) == 1
        assert cache_file_store._store[key].cache_file is not None
        assert result1 == result
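Editorial note: taken together, the decorator tests above pin down the public call pattern of `cache_in_file`. A condensed usage sketch distilled from those tests (the function name is illustrative; the `until_date` and `force_update` keywords are injected by the decorator rather than declared by the wrapped function, hence the `type: ignore[call-arg]` hint):

```python
from datetime import datetime, timedelta

from akkudoktoreos.core.cache import cache_in_file


@cache_in_file(mode="w+")
def fetch_report(until_date=None) -> str:
    return "expensive computation result"


until = datetime.now() + timedelta(days=1)
first = fetch_report(until_date=until)   # miss: runs the function, writes the cache file
second = fetch_report(until_date=until)  # hit: read back from the cache file
third = fetch_report(until_date=until, force_update=True)  # type: ignore[call-arg]  # forced re-run
```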
@@ -1,491 +0,0 @@
"""Test Module for CacheFileStore Module."""

import io
import pickle
from datetime import date, datetime, timedelta
from time import sleep

import pytest

from akkudoktoreos.utils.cacheutil import CacheFileStore, cache_in_file
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration

# -----------------------------
# CacheFileStore
# -----------------------------


@pytest.fixture
def cache_store():
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    cache = CacheFileStore()
    cache.clear(clear_all=True)
    assert len(cache._store) == 0
    return cache


def test_generate_cache_file_key(cache_store):
    """Test cache file key generation based on URL and date."""
    key = "http://example.com"

    # Provide until date - assure until_dt is used.
    until_dt = to_datetime("2024-10-01")
    cache_file_key, cache_file_until_dt, ttl_duration = cache_store._generate_cache_file_key(
        key=key, until_datetime=until_dt
    )
    assert cache_file_key is not None
    assert compare_datetimes(cache_file_until_dt, until_dt).equal

    # Provide until date again - assure same key is generated.
    cache_file_key1, cache_file_until_dt1, ttl_duration1 = cache_store._generate_cache_file_key(
        key=key, until_datetime=until_dt
    )
    assert cache_file_key1 == cache_file_key
    assert compare_datetimes(cache_file_until_dt1, until_dt).equal

    # Provide no until date - assure today EOD is used.
    no_until_dt = to_datetime().end_of("day")
    cache_file_key, cache_file_until_dt, ttl_duration = cache_store._generate_cache_file_key(key)
    assert cache_file_key is not None
    assert compare_datetimes(cache_file_until_dt, no_until_dt).equal

    # Provide with_ttl - assure until_dt is used.
    until_dt = to_datetime().add(hours=1)
    cache_file_key, cache_file_until_dt, ttl_duration = cache_store._generate_cache_file_key(
        key, with_ttl="1 hour"
    )
    assert cache_file_key is not None
    assert compare_datetimes(cache_file_until_dt, until_dt).approximately_equal
    assert ttl_duration == to_duration("1 hour")

    # Provide with_ttl again - assure same key is generated.
    until_dt = to_datetime().add(hours=1)
    cache_file_key1, cache_file_until_dt1, ttl_duration1 = cache_store._generate_cache_file_key(
        key=key, with_ttl="1 hour"
    )
    assert cache_file_key1 == cache_file_key
    assert compare_datetimes(cache_file_until_dt1, until_dt).approximately_equal
    assert ttl_duration1 == to_duration("1 hour")

    # Provide different with_ttl - assure different key is generated.
    until_dt = to_datetime().add(hours=1, minutes=1)
    cache_file_key2, cache_file_until_dt2, ttl_duration2 = cache_store._generate_cache_file_key(
        key=key, with_ttl="1 hour 1 minute"
    )
    assert cache_file_key2 != cache_file_key
    assert compare_datetimes(cache_file_until_dt2, until_dt).approximately_equal
    assert ttl_duration2 == to_duration("1 hour 1 minute")


def test_get_file_path(cache_store):
    """Test get file path from cache file object."""
    cache_file = cache_store.create("test_file", mode="w+", suffix=".txt")
    file_path = cache_store._get_file_path(cache_file)

    assert file_path is not None


def test_until_datetime_by_options(cache_store):
    """Test until datetime calculation based on options."""
    now = to_datetime()

    # Test with until_datetime
    result, ttl_duration = cache_store._until_datetime_by_options(until_datetime=now)
    assert result == now
    assert ttl_duration is None

    # -- From now on we expect a until_datetime in one hour
    ttl_duration_expected = to_duration("1 hour")

    # Test with with_ttl as timedelta
    until_datetime_expected = to_datetime().add(hours=1)
    ttl = timedelta(hours=1)
    result, ttl_duration = cache_store._until_datetime_by_options(with_ttl=ttl)
    assert compare_datetimes(result, until_datetime_expected).approximately_equal
    assert ttl_duration == ttl_duration_expected

    # Test with with_ttl as int (seconds)
    until_datetime_expected = to_datetime().add(hours=1)
    ttl_seconds = 3600
    result, ttl_duration = cache_store._until_datetime_by_options(with_ttl=ttl_seconds)
    assert compare_datetimes(result, until_datetime_expected).approximately_equal
    assert ttl_duration == ttl_duration_expected

    # Test with with_ttl as string ("1 hour")
    until_datetime_expected = to_datetime().add(hours=1)
    ttl_string = "1 hour"
    result, ttl_duration = cache_store._until_datetime_by_options(with_ttl=ttl_string)
    assert compare_datetimes(result, until_datetime_expected).approximately_equal
    assert ttl_duration == ttl_duration_expected

    # -- From now on we expect a until_datetime today at end of day
    until_datetime_expected = to_datetime().end_of("day")
    ttl_duration_expected = None

    # Test default case (end of today)
    result, ttl_duration = cache_store._until_datetime_by_options()
    assert compare_datetimes(result, until_datetime_expected).equal
    assert ttl_duration == ttl_duration_expected

    # -- From now on we expect a until_datetime in one day at end of day
    until_datetime_expected = to_datetime().add(days=1).end_of("day")
    assert ttl_duration == ttl_duration_expected

    # Test with until_date as date
    until_date = date.today() + timedelta(days=1)
    result, ttl_duration = cache_store._until_datetime_by_options(until_date=until_date)
    assert compare_datetimes(result, until_datetime_expected).equal
    assert ttl_duration == ttl_duration_expected

    # -- Test with multiple options (until_datetime takes precedence)
    specific_datetime = to_datetime().add(days=2)
    result, ttl_duration = cache_store._until_datetime_by_options(
        until_date=to_datetime().add(days=1).date(),
        until_datetime=specific_datetime,
        with_ttl=ttl,
    )
    assert compare_datetimes(result, specific_datetime).equal
    assert ttl_duration is None

    # Test with invalid inputs
    with pytest.raises(ValueError):
        cache_store._until_datetime_by_options(until_date="invalid-date")
    with pytest.raises(ValueError):
        cache_store._until_datetime_by_options(with_ttl="invalid-ttl")
    with pytest.raises(ValueError):
        cache_store._until_datetime_by_options(until_datetime="invalid-datetime")


def test_create_cache_file(cache_store):
    """Test the creation of a cache file and ensure it is stored correctly."""
    # Create a cache file for today's date
    cache_file = cache_store.create("test_file", mode="w+", suffix=".txt")

    # Check that the file exists in the store and is a file-like object
    assert cache_file is not None
    assert hasattr(cache_file, "name")
    assert cache_file.name.endswith(".txt")

    # Write some data to the file
    cache_file.seek(0)
    cache_file.write("Test data")
    cache_file.seek(0)  # Reset file pointer
    assert cache_file.read() == "Test data"


def test_get_cache_file(cache_store):
    """Test retrieving an existing cache file by key."""
    # Create a cache file and write data to it
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.seek(0)
    cache_file.write("Test data")
    cache_file.seek(0)

    # Retrieve the cache file and verify the data
    retrieved_file = cache_store.get("test_file")
    assert retrieved_file is not None
    retrieved_file.seek(0)
    assert retrieved_file.read() == "Test data"


def test_set_custom_file_object(cache_store):
    """Test setting a custom file-like object (BytesIO or StringIO) in the store."""
    # Create a BytesIO object and set it into the cache
    file_obj = io.BytesIO(b"Binary data")
    cache_store.set("binary_file", file_obj)

    # Retrieve the file from the store
    retrieved_file = cache_store.get("binary_file")
    assert isinstance(retrieved_file, io.BytesIO)
    retrieved_file.seek(0)
    assert retrieved_file.read() == b"Binary data"


def test_delete_cache_file(cache_store):
    """Test deleting a cache file from the store."""
    # Create multiple cache files
    cache_file1 = cache_store.create("file1")
    assert hasattr(cache_file1, "name")
    cache_file2 = cache_store.create("file2")
    assert hasattr(cache_file2, "name")

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Delete cache files
    cache_store.delete("file1")
    cache_store.delete("file2")

    # Ensure the store is empty
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None


def test_clear_all_cache_files(cache_store):
    """Test clearing all cache files from the store."""
    # Create multiple cache files
    cache_file1 = cache_store.create("file1")
    assert hasattr(cache_file1, "name")
    cache_file2 = cache_store.create("file2")
    assert hasattr(cache_file2, "name")

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Clear all cache files
    cache_store.clear(clear_all=True)

    # Ensure the store is empty
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None


def test_clear_cache_files_by_date(cache_store):
    """Test clearing cache files from the store by date."""
    # Create multiple cache files
    cache_file1 = cache_store.create("file1")
    assert hasattr(cache_file1, "name")
    cache_file2 = cache_store.create("file2")
    assert hasattr(cache_file2, "name")

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Clear cache files that are older than today
    cache_store.clear(before_datetime=to_datetime().start_of("day"))

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Clear cache files that are older than tomorrow
    cache_store.clear(before_datetime=datetime.now() + timedelta(days=1))

    # Ensure the store is empty
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None


def test_cache_file_with_date(cache_store):
    """Test creating and retrieving cache files with a specific date."""
    # Use a specific date for cache file creation
    specific_date = datetime(2023, 10, 10)
    cache_file = cache_store.create("dated_file", mode="w+", until_date=specific_date)

    # Write data to the cache file
    cache_file.write("Dated data")
    cache_file.seek(0)

    # Retrieve the cache file with the specific date
    retrieved_file = cache_store.get("dated_file", until_date=specific_date)
    assert retrieved_file is not None
    retrieved_file.seek(0)
    assert retrieved_file.read() == "Dated data"


def test_recreate_existing_cache_file(cache_store):
    """Test creating a cache file with an existing key does not overwrite the existing file."""
    # Create a cache file
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.write("Original data")
    cache_file.seek(0)

    # Attempt to recreate the same file (should return the existing one)
    new_file = cache_store.create("test_file")
    assert new_file is cache_file  # Should be the same object
    new_file.seek(0)
    assert new_file.read() == "Original data"  # Data should be preserved

    # Assure cache file store is a singleton
    cache_store2 = CacheFileStore()
    new_file = cache_store2.get("test_file")
    assert new_file is cache_file  # Should be the same object


def test_cache_store_is_singleton(cache_store):
    """Test re-creating a cache store provides the same store."""
    # Create a cache file
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.write("Original data")
    cache_file.seek(0)

    # Assure cache file store is a singleton
    cache_store2 = CacheFileStore()
    new_file = cache_store2.get("test_file")
    assert new_file is cache_file  # Should be the same object


def test_cache_in_file_decorator_caches_function_result(cache_store):
    """Test that the cache_in_file decorator caches a function result."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "Some expensive computation result"

    # Call the decorated function (should store result in cache)
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    assert result == "Some expensive computation result"

    # Assert that the create method was called to store the result
    assert len(cache_store._store) == 1

    # Check if the result was written to the cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None

    # Assert correct content was written to the file
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == "Some expensive computation result"


def test_cache_in_file_decorator_uses_cache(cache_store):
    """Test that the cache_in_file decorator reuses cached file on subsequent calls."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "New result"

    # Call the decorated function (should store result in cache)
    result = my_function(until_date=to_datetime().add(days=1))
    assert result == "New result"

    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result

    # Modify cache file
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)

    # Call the decorated function again (should get result from cache)
    result = my_function(until_date=to_datetime().add(days=1))
    assert result == result2


def test_cache_in_file_decorator_forces_update_data(cache_store):
    """Test that the cache_in_file decorator reuses cached file on subsequent calls."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "New result"

    until_date = to_datetime().add(days=1).date()

    # Call the decorated function (should store result in cache)
    result1 = "New result"
    result = my_function(until_date=until_date)
    assert result == result1

    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result

    # Modify cache file
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result2

    # Call the decorated function again with force update (should get result from function)
|
||||
result = my_function(until_date=until_date, force_update=True) # type: ignore[call-arg]
|
||||
assert result == result1
|
||||
|
||||
# Assure result was written to the same cache file
|
||||
cache_file.seek(0) # Move to the start of the file
|
||||
assert cache_file.read() == result1
|
||||
|
||||
|
||||
def test_cache_in_file_handles_ttl(cache_store):
|
||||
"""Test that the cache_infile decorator handles the with_ttl parameter."""
|
||||
|
||||
# Define a simple function to decorate
|
||||
@cache_in_file(mode="w+")
|
||||
def my_function():
|
||||
return "New result"
|
||||
|
||||
# Call the decorated function
|
||||
result1 = my_function(with_ttl="1 second") # type: ignore[call-arg]
|
||||
assert result1 == "New result"
|
||||
assert len(cache_store._store) == 1
|
||||
key = list(cache_store._store.keys())[0]
|
||||
|
||||
# Assert result was written to cache file
|
||||
key = next(iter(cache_store._store))
|
||||
cache_file = cache_store._store[key].cache_file
|
||||
assert cache_file is not None
|
||||
cache_file.seek(0) # Move to the start of the file
|
||||
assert cache_file.read() == result1
|
||||
|
||||
# Modify cache file
|
||||
result2 = "Cached result"
|
||||
cache_file.seek(0)
|
||||
cache_file.write(result2)
|
||||
cache_file.seek(0) # Move to the start of the file
|
||||
assert cache_file.read() == result2
|
||||
|
||||
# Call the decorated function again
|
||||
result = my_function(with_ttl="1 second") # type: ignore[call-arg]
|
||||
cache_file.seek(0) # Move to the start of the file
|
||||
assert cache_file.read() == result2
|
||||
assert result == result2
|
||||
|
||||
# Wait one second to let the cache time out
|
||||
sleep(2)
|
||||
|
||||
# Call again - cache should be timed out
|
||||
result = my_function(with_ttl="1 second") # type: ignore[call-arg]
|
||||
assert result == result1
|
||||
|
||||
|
||||
def test_cache_in_file_handles_bytes_return(cache_store):
|
||||
"""Test that the cache_infile decorator handles bytes returned from the function."""
|
||||
# Clear store to assure it is empty
|
||||
cache_store.clear(clear_all=True)
|
||||
assert len(cache_store._store) == 0
|
||||
|
||||
# Define a function that returns bytes
|
||||
@cache_in_file()
|
||||
def my_function(until_date=None) -> bytes:
|
||||
return b"Some binary data"
|
||||
|
||||
# Call the decorated function
|
||||
result = my_function(until_date=datetime.now() + timedelta(days=1))
|
||||
|
||||
# Check if the binary data was written to the cache file
|
||||
key = next(iter(cache_store._store))
|
||||
cache_file = cache_store._store[key].cache_file
|
||||
assert len(cache_store._store) == 1
|
||||
assert cache_file is not None
|
||||
cache_file.seek(0)
|
||||
result1 = pickle.load(cache_file)
|
||||
assert result1 == result
|
||||
|
||||
# Access cache
|
||||
result = my_function(until_date=datetime.now() + timedelta(days=1))
|
||||
assert len(cache_store._store) == 1
|
||||
assert cache_store._store[key].cache_file is not None
|
||||
assert result1 == result
|
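As a usage summary of the decorator exercised above, here is a minimal sketch. It assumes cache_in_file is importable from the same module as CacheFileStore; the decorated function and its return value are hypothetical, while the mode, with_ttl and force_update keywords mirror the tests.

from akkudoktoreos.core.cache import cache_in_file

@cache_in_file(mode="w+")
def fetch_report(until_date=None):
    # Stand-in for an expensive computation or HTTP request.
    return "report body"

# The first call computes and writes the cache file; a second call within the
# TTL is answered from the cache file; force_update=True recomputes.
first = fetch_report(with_ttl="5 minutes")
second = fetch_report(with_ttl="5 minutes")
assert first == second
fresh = fetch_report(with_ttl="5 minutes", force_update=True)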
@@ -2,8 +2,8 @@ import numpy as np
import pytest

from akkudoktoreos.core.ems import (
    EnergieManagementSystem,
    EnergieManagementSystemParameters,
    EnergyManagement,
    EnergyManagementParameters,
    SimulationResult,
    get_ems,
)
@@ -20,8 +20,8 @@ start_hour = 1

# Example initialization of necessary components
@pytest.fixture
def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
    """Fixture to create an EnergieManagementSystem instance with given test parameters."""
def create_ems_instance(devices_eos, config_eos) -> EnergyManagement:
    """Fixture to create an EnergyManagement instance with given test parameters."""
    # Assure configuration holds the correct values
    config_eos.merge_settings_from_dict(
        {"prediction": {"hours": 48}, "optimization": {"hours": 24}}
@@ -227,7 +227,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
    # Initialize the energy management system with the respective parameters
    ems = get_ems()
    ems.set_parameters(
        EnergieManagementSystemParameters(
        EnergyManagementParameters(
            pv_prognose_wh=pv_prognose_wh,
            strompreis_euro_pro_wh=strompreis_euro_pro_wh,
            einspeiseverguetung_euro_pro_wh=einspeiseverguetung_euro_pro_wh,
@@ -243,7 +243,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:


def test_simulation(create_ems_instance):
    """Test the EnergieManagementSystem simulation method."""
    """Test the EnergyManagement simulation method."""
    ems = create_ems_instance

    # Simulate starting from hour 1 (this value can be adjusted)
@@ -281,69 +281,69 @@ def test_simulation(create_ems_instance):
    assert SimulationResult(**result) is not None

    # Check the length of the main arrays
    assert (
        len(result["Last_Wh_pro_Stunde"]) == 47
    ), "The length of 'Last_Wh_pro_Stunde' should be 47."
    assert (
        len(result["Netzeinspeisung_Wh_pro_Stunde"]) == 47
    ), "The length of 'Netzeinspeisung_Wh_pro_Stunde' should be 47."
    assert (
        len(result["Netzbezug_Wh_pro_Stunde"]) == 47
    ), "The length of 'Netzbezug_Wh_pro_Stunde' should be 47."
    assert (
        len(result["Kosten_Euro_pro_Stunde"]) == 47
    ), "The length of 'Kosten_Euro_pro_Stunde' should be 47."
    assert (
        len(result["akku_soc_pro_stunde"]) == 47
    ), "The length of 'akku_soc_pro_stunde' should be 47."
    assert len(result["Last_Wh_pro_Stunde"]) == 47, (
        "The length of 'Last_Wh_pro_Stunde' should be 47."
    )
    assert len(result["Netzeinspeisung_Wh_pro_Stunde"]) == 47, (
        "The length of 'Netzeinspeisung_Wh_pro_Stunde' should be 47."
    )
    assert len(result["Netzbezug_Wh_pro_Stunde"]) == 47, (
        "The length of 'Netzbezug_Wh_pro_Stunde' should be 47."
    )
    assert len(result["Kosten_Euro_pro_Stunde"]) == 47, (
        "The length of 'Kosten_Euro_pro_Stunde' should be 47."
    )
    assert len(result["akku_soc_pro_stunde"]) == 47, (
        "The length of 'akku_soc_pro_stunde' should be 47."
    )

    # Verify specific values in the 'Last_Wh_pro_Stunde' array
    assert (
        result["Last_Wh_pro_Stunde"][1] == 1527.13
    ), "The value at index 1 of 'Last_Wh_pro_Stunde' should be 1527.13."
    assert (
        result["Last_Wh_pro_Stunde"][2] == 1468.88
    ), "The value at index 2 of 'Last_Wh_pro_Stunde' should be 1468.88."
    assert (
        result["Last_Wh_pro_Stunde"][12] == 1132.03
    ), "The value at index 12 of 'Last_Wh_pro_Stunde' should be 1132.03."
    assert result["Last_Wh_pro_Stunde"][1] == 1527.13, (
        "The value at index 1 of 'Last_Wh_pro_Stunde' should be 1527.13."
    )
    assert result["Last_Wh_pro_Stunde"][2] == 1468.88, (
        "The value at index 2 of 'Last_Wh_pro_Stunde' should be 1468.88."
    )
    assert result["Last_Wh_pro_Stunde"][12] == 1132.03, (
        "The value at index 12 of 'Last_Wh_pro_Stunde' should be 1132.03."
    )

    # Verify that the value at index 0 is 'None'
    # Check that 'Netzeinspeisung_Wh_pro_Stunde' and 'Netzbezug_Wh_pro_Stunde' are consistent
    assert (
        result["Netzbezug_Wh_pro_Stunde"][1] == 0
    ), "The value at index 1 of 'Netzbezug_Wh_pro_Stunde' should be 0."
    assert result["Netzbezug_Wh_pro_Stunde"][1] == 0, (
        "The value at index 1 of 'Netzbezug_Wh_pro_Stunde' should be 0."
    )

    # Verify the total balance
    assert (
        abs(result["Gesamtbilanz_Euro"] - 1.958185274567674) < 1e-5
    ), "Total balance should be 1.958185274567674."
    assert abs(result["Gesamtbilanz_Euro"] - 1.958185274567674) < 1e-5, (
        "Total balance should be 1.958185274567674."
    )

    # Check total revenue and total costs
    assert (
        abs(result["Gesamteinnahmen_Euro"] - 1.168863124510214) < 1e-5
    ), "Total revenue should be 1.168863124510214."
    assert (
        abs(result["Gesamtkosten_Euro"] - 3.127048399077888) < 1e-5
    ), "Total costs should be 3.127048399077888."
    assert abs(result["Gesamteinnahmen_Euro"] - 1.168863124510214) < 1e-5, (
        "Total revenue should be 1.168863124510214."
    )
    assert abs(result["Gesamtkosten_Euro"] - 3.127048399077888) < 1e-5, (
        "Total costs should be 3.127048399077888."
    )

    # Check the losses
    assert (
        abs(result["Gesamt_Verluste"] - 2871.5330639359036) < 1e-5
    ), "Total losses should be 2871.5330639359036."
    assert abs(result["Gesamt_Verluste"] - 2871.5330639359036) < 1e-5, (
        "Total losses should be 2871.5330639359036."
    )

    # Check the values in 'akku_soc_pro_stunde'
    assert (
        result["akku_soc_pro_stunde"][-1] == 42.151590909090906
    ), "The value at index -1 of 'akku_soc_pro_stunde' should be 42.151590909090906."
    assert (
        result["akku_soc_pro_stunde"][1] == 60.08659090909091
    ), "The value at index 1 of 'akku_soc_pro_stunde' should be 60.08659090909091."
    assert result["akku_soc_pro_stunde"][-1] == 42.151590909090906, (
        "The value at index -1 of 'akku_soc_pro_stunde' should be 42.151590909090906."
    )
    assert result["akku_soc_pro_stunde"][1] == 60.08659090909091, (
        "The value at index 1 of 'akku_soc_pro_stunde' should be 60.08659090909091."
    )

    # Check home appliances
    assert (
        sum(ems.home_appliance.get_load_curve()) == 2000
    ), "The sum of 'ems.home_appliance.get_load_curve()' should be 2000."
    assert sum(ems.home_appliance.get_load_curve()) == 2000, (
        "The sum of 'ems.home_appliance.get_load_curve()' should be 2000."
    )

    assert (
        np.nansum(
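Most hunks in the EMS test module above and its sibling below are mechanical: besides the EnergieManagementSystem → EnergyManagement rename, a formatter update moves the parentheses from the asserted expression to the assertion message. A minimal before/after sketch of the pattern, taken from the asserts above:

# Old formatting: the expression is wrapped, the message dangles after it.
assert (
    len(result["Last_Wh_pro_Stunde"]) == 47
), "The length of 'Last_Wh_pro_Stunde' should be 47."

# New formatting: the expression stays on one line; only the message
# is parenthesized.
assert len(result["Last_Wh_pro_Stunde"]) == 47, (
    "The length of 'Last_Wh_pro_Stunde' should be 47."
)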
@@ -2,8 +2,8 @@ import numpy as np
import pytest

from akkudoktoreos.core.ems import (
    EnergieManagementSystem,
    EnergieManagementSystemParameters,
    EnergyManagement,
    EnergyManagementParameters,
    SimulationResult,
    get_ems,
)
@@ -20,8 +20,8 @@ start_hour = 0

# Example initialization of necessary components
@pytest.fixture
def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
    """Fixture to create an EnergieManagementSystem instance with given test parameters."""
def create_ems_instance(devices_eos, config_eos) -> EnergyManagement:
    """Fixture to create an EnergyManagement instance with given test parameters."""
    # Assure configuration holds the correct values
    config_eos.merge_settings_from_dict(
        {"prediction": {"hours": 48}, "optimization": {"hours": 24}}
@@ -130,7 +130,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
    # Initialize the energy management system with the respective parameters
    ems = get_ems()
    ems.set_parameters(
        EnergieManagementSystemParameters(
        EnergyManagementParameters(
            pv_prognose_wh=pv_prognose_wh,
            strompreis_euro_pro_wh=strompreis_euro_pro_wh,
            einspeiseverguetung_euro_pro_wh=einspeiseverguetung_euro_pro_wh,
@@ -153,7 +153,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:


def test_simulation(create_ems_instance):
    """Test the EnergieManagementSystem simulation method."""
    """Test the EnergyManagement simulation method."""
    ems = create_ems_instance

    # Simulate starting from hour 0 (this value can be adjusted)
@@ -211,113 +211,113 @@ def test_simulation(create_ems_instance):
        assert key in result, f"The key '{key}' should be present in the result."

    # Check the length of the main arrays
    assert (
        len(result["Last_Wh_pro_Stunde"]) == 48
    ), "The length of 'Last_Wh_pro_Stunde' should be 48."
    assert (
        len(result["Netzeinspeisung_Wh_pro_Stunde"]) == 48
    ), "The length of 'Netzeinspeisung_Wh_pro_Stunde' should be 48."
    assert (
        len(result["Netzbezug_Wh_pro_Stunde"]) == 48
    ), "The length of 'Netzbezug_Wh_pro_Stunde' should be 48."
    assert (
        len(result["Kosten_Euro_pro_Stunde"]) == 48
    ), "The length of 'Kosten_Euro_pro_Stunde' should be 48."
    assert (
        len(result["akku_soc_pro_stunde"]) == 48
    ), "The length of 'akku_soc_pro_stunde' should be 48."
    assert len(result["Last_Wh_pro_Stunde"]) == 48, (
        "The length of 'Last_Wh_pro_Stunde' should be 48."
    )
    assert len(result["Netzeinspeisung_Wh_pro_Stunde"]) == 48, (
        "The length of 'Netzeinspeisung_Wh_pro_Stunde' should be 48."
    )
    assert len(result["Netzbezug_Wh_pro_Stunde"]) == 48, (
        "The length of 'Netzbezug_Wh_pro_Stunde' should be 48."
    )
    assert len(result["Kosten_Euro_pro_Stunde"]) == 48, (
        "The length of 'Kosten_Euro_pro_Stunde' should be 48."
    )
    assert len(result["akku_soc_pro_stunde"]) == 48, (
        "The length of 'akku_soc_pro_stunde' should be 48."
    )

    # Verify DC and AC charge bins
    assert (
        abs(result["akku_soc_pro_stunde"][2] - 44.70681818181818) < 1e-5
    ), "'akku_soc_pro_stunde[2]' should be 44.70681818181818."
    assert (
        abs(result["akku_soc_pro_stunde"][10] - 10.0) < 1e-5
    ), "'akku_soc_pro_stunde[10]' should be 10."
    assert abs(result["akku_soc_pro_stunde"][2] - 44.70681818181818) < 1e-5, (
        "'akku_soc_pro_stunde[2]' should be 44.70681818181818."
    )
    assert abs(result["akku_soc_pro_stunde"][10] - 10.0) < 1e-5, (
        "'akku_soc_pro_stunde[10]' should be 10."
    )

    assert (
        abs(result["Netzeinspeisung_Wh_pro_Stunde"][10] - 3946.93) < 1e-3
    ), "'Netzeinspeisung_Wh_pro_Stunde[10]' should be 3946.93."
    assert abs(result["Netzeinspeisung_Wh_pro_Stunde"][10] - 3946.93) < 1e-3, (
        "'Netzeinspeisung_Wh_pro_Stunde[10]' should be 3946.93."
    )

    assert (
        abs(result["Netzeinspeisung_Wh_pro_Stunde"][11] - 0.0) < 1e-3
    ), "'Netzeinspeisung_Wh_pro_Stunde[11]' should be 0.0."
    assert abs(result["Netzeinspeisung_Wh_pro_Stunde"][11] - 0.0) < 1e-3, (
        "'Netzeinspeisung_Wh_pro_Stunde[11]' should be 0.0."
    )

    assert (
        abs(result["akku_soc_pro_stunde"][20] - 10) < 1e-5
    ), "'akku_soc_pro_stunde[20]' should be 10."
    assert (
        abs(result["Last_Wh_pro_Stunde"][20] - 6050.98) < 1e-3
    ), "'Last_Wh_pro_Stunde[20]' should be 6050.98."
    assert abs(result["akku_soc_pro_stunde"][20] - 10) < 1e-5, (
        "'akku_soc_pro_stunde[20]' should be 10."
    )
    assert abs(result["Last_Wh_pro_Stunde"][20] - 6050.98) < 1e-3, (
        "'Last_Wh_pro_Stunde[20]' should be 6050.98."
    )

    print("All tests passed successfully.")


def test_set_parameters(create_ems_instance):
    """Test the set_parameters method of EnergieManagementSystem."""
    """Test the set_parameters method of EnergyManagement."""
    ems = create_ems_instance

    # Check if parameters are set correctly
    assert ems.load_energy_array is not None, "load_energy_array should not be None"
    assert ems.pv_prediction_wh is not None, "pv_prediction_wh should not be None"
    assert ems.elect_price_hourly is not None, "elect_price_hourly should not be None"
    assert (
        ems.elect_revenue_per_hour_arr is not None
    ), "elect_revenue_per_hour_arr should not be None"
    assert ems.elect_revenue_per_hour_arr is not None, (
        "elect_revenue_per_hour_arr should not be None"
    )


def test_set_akku_discharge_hours(create_ems_instance):
    """Test the set_akku_discharge_hours method of EnergieManagementSystem."""
    """Test the set_akku_discharge_hours method of EnergyManagement."""
    ems = create_ems_instance
    discharge_hours = np.full(ems.config.prediction.hours, 1.0)
    ems.set_akku_discharge_hours(discharge_hours)
    assert np.array_equal(
        ems.battery.discharge_array, discharge_hours
    ), "Discharge hours should be set correctly"
    assert np.array_equal(ems.battery.discharge_array, discharge_hours), (
        "Discharge hours should be set correctly"
    )


def test_set_akku_ac_charge_hours(create_ems_instance):
    """Test the set_akku_ac_charge_hours method of EnergieManagementSystem."""
    """Test the set_akku_ac_charge_hours method of EnergyManagement."""
    ems = create_ems_instance
    ac_charge_hours = np.full(ems.config.prediction.hours, 1.0)
    ems.set_akku_ac_charge_hours(ac_charge_hours)
    assert np.array_equal(
        ems.ac_charge_hours, ac_charge_hours
    ), "AC charge hours should be set correctly"
    assert np.array_equal(ems.ac_charge_hours, ac_charge_hours), (
        "AC charge hours should be set correctly"
    )


def test_set_akku_dc_charge_hours(create_ems_instance):
    """Test the set_akku_dc_charge_hours method of EnergieManagementSystem."""
    """Test the set_akku_dc_charge_hours method of EnergyManagement."""
    ems = create_ems_instance
    dc_charge_hours = np.full(ems.config.prediction.hours, 1.0)
    ems.set_akku_dc_charge_hours(dc_charge_hours)
    assert np.array_equal(
        ems.dc_charge_hours, dc_charge_hours
    ), "DC charge hours should be set correctly"
    assert np.array_equal(ems.dc_charge_hours, dc_charge_hours), (
        "DC charge hours should be set correctly"
    )


def test_set_ev_charge_hours(create_ems_instance):
    """Test the set_ev_charge_hours method of EnergieManagementSystem."""
    """Test the set_ev_charge_hours method of EnergyManagement."""
    ems = create_ems_instance
    ev_charge_hours = np.full(ems.config.prediction.hours, 1.0)
    ems.set_ev_charge_hours(ev_charge_hours)
    assert np.array_equal(
        ems.ev_charge_hours, ev_charge_hours
    ), "EV charge hours should be set correctly"
    assert np.array_equal(ems.ev_charge_hours, ev_charge_hours), (
        "EV charge hours should be set correctly"
    )


def test_reset(create_ems_instance):
    """Test the reset method of EnergieManagementSystem."""
    """Test the reset method of EnergyManagement."""
    ems = create_ems_instance
    ems.reset()
    assert ems.ev.current_soc_percentage() == 100, "EV SOC should be reset to initial value"
    assert (
        ems.battery.current_soc_percentage() == 80
    ), "Battery SOC should be reset to initial value"
    assert ems.battery.current_soc_percentage() == 80, (
        "Battery SOC should be reset to initial value"
    )


def test_simulate_start_now(create_ems_instance):
    """Test the simulate_start_now method of EnergieManagementSystem."""
    """Test the simulate_start_now method of EnergyManagement."""
    ems = create_ems_instance
    result = ems.simulate_start_now()
    assert result is not None, "Result should not be None"
@@ -86,7 +86,8 @@ def test_optimize(
        parameters=input_data, start_hour=start_hour, ngen=ngen
    )
    # Write test output to file, so we can take it as new data on intended change
    with open(DIR_TESTDATA / f"new_{fn_out}", "w") as f_out:
    TESTDATA_FILE = DIR_TESTDATA / f"new_{fn_out}"
    with TESTDATA_FILE.open("w", encoding="utf-8", newline="\n") as f_out:
        f_out.write(ergebnis.model_dump_json(indent=4, exclude_unset=True))

    assert ergebnis.result.Gesamtbilanz_Euro == pytest.approx(
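The open() → Path.open(..., encoding="utf-8", newline="\n") change here (and in the file fixtures throughout this changeset) pins both the encoding and the line endings of generated test data. A minimal sketch of why that matters, with a hypothetical output path:

from pathlib import Path

out_file = Path("out") / "result.json"  # hypothetical path
# Plain open() would use the locale encoding and translate "\n" to "\r\n"
# on Windows, so byte-wise comparisons against committed files would fail.
with out_file.open("w", encoding="utf-8", newline="\n") as f_out:
    f_out.write('{"ok": true}\n')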
@@ -1,5 +1,6 @@
import tempfile
from pathlib import Path
from typing import Union
from unittest.mock import patch

import pytest
@@ -46,12 +47,19 @@ def test_computed_paths(config_eos):
            "general": {
                "data_folder_path": "/base/data",
                "data_output_subpath": "extra/output",
                "data_cache_subpath": "somewhere/cache",
            }
            },
            "cache": {
                "subpath": "somewhere/cache",
            },
        }
    )
    assert config_eos.general.data_folder_path == Path("/base/data")
    assert config_eos.general.data_output_path == Path("/base/data/extra/output")
    assert config_eos.general.data_cache_path == Path("/base/data/somewhere/cache")
    assert config_eos.cache.path() == Path("/base/data/somewhere/cache")
    # Check non-configurable paths
    assert config_eos.package_root_path == Path(__file__).parent.parent.resolve().joinpath(
        "src/akkudoktoreos"
    )
    # Reset settings so the config_eos fixture can verify the default paths
    config_eos.reset_settings()

@@ -212,3 +220,226 @@ def test_config_common_settings_timezone_none_when_coordinates_missing():
    assert config_no_latitude.timezone is None
    assert config_no_longitude.timezone is None
    assert config_no_coords.timezone is None


# Test partial assignments and possible side effects
@pytest.mark.parametrize(
    "path, value, expected, exception",
    [
        # Correct value assignment
        (
            "general/latitude",
            42.0,
            [("general.latitude", 42.0), ("general.longitude", 13.405)],
            None,
        ),
        # Correct value assignment (trailing /)
        (
            "general/latitude/",
            41,
            [("general.latitude", 41.0), ("general.longitude", 13.405)],
            None,
        ),
        # Correct value assignment (cast)
        (
            "general/latitude",
            "43.0",
            [("general.latitude", 43.0), ("general.longitude", 13.405)],
            None,
        ),
        # Invalid value assignment (constraint)
        (
            "general/latitude",
            91.0,
            [("general.latitude", 52.52), ("general.longitude", 13.405)],
            ValueError,
        ),
        # Invalid value assignment (type)
        (
            "general/latitude",
            "test",
            [("general.latitude", 52.52), ("general.longitude", 13.405)],
            ValueError,
        ),
        # Invalid path
        (
            "general/latitude/test",
            "",
            [("general.latitude", 52.52), ("general.longitude", 13.405)],
            KeyError,
        ),
        # Correct value nested assignment
        (
            "general",
            {"latitude": 22},
            [("general.latitude", 22.0), ("general.longitude", 13.405)],
            None,
        ),
        # Invalid value nested assignment
        (
            "general",
            {"latitude": "test"},
            [("general.latitude", 52.52), ("general.longitude", 13.405)],
            ValueError,
        ),
        # Correct value for list
        (
            "optimization/ev_available_charge_rates_percent/0",
            0.1,
            [
                (
                    "optimization.ev_available_charge_rates_percent",
                    [0.1, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
                )
            ],
            None,
        ),
        # Invalid value for list
        (
            "optimization/ev_available_charge_rates_percent/0",
            "invalid",
            [
                (
                    "optimization.ev_available_charge_rates_percent",
                    [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
                )
            ],
            ValueError,
        ),
        # Invalid index (out of bound)
        (
            "optimization/ev_available_charge_rates_percent/10",
            0,
            [
                (
                    "optimization.ev_available_charge_rates_percent",
                    [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
                )
            ],
            IndexError,
        ),
        # Invalid index (not a number)
        (
            "optimization/ev_available_charge_rates_percent/test",
            0,
            [
                (
                    "optimization.ev_available_charge_rates_percent",
                    [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
                )
            ],
            IndexError,
        ),
        # Unset value (set None)
        (
            "optimization/ev_available_charge_rates_percent",
            None,
            [
                (
                    "optimization.ev_available_charge_rates_percent",
                    None,
                )
            ],
            None,
        ),
    ],
)
def test_set_nested_key(path, value, expected, exception, config_eos):
    if not exception:
        config_eos.set_config_value(path, value)
        for expected_path, expected_value in expected:
            assert eval(f"config_eos.{expected_path}") == expected_value
    else:
        with pytest.raises(exception):
            config_eos.set_config_value(path, value)
        for expected_path, expected_value in expected:
            assert eval(f"config_eos.{expected_path}") == expected_value


@pytest.mark.parametrize(
    "path, expected_value, exception",
    [
        ("general/latitude", 52.52, None),
        ("general/latitude/", 52.52, None),
        ("general/latitude/test", None, KeyError),
        (
            "optimization/ev_available_charge_rates_percent/1",
            0.375,
            None,
        ),
        ("optimization/ev_available_charge_rates_percent/10", 0, IndexError),
        ("optimization/ev_available_charge_rates_percent/test", 0, IndexError),
        (
            "optimization/ev_available_charge_rates_percent",
            [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
            None,
        ),
    ],
)
def test_get_nested_key(path, expected_value, exception, config_eos):
    if not exception:
        assert config_eos.get_config_value(path) == expected_value
    else:
        with pytest.raises(exception):
            config_eos.get_config_value(path)


def test_merge_settings_from_dict_invalid(config_eos):
    """Test merging invalid data."""
    invalid_settings = {
        "general": {
            "latitude": "invalid_latitude"  # Should be a float
        },
    }

    with pytest.raises(Exception):  # Pydantic ValidationError expected
        config_eos.merge_settings_from_dict(invalid_settings)


def test_merge_settings_partial(config_eos):
    """Test merging only a subset of settings."""
    partial_settings: dict[str, dict[str, Union[float, None, str]]] = {
        "general": {
            "latitude": 51.1657  # Only latitude is updated
        },
    }

    config_eos.merge_settings_from_dict(partial_settings)
    assert config_eos.general.latitude == 51.1657
    assert config_eos.general.longitude == 13.405  # Should remain unchanged

    partial_settings = {
        "weather": {
            "provider": "BrightSky",
        },
    }

    config_eos.merge_settings_from_dict(partial_settings)
    assert config_eos.weather.provider == "BrightSky"

    partial_settings = {
        "general": {
            "latitude": None,
        },
        "weather": {
            "provider": "ClearOutside",
        },
    }

    config_eos.merge_settings_from_dict(partial_settings)
    assert config_eos.general.latitude is None
    assert config_eos.weather.provider == "ClearOutside"

    # Assure update keeps same values
    config_eos.update()
    assert config_eos.general.latitude is None
    assert config_eos.weather.provider == "ClearOutside"


def test_merge_settings_empty(config_eos):
    """Test merging an empty dictionary does not change settings."""
    original_latitude = config_eos.general.latitude

    config_eos.merge_settings_from_dict({})  # No changes

    assert config_eos.general.latitude == original_latitude  # Should remain unchanged
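The parametrized cases above document the path syntax accepted by set_config_value/get_config_value: slash-separated keys, an ignored trailing slash, string-to-float casts, and integer indices into list values. A condensed sketch distilled from those cases, assuming a configured config_eos as provided by the fixture:

config_eos.set_config_value("general/latitude", "43.0")  # string is cast to float
assert config_eos.get_config_value("general/latitude/") == 43.0  # trailing "/" is ignored

# List elements are addressed by integer index within the path.
config_eos.set_config_value("optimization/ev_available_charge_rates_percent/0", 0.1)
assert config_eos.get_config_value("optimization/ev_available_charge_rates_percent/1") == 0.375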
@@ -116,7 +116,6 @@ class TestDataBase:
    def base(self):
        # Provide default values for configuration
        derived = DerivedBase()
        derived.config.update()
        return derived

    def test_get_config_value_key_error(self, base):
@@ -563,6 +562,102 @@ class TestDataSequence:
        assert dates == [to_datetime(datetime(2023, 11, 5)), to_datetime(datetime(2023, 11, 6))]
        assert values == [0.8, 0.9]

    def test_to_dataframe_full_data(self, sequence):
        """Test conversion of all records to a DataFrame without filtering."""
        record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
        record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
        record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
        sequence.append(record1)
        sequence.append(record2)
        sequence.append(record3)

        df = sequence.to_dataframe()

        # Validate DataFrame structure
        assert isinstance(df, pd.DataFrame)
        assert not df.empty
        assert len(df) == 3  # All records should be included
        assert "data_value" in df.columns

    def test_to_dataframe_with_filter(self, sequence):
        """Test filtering records by datetime range."""
        record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
        record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
        record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
        sequence.append(record1)
        sequence.append(record2)
        sequence.append(record3)

        start = to_datetime("2024-01-01T12:30:00Z")
        end = to_datetime("2024-01-01T14:00:00Z")

        df = sequence.to_dataframe(start_datetime=start, end_datetime=end)

        assert isinstance(df, pd.DataFrame)
        assert not df.empty
        assert len(df) == 1  # Only one record should match the range
        assert df.index[0] == pd.Timestamp("2024-01-01T13:00:00Z")

    def test_to_dataframe_no_matching_records(self, sequence):
        """Test when no records match the given datetime filter."""
        record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
        record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
        sequence.append(record1)
        sequence.append(record2)

        start = to_datetime("2024-01-01T14:00:00Z")  # Start time after all records
        end = to_datetime("2024-01-01T15:00:00Z")

        df = sequence.to_dataframe(start_datetime=start, end_datetime=end)

        assert isinstance(df, pd.DataFrame)
        assert df.empty  # No records should match

    def test_to_dataframe_empty_sequence(self, sequence):
        """Test when DataSequence has no records."""
        sequence = DataSequence(records=[])

        df = sequence.to_dataframe()

        assert isinstance(df, pd.DataFrame)
        assert df.empty  # Should return an empty DataFrame

    def test_to_dataframe_no_start_datetime(self, sequence):
        """Test when only end_datetime is given (all past records should be included)."""
        record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
        record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
        record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
        sequence.append(record1)
        sequence.append(record2)
        sequence.append(record3)

        end = to_datetime("2024-01-01T13:00:00Z")  # Include only first record

        df = sequence.to_dataframe(end_datetime=end)

        assert isinstance(df, pd.DataFrame)
        assert not df.empty
        assert len(df) == 1
        assert df.index[0] == pd.Timestamp("2024-01-01T12:00:00Z")

    def test_to_dataframe_no_end_datetime(self, sequence):
        """Test when only start_datetime is given (all future records should be included)."""
        record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
        record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
        record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
        sequence.append(record1)
        sequence.append(record2)
        sequence.append(record3)

        start = to_datetime("2024-01-01T13:00:00Z")  # Include last two records

        df = sequence.to_dataframe(start_datetime=start)

        assert isinstance(df, pd.DataFrame)
        assert not df.empty
        assert len(df) == 2
        assert df.index[0] == pd.Timestamp("2024-01-01T13:00:00Z")


class TestDataProvider:
    # Fixtures and helper functions
@@ -588,9 +683,9 @@ class TestDataProvider:
        """Test that DataProvider enforces singleton behavior."""
        instance1 = provider
        instance2 = DerivedDataProvider()
        assert (
            instance1 is instance2
        ), "Singleton pattern is not enforced; instances are not the same."
        assert instance1 is instance2, (
            "Singleton pattern is not enforced; instances are not the same."
        )

    def test_update_method_with_defaults(self, provider, sample_start_datetime, monkeypatch):
        """Test the `update` method with default parameters."""
@@ -608,9 +703,9 @@ class TestDataProvider:
        DerivedDataProvider.provider_updated = False
        provider.update_data(force_enable=True)
        assert provider.enabled() is False, "Provider should be disabled, but enabled() is True."
        assert (
            DerivedDataProvider.provider_updated is True
        ), "Provider should have been executed, but was not."
        assert DerivedDataProvider.provider_updated is True, (
            "Provider should have been executed, but was not."
        )

    def test_delete_by_datetime(self, provider, sample_start_datetime):
        """Test `delete_by_datetime` method for removing records by datetime range."""
@@ -625,12 +720,12 @@ class TestDataProvider:
            start_datetime=sample_start_datetime - to_duration("2 hours"),
            end_datetime=sample_start_datetime + to_duration("2 hours"),
        )
        assert (
            len(provider.records) == 1
        ), "Only one record should remain after deletion by datetime."
        assert provider.records[0].date_time == sample_start_datetime - to_duration(
            "3 hours"
        ), "Unexpected record remains."
        assert len(provider.records) == 1, (
            "Only one record should remain after deletion by datetime."
        )
        assert provider.records[0].date_time == sample_start_datetime - to_duration("3 hours"), (
            "Unexpected record remains."
        )


class TestDataImportProvider:
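Taken together, the to_dataframe tests above pin down the filter semantics: start_datetime is inclusive and end_datetime is exclusive. A condensed sketch of that boundary behavior, reusing the same record timestamps as the tests:

# The 13:00 record equals start_datetime and is kept; the 14:00 record
# equals end_datetime and is dropped.
df = sequence.to_dataframe(
    start_datetime=to_datetime("2024-01-01T13:00:00Z"),
    end_datetime=to_datetime("2024-01-01T14:00:00Z"),
)
assert list(df.index) == [pd.Timestamp("2024-01-01T13:00:00Z")]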
@@ -1,4 +1,5 @@
import json
import os
import sys
from pathlib import Path
from unittest.mock import patch
@@ -14,7 +15,7 @@ def test_openapi_spec_current(config_eos):
    expected_spec_path = DIR_PROJECT_ROOT / "openapi.json"
    new_spec_path = DIR_TESTDATA / "openapi-new.json"

    with open(expected_spec_path) as f_expected:
    with expected_spec_path.open("r", encoding="utf-8", newline=None) as f_expected:
        expected_spec = json.load(f_expected)

    # Patch get_config and import within guard to patch global variables within the eos module.
@@ -25,12 +26,14 @@ def test_openapi_spec_current(config_eos):
        from scripts import generate_openapi

        spec = generate_openapi.generate_openapi()
        spec_str = json.dumps(spec, indent=4, sort_keys=True)

    with open(new_spec_path, "w") as f_new:
        json.dump(spec, f_new, indent=4, sort_keys=True)
    if os.name == "nt":
        spec_str = spec_str.replace("127.0.0.1", "0.0.0.0")
    with new_spec_path.open("w", encoding="utf-8", newline="\n") as f_new:
        f_new.write(spec_str)

    # Serialize to ensure comparison is consistent
    spec_str = json.dumps(spec, indent=4, sort_keys=True)
    expected_spec_str = json.dumps(expected_spec, indent=4, sort_keys=True)

    try:
@@ -47,7 +50,7 @@ def test_openapi_md_current(config_eos):
    expected_spec_md_path = DIR_PROJECT_ROOT / "docs" / "_generated" / "openapi.md"
    new_spec_md_path = DIR_TESTDATA / "openapi-new.md"

    with open(expected_spec_md_path, encoding="utf8") as f_expected:
    with expected_spec_md_path.open("r", encoding="utf-8", newline=None) as f_expected:
        expected_spec_md = f_expected.read()

    # Patch get_config and import within guard to patch global variables within the eos module.
@@ -59,7 +62,9 @@ def test_openapi_md_current(config_eos):

    spec_md = generate_openapi_md.generate_openapi_md()

    with open(new_spec_md_path, "w", encoding="utf8") as f_new:
    if os.name == "nt":
        spec_md = spec_md.replace("127.0.0.1", "0.0.0.0")
    with new_spec_md_path.open("w", encoding="utf-8", newline="\n") as f_new:
        f_new.write(spec_md)

    try:
@@ -76,7 +81,7 @@ def test_config_md_current(config_eos):
    expected_config_md_path = DIR_PROJECT_ROOT / "docs" / "_generated" / "config.md"
    new_config_md_path = DIR_TESTDATA / "config-new.md"

    with open(expected_config_md_path, encoding="utf8") as f_expected:
    with expected_config_md_path.open("r", encoding="utf-8", newline=None) as f_expected:
        expected_config_md = f_expected.read()

    # Patch get_config and import within guard to patch global variables within the eos module.
@@ -88,7 +93,9 @@ def test_config_md_current(config_eos):

    config_md = generate_config_md.generate_config_md(config_eos)

    with open(new_config_md_path, "w", encoding="utf8") as f_new:
    if os.name == "nt":
        config_md = config_md.replace("127.0.0.1", "0.0.0.0").replace("\\\\", "/")
    with new_config_md_path.open("w", encoding="utf-8", newline="\n") as f_new:
        f_new.write(config_md)

    try:
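The new os.name == "nt" branches normalize the freshly generated documents before comparison, since the committed reference files were generated on a platform where the default host string differs. A minimal sketch of the idea, with a hypothetical generated string:

import os

generated = "server: http://127.0.0.1:8503"  # hypothetical generated text
if os.name == "nt":
    # Align Windows-generated output with the committed reference files.
    generated = generated.replace("127.0.0.1", "0.0.0.0")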
@@ -6,13 +6,14 @@ import numpy as np
import pytest
import requests

from akkudoktoreos.core.cache import CacheFileStore
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.elecpriceakkudoktor import (
    AkkudoktorElecPrice,
    AkkudoktorElecPriceValue,
    ElecPriceAkkudoktor,
)
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
@@ -21,6 +22,8 @@ FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON = DIR_TESTDATA.joinpath(
    "elecpriceforecast_akkudoktor_1.json"
)

logger = get_logger(__name__)


@pytest.fixture
def provider(monkeypatch, config_eos):
@@ -33,7 +36,9 @@ def provider(monkeypatch, config_eos):
@pytest.fixture
def sample_akkudoktor_1_json():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "r") as f_res:
    with FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON.open(
        "r", encoding="utf-8", newline=None
    ) as f_res:
        input_data = json.load(f_res)
    return input_data

@@ -85,9 +90,6 @@ def test_request_forecast(mock_get, provider, sample_akkudoktor_1_json):
    mock_response.content = json.dumps(sample_akkudoktor_1_json)
    mock_get.return_value = mock_response

    # Preset, as this is usually done by update()
    provider.config.update()

    # Test function
    akkudoktor_data = provider._request_forecast()

@@ -145,6 +147,7 @@ def test_update_data_with_incomplete_forecast(mock_get, provider):
    mock_response.status_code = 200
    mock_response.content = json.dumps(incomplete_data)
    mock_get.return_value = mock_response
    logger.info("The following errors are intentional and part of the test.")
    with pytest.raises(ValueError):
        provider._update_data(force_update=True)

@@ -172,7 +175,7 @@ def test_request_forecast_status_codes(
    provider._request_forecast()


@patch("akkudoktoreos.utils.cacheutil.CacheFileStore")
@patch("akkudoktoreos.core.cache.CacheFileStore")
def test_cache_integration(mock_cache, provider):
    """Test caching of 8-day electricity price data."""
    mock_cache_instance = mock_cache.return_value
@@ -207,5 +210,7 @@ def test_akkudoktor_development_forecast_data(provider):

    akkudoktor_data = provider._request_forecast()

    with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
    with FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON.open(
        "w", encoding="utf-8", newline="\n"
    ) as f_out:
        json.dump(akkudoktor_data, f_out, indent=4)
@@ -33,7 +33,7 @@ def provider(sample_import_1_json, config_eos):
@pytest.fixture
def sample_import_1_json():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON, "r") as f_res:
    with FILE_TESTDATA_ELECPRICEIMPORT_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
        input_data = json.load(f_res)
    return input_data
51
tests/test_eosdashserver.py
Normal file
@@ -0,0 +1,51 @@
import time
from http import HTTPStatus

import requests


class TestEOSDash:
    def test_eosdash_started(self, server_setup_for_class, is_system_test):
        """Test the EOSdash server is started by EOS server."""
        server = server_setup_for_class["server"]
        eosdash_server = server_setup_for_class["eosdash_server"]
        eos_dir = server_setup_for_class["eos_dir"]
        timeout = server_setup_for_class["timeout"]

        # Assure EOSdash is up
        startup = False
        error = ""
        for retries in range(int(timeout / 3)):
            try:
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)
        assert startup, f"Connection to {eosdash_server}/eosdash/health failed: {error}"
        assert result.json()["status"] == "alive"

    def test_eosdash_proxied_by_eos(self, server_setup_for_class, is_system_test):
        """Test the EOSdash server proxied by EOS server."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]
        timeout = server_setup_for_class["timeout"]

        # Assure EOSdash is up
        startup = False
        error = ""
        for retries in range(int(timeout / 3)):
            try:
                result = requests.get(f"{server}/eosdash/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)
        assert startup, f"Connection to {server}/eosdash/health failed: {error}"
        assert result.json()["status"] == "alive"
@@ -26,6 +26,8 @@ def provider(config_eos):
        }
    }
    config_eos.merge_settings_from_dict(settings)
    assert config_eos.load.provider == "LoadAkkudoktor"
    assert config_eos.load.provider_settings.loadakkudoktor_year_energy == 1000
    return LoadAkkudoktor()
@@ -1,8 +1,8 @@
"""Test Module for logging Module."""

import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path

import pytest

@@ -13,16 +13,7 @@ from akkudoktoreos.core.logging import get_logger
# -----------------------------


@pytest.fixture
def clean_up_log_file():
    """Fixture to clean up log files after tests."""
    log_file = "test.log"
    yield log_file
    if os.path.exists(log_file):
        os.remove(log_file)


def test_get_logger_console_logging(clean_up_log_file):
def test_get_logger_console_logging():
    """Test logger creation with console logging."""
    logger = get_logger("test_logger", logging_level="DEBUG")

@@ -37,9 +28,10 @@ def test_get_logger_console_logging(clean_up_log_file):
    assert isinstance(logger.handlers[0], logging.StreamHandler)


def test_get_logger_file_logging(clean_up_log_file):
def test_get_logger_file_logging(tmpdir):
    """Test logger creation with file logging."""
    logger = get_logger("test_logger", log_file="test.log", logging_level="WARNING")
    log_file = Path(tmpdir).joinpath("test.log")
    logger = get_logger("test_logger", log_file=str(log_file), logging_level="WARNING")

    # Check logger name
    assert logger.name == "test_logger"
@@ -53,10 +45,10 @@ def test_get_logger_file_logging(clean_up_log_file):
    assert isinstance(logger.handlers[1], RotatingFileHandler)

    # Check file existence
    assert os.path.exists("test.log")
    assert log_file.exists()


def test_get_logger_no_file_logging(clean_up_log_file):
def test_get_logger_no_file_logging():
    """Test logger creation without file logging."""
    logger = get_logger("test_logger")

@@ -71,7 +63,7 @@ def test_get_logger_no_file_logging(clean_up_log_file):
    assert isinstance(logger.handlers[0], logging.StreamHandler)


def test_get_logger_with_invalid_level(clean_up_log_file):
def test_get_logger_with_invalid_level():
    """Test logger creation with an invalid logging level."""
    with pytest.raises(ValueError, match="Unknown loggin level: INVALID"):
        logger = get_logger("test_logger", logging_level="INVALID")
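The hand-rolled cleanup fixture is gone because pytest removes its temporary directories automatically. With the newer pathlib-based tmp_path fixture the same file-logging check could be written as follows (a sketch, assuming get_logger accepts a string path as above):

def test_get_logger_file_logging_tmp_path(tmp_path):
    log_file = tmp_path / "test.log"
    logger = get_logger("test_logger", log_file=str(log_file), logging_level="WARNING")
    logger.warning("probe")  # ensure the rotating file handler has written
    assert log_file.exists()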
@@ -151,9 +151,9 @@ class TestPredictionProvider:
        """Test that PredictionProvider enforces singleton behavior."""
        instance1 = provider
        instance2 = DerivedPredictionProvider()
        assert (
            instance1 is instance2
        ), "Singleton pattern is not enforced; instances are not the same."
        assert instance1 is instance2, (
            "Singleton pattern is not enforced; instances are not the same."
        )

    def test_update_computed_fields(self, provider, sample_start_datetime):
        """Test that computed fields `end_datetime` and `keep_datetime` are correctly calculated."""
@@ -169,12 +169,12 @@ class TestPredictionProvider:
            provider.config.prediction.historic_hours * 3600
        )

        assert (
            provider.end_datetime == expected_end_datetime
        ), "End datetime is not calculated correctly."
        assert (
            provider.keep_datetime == expected_keep_datetime
        ), "Keep datetime is not calculated correctly."
        assert provider.end_datetime == expected_end_datetime, (
            "End datetime is not calculated correctly."
        )
        assert provider.keep_datetime == expected_keep_datetime, (
            "Keep datetime is not calculated correctly."
        )

    def test_update_method_with_defaults(
        self, provider, sample_start_datetime, config_eos, monkeypatch
@@ -201,17 +201,17 @@ class TestPredictionProvider:
    def test_update_method_force_enable(self, provider, monkeypatch):
        """Test that `update` executes when `force_enable` is True, even if `enabled` is False."""
        # Preset values that are needed by update
        monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "37.7749")
        monkeypatch.setenv("EOS_PREDICTION__LONGITUDE", "-122.4194")
        monkeypatch.setenv("EOS_GENERAL__LATITUDE", "37.7749")
        monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "-122.4194")

        # Override enabled to return False for this test
        DerivedPredictionProvider.provider_enabled = False
        DerivedPredictionProvider.provider_updated = False
        provider.update_data(force_enable=True)
        assert provider.enabled() is False, "Provider should be disabled, but enabled() is True."
        assert (
            DerivedPredictionProvider.provider_updated is True
        ), "Provider should have been executed, but was not."
        assert DerivedPredictionProvider.provider_updated is True, (
            "Provider should have been executed, but was not."
        )

    def test_delete_by_datetime(self, provider, sample_start_datetime):
        """Test `delete_by_datetime` method for removing records by datetime range."""
@@ -226,12 +226,12 @@ class TestPredictionProvider:
            start_datetime=sample_start_datetime - to_duration("2 hours"),
            end_datetime=sample_start_datetime + to_duration("2 hours"),
        )
        assert (
            len(provider.records) == 1
        ), "Only one record should remain after deletion by datetime."
        assert provider.records[0].date_time == sample_start_datetime - to_duration(
            "3 hours"
        ), "Unexpected record remains."
        assert len(provider.records) == 1, (
            "Only one record should remain after deletion by datetime."
        )
        assert provider.records[0].date_time == sample_start_datetime - to_duration("3 hours"), (
            "Unexpected record remains."
        )


class TestPredictionContainer:
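The renamed variables above also show the override scheme the settings use: EOS_<SECTION>__<FIELD>, with a double underscore between section and field, mapping EOS_GENERAL__LATITUDE to general.latitude. A sketch of the equivalent override outside a test (the export lines assume a POSIX shell):

# Equivalent to the monkeypatch.setenv calls above:
#   export EOS_GENERAL__LATITUDE=37.7749
#   export EOS_GENERAL__LONGITUDE=-122.4194
monkeypatch.setenv("EOS_GENERAL__LATITUDE", "37.7749")
monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "-122.4194")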
|
@@ -5,6 +5,7 @@ from unittest.mock import Mock, patch
|
||||
import pytest
|
||||
|
||||
from akkudoktoreos.core.ems import get_ems
|
||||
from akkudoktoreos.core.logging import get_logger
|
||||
from akkudoktoreos.prediction.prediction import get_prediction
|
||||
from akkudoktoreos.prediction.pvforecastakkudoktor import (
|
||||
AkkudoktorForecastHorizon,
|
||||
@@ -20,6 +21,8 @@ DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
|
||||
FILE_TESTDATA_PV_FORECAST_INPUT_1 = DIR_TESTDATA.joinpath("pv_forecast_input_1.json")
|
||||
FILE_TESTDATA_PV_FORECAST_RESULT_1 = DIR_TESTDATA.joinpath("pv_forecast_result_1.txt")
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_settings(config_eos):
|
||||
@@ -77,7 +80,7 @@ def sample_settings(config_eos):
|
||||
@pytest.fixture
|
||||
def sample_forecast_data():
|
||||
"""Fixture that returns sample forecast data converted to pydantic model."""
|
||||
with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r", encoding="utf8") as f_in:
|
||||
with FILE_TESTDATA_PV_FORECAST_INPUT_1.open("r", encoding="utf-8", newline=None) as f_in:
|
||||
input_data = f_in.read()
|
||||
return PVForecastAkkudoktor._validate_data(input_data)
|
||||
|
||||
@@ -85,7 +88,7 @@ def sample_forecast_data():
|
||||
@pytest.fixture
|
||||
def sample_forecast_data_raw():
|
||||
"""Fixture that returns raw sample forecast data."""
|
||||
with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r", encoding="utf8") as f_in:
|
||||
with FILE_TESTDATA_PV_FORECAST_INPUT_1.open("r", encoding="utf-8", newline=None) as f_in:
|
||||
input_data = f_in.read()
|
||||
return input_data
|
||||
|
||||
@@ -93,7 +96,7 @@ def sample_forecast_data_raw():
|
||||
@pytest.fixture
|
||||
def sample_forecast_report():
|
||||
"""Fixture that returns sample forecast data report."""
|
||||
with open(FILE_TESTDATA_PV_FORECAST_RESULT_1, "r", encoding="utf8") as f_res:
|
||||
with FILE_TESTDATA_PV_FORECAST_RESULT_1.open("r", encoding="utf-8", newline=None) as f_res:
|
||||
input_data = f_res.read()
|
||||
return input_data
|
||||
|
||||
@@ -223,6 +226,7 @@ def test_pvforecast_akkudoktor_data_record():
|
||||
|
||||
def test_pvforecast_akkudoktor_validate_data(provider_empty_instance, sample_forecast_data_raw):
|
||||
"""Test validation of PV forecast data on sample data."""
|
||||
logger.info("The following errors are intentional and part of the test.")
|
||||
with pytest.raises(
|
||||
ValueError,
|
||||
match="Field: meta\nError: Field required\nType: missing\nField: values\nError: Field required\nType: missing\n",
|
||||
|
@@ -33,7 +33,7 @@ def provider(sample_import_1_json, config_eos):
|
||||
@pytest.fixture
|
||||
def sample_import_1_json():
|
||||
"""Fixture that returns sample forecast data report."""
|
||||
with open(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON, "r") as f_res:
|
||||
with FILE_TESTDATA_PVFORECASTIMPORT_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
|
||||
input_data = json.load(f_res)
|
||||
return input_data
|
||||
|
||||
|
@@ -1,13 +1,444 @@
import json
import os
import signal
import time
from http import HTTPStatus
from pathlib import Path

import psutil
import pytest
import requests

from akkudoktoreos.server.server import get_default_host

def test_server(server, config_eos):
    """Test the server."""
    # validate correct path in server
    assert config_eos.general.data_folder_path is not None
    assert config_eos.general.data_folder_path.is_dir()
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_EOSSERVER_CONFIG_1 = DIR_TESTDATA.joinpath("eosserver_config_1.json")


class TestServer:
    def test_server_setup_for_class(self, server_setup_for_class):
        """Ensure server is started."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.get(f"{server}/v1/config")
        assert result.status_code == HTTPStatus.OK

        # Get testing config
        config_json = result.json()
        config_folder_path = Path(config_json["general"]["config_folder_path"])
        config_file_path = Path(config_json["general"]["config_file_path"])
        data_folder_path = Path(config_json["general"]["data_folder_path"])
        data_output_path = Path(config_json["general"]["data_output_path"])
        # Assure we are working in test environment
        assert str(config_folder_path).startswith(eos_dir)
        assert str(config_file_path).startswith(eos_dir)
        assert str(data_folder_path).startswith(eos_dir)
        assert str(data_output_path).startswith(eos_dir)

    def test_prediction_brightsky(self, server_setup_for_class, is_system_test):
        """Test weather prediction by BrightSky."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.get(f"{server}/v1/config")
        assert result.status_code == HTTPStatus.OK

        # Get testing config
        config_json = result.json()
        config_folder_path = Path(config_json["general"]["config_folder_path"])
        # Assure we are working in test environment
        assert str(config_folder_path).startswith(eos_dir)

        result = requests.put(f"{server}/v1/config/weather/provider", json="BrightSky")
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "BrightSky" in providers

        if is_system_test:
            result = requests.post(f"{server}/v1/prediction/update/BrightSky")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24

        else:
            pass

    def test_prediction_clearoutside(self, server_setup_for_class, is_system_test):
        """Test weather prediction by ClearOutside."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.put(f"{server}/v1/config/weather/provider", json="ClearOutside")
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "ClearOutside" in providers

        if is_system_test:
            result = requests.post(f"{server}/v1/prediction/update/ClearOutside")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24

        else:
            pass

    def test_prediction_pvforecastakkudoktor(self, server_setup_for_class, is_system_test):
        """Test PV prediction by PVForecastAkkudoktor."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        # Reset config
        with FILE_TESTDATA_EOSSERVER_CONFIG_1.open("r", encoding="utf-8", newline=None) as fd:
            config = json.load(fd)
        config["pvforecast"]["provider"] = "PVForecastAkkudoktor"
        result = requests.put(f"{server}/v1/config", json=config)
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "PVForecastAkkudoktor" in providers

        if is_system_test:
            result = requests.post(f"{server}/v1/prediction/update/PVForecastAkkudoktor")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=pvforecast_ac_power")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24

        else:
            pass

    def test_prediction_elecpriceakkudoktor(self, server_setup_for_class, is_system_test):
        """Test electricity price prediction by ElecPriceAkkudoktor."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        # Reset config
        with FILE_TESTDATA_EOSSERVER_CONFIG_1.open("r", encoding="utf-8", newline=None) as fd:
            config = json.load(fd)
        config["elecprice"]["provider"] = "ElecPriceAkkudoktor"
        result = requests.put(f"{server}/v1/config", json=config)
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "ElecPriceAkkudoktor" in providers

        if is_system_test:
            result = requests.post(f"{server}/v1/prediction/update/ElecPriceAkkudoktor")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=elecprice_marketprice_wh")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24

        else:
            pass

    def test_prediction_loadakkudoktor(self, server_setup_for_class, is_system_test):
        """Test load prediction by LoadAkkudoktor."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.put(f"{server}/v1/config/load/provider", json="LoadAkkudoktor")
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "LoadAkkudoktor" in providers

        if is_system_test:
            result = requests.post(f"{server}/v1/prediction/update/LoadAkkudoktor")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=load_mean")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24

        else:
            pass

    def test_admin_cache(self, server_setup_for_class, is_system_test):
        """Test whether cache is reconstructed from cached files."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.get(f"{server}/v1/admin/cache")
        assert result.status_code == HTTPStatus.OK
        cache = result.json()

        if is_system_test:
            # There should be some cache data
            assert cache != {}

            # Save cache
            result = requests.post(f"{server}/v1/admin/cache/save")
            assert result.status_code == HTTPStatus.OK
            cache_saved = result.json()
            assert cache_saved == cache

            # Clear cache - should clear nothing as all cache files expire in the future
            result = requests.post(f"{server}/v1/admin/cache/clear")
            assert result.status_code == HTTPStatus.OK
            cache_cleared = result.json()
            assert cache_cleared == cache

            # Force clear cache
            result = requests.post(f"{server}/v1/admin/cache/clear?clear_all=true")
            assert result.status_code == HTTPStatus.OK
            cache_cleared = result.json()
            assert cache_cleared == {}

            # Try to load already deleted cache entries
            result = requests.post(f"{server}/v1/admin/cache/load")
            assert result.status_code == HTTPStatus.OK
            cache_loaded = result.json()
            assert cache_loaded == {}

            # Cache should still be empty
            result = requests.get(f"{server}/v1/admin/cache")
            assert result.status_code == HTTPStatus.OK
            cache = result.json()
            assert cache == {}
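Before the start/stop tests below, note that `test_admin_cache` above walks the complete admin cache API. As a compact reference, the calls it issues are sketched here against an assumed local instance; `cachefilestore.json` is the file name the restart test further down expects under the configured cache subpath:

```python
import requests

server = "http://localhost:8503"  # assumed local EOS instance

# Inspect the current cache management dictionary.
cache = requests.get(f"{server}/v1/admin/cache").json()

# Persist cache management information to disk.
requests.post(f"{server}/v1/admin/cache/save")

# Drop only expired entries, or everything with clear_all=true.
requests.post(f"{server}/v1/admin/cache/clear")
requests.post(f"{server}/v1/admin/cache/clear?clear_all=true")

# Rebuild the cache management dictionary from the files on disk.
requests.post(f"{server}/v1/admin/cache/load")
```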
class TestServerStartStop:
    def test_server_start_eosdash(self, tmpdir):
        """Test the EOSdash server startup from EOS."""
        # Do not use any fixture as this will make pytest the owner of the EOSdash port.
        host = get_default_host()
        if os.name == "nt":
            # Windows does not provide SIGKILL
            sigkill = signal.SIGTERM  # type: ignore[attr-defined,unused-ignore]
        else:
            sigkill = signal.SIGKILL  # type: ignore
        port = 8503
        eosdash_port = 8504
        timeout = 120

        server = f"http://{host}:{port}"
        eosdash_server = f"http://{host}:{eosdash_port}"
        eos_dir = str(tmpdir)

        # Cleanup any EOSdash process left.
        try:
            result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, sigkill)
                time.sleep(1)
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except Exception:
            pass

        # Wait for EOSdash port to be freed
        process_info: list[dict] = []
        for retries in range(int(timeout / 3)):
            process_info = []
            pids: list[int] = []
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == eosdash_port:
                    if conn.pid not in pids:
                        # Get fresh process info
                        process = psutil.Process(conn.pid)
                        pids.append(conn.pid)
                        process_info.append(process.as_dict(attrs=["pid", "cmdline"]))
            if len(process_info) == 0:
                break
            time.sleep(3)
        assert len(process_info) == 0

        # Import after test setup to prevent creation of config file before test
        from akkudoktoreos.server.eos import start_eosdash

        process = start_eosdash(
            host=host,
            port=eosdash_port,
            eos_host=host,
            eos_port=port,
            log_level="debug",
            access_log=False,
            reload=False,
            eos_dir=eos_dir,
            eos_config_dir=eos_dir,
        )

        # Assure EOSdash is up
        startup = False
        error = ""
        for retries in range(int(timeout / 3)):
            try:
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)

        assert startup, f"Connection to {eosdash_server}/eosdash/health failed: {error}"
        assert result.json()["status"] == "alive"

        # Shutdown eosdash
        try:
            result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, signal.SIGTERM)
                time.sleep(1)
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except Exception:
            pass

    @pytest.mark.skipif(os.name == "nt", reason="Server restart not supported on Windows")
    def test_server_restart(self, server_setup_for_function, is_system_test):
        """Test server restart."""
        server = server_setup_for_function["server"]
        eos_dir = server_setup_for_function["eos_dir"]
        timeout = server_setup_for_function["timeout"]

        result = requests.get(f"{server}/v1/config")
        assert result.status_code == HTTPStatus.OK

        # Get testing config
        config_json = result.json()
        config_folder_path = Path(config_json["general"]["config_folder_path"])
        config_file_path = Path(config_json["general"]["config_file_path"])
        data_folder_path = Path(config_json["general"]["data_folder_path"])
        data_output_path = Path(config_json["general"]["data_output_path"])
        cache_file_path = data_folder_path.joinpath(config_json["cache"]["subpath"]).joinpath(
            "cachefilestore.json"
        )
        # Assure we are working in test environment
        assert str(config_folder_path).startswith(eos_dir)
        assert str(config_file_path).startswith(eos_dir)
        assert str(data_folder_path).startswith(eos_dir)
        assert str(data_output_path).startswith(eos_dir)

        if is_system_test:
            # Prepare cache entry and get cached data
            result = requests.put(f"{server}/v1/config/weather/provider", json="BrightSky")
            assert result.status_code == HTTPStatus.OK

            result = requests.post(f"{server}/v1/prediction/update/BrightSky")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK
            data = result.json()
            assert data["data"] != {}

            result = requests.put(f"{server}/v1/config/file")
            assert result.status_code == HTTPStatus.OK

        # Save cache
        result = requests.post(f"{server}/v1/admin/cache/save")
        assert result.status_code == HTTPStatus.OK
        cache = result.json()

        assert cache_file_path.exists()

        result = requests.get(f"{server}/v1/admin/cache")
        assert result.status_code == HTTPStatus.OK
        cache = result.json()

        result = requests.get(f"{server}/v1/health")
        assert result.status_code == HTTPStatus.OK
        pid = result.json()["pid"]

        result = requests.post(f"{server}/v1/admin/server/restart")
        assert result.status_code == HTTPStatus.OK
        assert "Restarting EOS.." in result.json()["message"]
        new_pid = result.json()["pid"]

        # Wait for server to shut down
        for retries in range(10):
            try:
                result = requests.get(f"{server}/v1/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    pid = result.json()["pid"]
                    if pid == new_pid:
                        # Already started
                        break
                else:
                    break
            except Exception:
                break
            time.sleep(3)

        # Assure EOS is up again
        startup = False
        error = ""
        for retries in range(int(timeout / 3)):
            try:
                result = requests.get(f"{server}/v1/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)

        assert startup, f"Connection to {server}/v1/health failed: {error}"
        assert result.json()["status"] == "alive"
        pid = result.json()["pid"]
        assert pid == new_pid

        result = requests.get(f"{server}/v1/admin/cache")
        assert result.status_code == HTTPStatus.OK
        new_cache = result.json()

        assert cache.items() <= new_cache.items()

        if is_system_test:
            result = requests.get(f"{server}/v1/config")
            assert result.status_code == HTTPStatus.OK
            assert result.json()["weather"]["provider"] == "BrightSky"

            # Wait for initialisation task to have finished
            time.sleep(5)

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK
            assert result.json() == data

        # Shutdown the newly created server
        result = requests.post(f"{server}/v1/admin/server/shutdown")
        assert result.status_code == HTTPStatus.OK
        assert "Stopping EOS.." in result.json()["message"]
        new_pid = result.json()["pid"]
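Both tests above poll a health endpoint (`/eosdash/health` or `/v1/health`) in a 3-second retry loop until the server answers or the timeout budget is spent. Factored out, the pattern looks roughly like the sketch below; `wait_for_health` is a hypothetical helper, not part of the repository:

```python
import time
from http import HTTPStatus

import requests


def wait_for_health(url: str, timeout: float = 120.0) -> None:
    """Poll a health endpoint until it reports OK; raise AssertionError on timeout."""
    error = ""
    for _ in range(int(timeout / 3)):
        try:
            result = requests.get(url, timeout=2)
            if result.status_code == HTTPStatus.OK:
                return
            error = f"{result.status_code}, {result.content!r}"
        except Exception as ex:
            error = str(ex)
        time.sleep(3)
    raise AssertionError(f"Connection to {url} failed: {error}")
```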
@@ -5,9 +5,9 @@ from unittest.mock import Mock, patch
import pandas as pd
import pytest

from akkudoktoreos.core.cache import CacheFileStore
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
@@ -20,15 +20,15 @@ FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON = DIR_TESTDATA.joinpath("weatherforecast_b
def provider(monkeypatch):
    """Fixture to create a WeatherProvider instance."""
    monkeypatch.setenv("EOS_WEATHER__WEATHER_PROVIDER", "BrightSky")
    monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "50.0")
    monkeypatch.setenv("EOS_PREDICTION__LONGITUDE", "10.0")
    monkeypatch.setenv("EOS_GENERAL__LATITUDE", "50.0")
    monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "10.0")
    return WeatherBrightSky()


@pytest.fixture
def sample_brightsky_1_json():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "r") as f_res:
    with FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
        input_data = json.load(f_res)
    return input_data

@@ -36,7 +36,7 @@ def sample_brightsky_1_json():
@pytest.fixture
def sample_brightsky_2_json():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON, "r") as f_res:
    with FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON.open("r", encoding="utf-8", newline=None) as f_res:
        input_data = json.load(f_res)
    return input_data

@@ -107,9 +107,6 @@ def test_request_forecast(mock_get, provider, sample_brightsky_1_json):
    mock_response.content = json.dumps(sample_brightsky_1_json)
    mock_get.return_value = mock_response

    # Preset, as this is usually done by update()
    provider.config.update()

    # Test function
    brightsky_data = provider._request_forecast()

@@ -165,10 +162,7 @@ def test_update_data(mock_get, provider, sample_brightsky_1_json, cache_store):

    # Assert: Verify the result is as expected
    mock_get.assert_called_once()
    assert len(provider) == 338

    # with open(FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON, "w") as f_out:
    #     f_out.write(provider.to_json())
    assert len(provider) == 50


# ------------------------------------------------
@@ -176,15 +170,23 @@ def test_update_data(mock_get, provider, sample_brightsky_1_json, cache_store):
# ------------------------------------------------


@pytest.mark.skip(reason="For development only")
def test_brightsky_development_forecast_data(provider):
def test_brightsky_development_forecast_data(provider, config_eos, is_system_test):
    """Fetch data from real BrightSky server."""
    if not is_system_test:
        return

    # Preset, as this is usually done by update_data()
    provider.start_datetime = to_datetime("2024-10-26 00:00:00")
    provider.latitude = 50.0
    provider.longitude = 10.0
    ems_eos = get_ems()
    ems_eos.set_start_datetime(to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin"))
    config_eos.general.latitude = 50.0
    config_eos.general.longitude = 10.0

    brightsky_data = provider._request_forecast()

    with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "w") as f_out:
    with FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON.open("w", encoding="utf-8", newline="\n") as f_out:
        json.dump(brightsky_data, f_out, indent=4)

    provider.update_data(force_enable=True, force_update=True)

    with FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON.open("w", encoding="utf-8", newline="\n") as f_out:
        f_out.write(provider.model_dump_json(indent=4))
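The `provider` fixture above also shows the configuration rename: the coordinates moved from the `prediction` section to `general`, so the override variables changed from `EOS_PREDICTION__LATITUDE`/`EOS_PREDICTION__LONGITUDE` to `EOS_GENERAL__LATITUDE`/`EOS_GENERAL__LONGITUDE`. Judging by these names, `EOS_<SECTION>__<FIELD>` with a double underscore as the nesting delimiter should override any nested setting the same way; a small sketch using pytest's `monkeypatch`, under that assumed convention:

```python
import pytest


@pytest.fixture
def eos_env(monkeypatch: pytest.MonkeyPatch) -> None:
    # EOS_<SECTION>__<FIELD>: the double underscore maps to config nesting,
    # e.g. general.latitude -> EOS_GENERAL__LATITUDE (assumed convention).
    monkeypatch.setenv("EOS_GENERAL__LATITUDE", "50.0")
    monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "10.0")
    monkeypatch.setenv("EOS_WEATHER__WEATHER_PROVIDER", "BrightSky")
```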
@@ -9,9 +9,9 @@ import pvlib
import pytest
from bs4 import BeautifulSoup

from akkudoktoreos.core.cache import CacheFileStore
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
@@ -39,7 +39,9 @@ def provider(config_eos):
@pytest.fixture
def sample_clearout_1_html():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_HTML, "r") as f_res:
    with FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_HTML.open(
        "r", encoding="utf-8", newline=None
    ) as f_res:
        input_data = f_res.read()
    return input_data

@@ -47,7 +49,7 @@ def sample_clearout_1_html():
@pytest.fixture
def sample_clearout_1_data():
    """Fixture that returns sample forecast data."""
    with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "r", encoding="utf8") as f_in:
    with FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA.open("r", encoding="utf-8", newline=None) as f_in:
        json_str = f_in.read()
    data = WeatherClearOutside.from_json(json_str)
    return data
@@ -220,7 +222,9 @@ def test_development_forecast_data(mock_get, provider, sample_clearout_1_html):
    # Fill the instance
    provider.update_data(force_enable=True)

    with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "w", encoding="utf8") as f_out:
    with FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA.open(
        "w", encoding="utf-8", newline="\n"
    ) as f_out:
        f_out.write(provider.to_json())
@@ -33,7 +33,7 @@ def provider(sample_import_1_json, config_eos):
@pytest.fixture
def sample_import_1_json():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_WEATHERIMPORT_1_JSON, "r") as f_res:
    with FILE_TESTDATA_WEATHERIMPORT_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
        input_data = json.load(f_res)
    return input_data
86
tests/testdata/eosserver_config_1.json
vendored
Normal file
@@ -0,0 +1,86 @@
{
    "elecprice": {
        "charges_kwh": 0.21,
        "provider": "ElecPriceImport"
    },
    "general": {
        "latitude": 52.5,
        "longitude": 13.4
    },
    "prediction": {
        "historic_hours": 48,
        "hours": 48
    },
    "load": {
        "provider": "LoadImport",
        "provider_settings": {
            "loadakkudoktor_year_energy": 20000
        }
    },
    "optimization": {
        "hours": 48
    },
    "pvforecast": {
        "planes": [
            {
                "peakpower": 5.0,
                "surface_azimuth": -10,
                "surface_tilt": 7,
                "userhorizon": [
                    20,
                    27,
                    22,
                    20
                ],
                "inverter_paco": 10000
            },
            {
                "peakpower": 4.8,
                "surface_azimuth": -90,
                "surface_tilt": 7,
                "userhorizon": [
                    30,
                    30,
                    30,
                    50
                ],
                "inverter_paco": 10000
            },
            {
                "peakpower": 1.4,
                "surface_azimuth": -40,
                "surface_tilt": 60,
                "userhorizon": [
                    60,
                    30,
                    0,
                    30
                ],
                "inverter_paco": 2000
            },
            {
                "peakpower": 1.6,
                "surface_azimuth": 5,
                "surface_tilt": 45,
                "userhorizon": [
                    45,
                    25,
                    30,
                    60
                ],
                "inverter_paco": 1400
            }
        ],
        "provider": "PVForecastImport"
    },
    "server": {
        "startup_eosdash": true,
        "host": "0.0.0.0",
        "port": 8503,
        "eosdash_host": "0.0.0.0",
        "eosdash_port": 8504
    },
    "weather": {
        "provider": "WeatherImport"
    }
}
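This file is the shared baseline configuration for the server tests above: `TestServer` loads it, overrides a single provider, and pushes the result to a running instance via `PUT /v1/config`. A minimal sketch of that flow; the port follows the `server` section above, and `localhost` stands in for wherever the instance actually listens:

```python
import json
from http import HTTPStatus
from pathlib import Path

import requests

config_path = Path("tests/testdata/eosserver_config_1.json")
config = json.loads(config_path.read_text(encoding="utf-8"))

# Override a single provider, then push the whole config to the server.
config["weather"]["provider"] = "ClearOutside"
result = requests.put("http://localhost:8503/v1/config", json=config)
assert result.status_code == HTTPStatus.OK
```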