Mirror of https://github.com/Akkudoktor-EOS/EOS.git (synced 2025-04-19 08:55:15 +00:00)

Commit 3257dac92b: Rename settings variables (remove prefixes)
Parent: 1e1bac9fdb

.env (3 changes)
@@ -1,4 +1,5 @@
 EOS_VERSION=main
-EOS_PORT=8503
+EOS_SERVER__PORT=8503
+EOS_SERVER__EOSDASH_PORT=8504

 PYTHON_VERSION=3.12.6
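The new names follow a nested `EOS_<SECTION>__<FIELD>` scheme. A minimal sketch of how such double-underscore variables typically map onto nested settings, assuming a pydantic-settings model with `env_prefix="EOS_"` and `env_nested_delimiter="__"` (an assumption about EOS internals, not something shown in this diff):

```python
# Hypothetical sketch of the EOS_<SECTION>__<FIELD> naming scheme; the model and
# field names below are illustrative, not EOS's real classes.
import os

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class ServerSettings(BaseModel):
    port: int = 8503          # reachable as EOS_SERVER__PORT
    eosdash_port: int = 8504  # reachable as EOS_SERVER__EOSDASH_PORT


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="EOS_", env_nested_delimiter="__")

    server: ServerSettings = ServerSettings()


os.environ["EOS_SERVER__PORT"] = "8505"  # was EOS_PORT before this commit
print(Settings().server.port)            # -> 8505
```

Whether set in `.env` or in the compose `environment:` block that follows, the value lands on a nested field rather than on a flat, prefixed key.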
@@ -11,12 +11,14 @@ services:
       dockerfile: "Dockerfile"
       args:
         PYTHON_VERSION: "${PYTHON_VERSION}"
+    env_file:
+      - .env
     environment:
       - EOS_CONFIG_DIR=config
-      - latitude=52.2
+      - EOS_PREDICTION__LATITUDE=52.2
-      - longitude=13.4
+      - EOS_PREDICTION__LONGITUDE=13.4
-      - elecprice_provider=ElecPriceAkkudoktor
+      - EOS_ELECPRICE__PROVIDER=ElecPriceAkkudoktor
-      - elecprice_charges_kwh=0.21
+      - EOS_ELECPRICE__CHARGES_KWH=0.21
-      - server_fasthtml_host=none
     ports:
-      - "${EOS_PORT}:${EOS_PORT}"
+      - "${EOS_SERVER__PORT}:${EOS_SERVER__PORT}"
+      - "${EOS_SERVER__EOSDASH_PORT}:${EOS_SERVER__EOSDASH_PORT}"
@@ -55,8 +55,8 @@ General configuration to set directories of cache and output files.

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| logging_level_default | `EOS_LOGGING__LOGGING_LEVEL_DEFAULT` | `Optional[str]` | `rw` | `None` | EOS default logging level. |
+| level | `EOS_LOGGING__LEVEL` | `Optional[str]` | `rw` | `None` | EOS default logging level. |
-| logging_level_root | | `str` | `ro` | `N/A` | Root logger logging level. |
+| root_level | | `str` | `ro` | `N/A` | Root logger logging level. |
 :::

 ### Example Input
@@ -66,7 +66,7 @@ General configuration to set directories of cache and output files.

 {
   "logging": {
-    "logging_level_default": "INFO"
+    "level": "INFO"
   }
 }
 ```
@@ -78,8 +78,8 @@ General configuration to set directories of cache and output files.

 {
   "logging": {
-    "logging_level_default": "INFO",
+    "level": "INFO",
-    "logging_level_root": "INFO"
+    "root_level": "INFO"
   }
 }
 ```
@@ -167,7 +167,7 @@ General configuration to set directories of cache and output files.
 | device_id | `str` | `rw` | `required` | ID of inverter |
 | hours | `Optional[int]` | `rw` | `None` | Number of prediction hours. Defaults to global config prediction hours. |
 | max_power_wh | `float` | `rw` | `required` | - |
-| battery | `Optional[str]` | `rw` | `None` | ID of battery |
+| battery_id | `Optional[str]` | `rw` | `None` | ID of battery |
 :::

 #### Example Input/Output
@@ -182,7 +182,7 @@ General configuration to set directories of cache and output files.
       "device_id": "inverter1",
       "hours": null,
       "max_power_wh": 10000.0,
-      "battery": null
+      "battery_id": null
     }
   ]
 }
@@ -240,11 +240,11 @@ General configuration to set directories of cache and output files.

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| measurement_load0_name | `EOS_MEASUREMENT__MEASUREMENT_LOAD0_NAME` | `Optional[str]` | `rw` | `None` | Name of the load0 source |
+| load0_name | `EOS_MEASUREMENT__LOAD0_NAME` | `Optional[str]` | `rw` | `None` | Name of the load0 source |
-| measurement_load1_name | `EOS_MEASUREMENT__MEASUREMENT_LOAD1_NAME` | `Optional[str]` | `rw` | `None` | Name of the load1 source |
+| load1_name | `EOS_MEASUREMENT__LOAD1_NAME` | `Optional[str]` | `rw` | `None` | Name of the load1 source |
-| measurement_load2_name | `EOS_MEASUREMENT__MEASUREMENT_LOAD2_NAME` | `Optional[str]` | `rw` | `None` | Name of the load2 source |
+| load2_name | `EOS_MEASUREMENT__LOAD2_NAME` | `Optional[str]` | `rw` | `None` | Name of the load2 source |
-| measurement_load3_name | `EOS_MEASUREMENT__MEASUREMENT_LOAD3_NAME` | `Optional[str]` | `rw` | `None` | Name of the load3 source |
+| load3_name | `EOS_MEASUREMENT__LOAD3_NAME` | `Optional[str]` | `rw` | `None` | Name of the load3 source |
-| measurement_load4_name | `EOS_MEASUREMENT__MEASUREMENT_LOAD4_NAME` | `Optional[str]` | `rw` | `None` | Name of the load4 source |
+| load4_name | `EOS_MEASUREMENT__LOAD4_NAME` | `Optional[str]` | `rw` | `None` | Name of the load4 source |
 :::

 ### Example Input/Output
@@ -254,11 +254,11 @@ General configuration to set directories of cache and output files.

 {
   "measurement": {
-    "measurement_load0_name": "Household",
+    "load0_name": "Household",
-    "measurement_load1_name": null,
+    "load1_name": null,
-    "measurement_load2_name": null,
+    "load2_name": null,
-    "measurement_load3_name": null,
+    "load3_name": null,
-    "measurement_load4_name": null
+    "load4_name": null
   }
 }
 ```
@@ -266,7 +266,7 @@ General configuration to set directories of cache and output files.
 ## General Optimization Configuration

 Attributes:
-    optimization_hours (int): Number of hours for optimizations.
+    hours (int): Number of hours for optimizations.

 :::{table} optimization
 :widths: 10 20 10 5 5 30
@@ -274,9 +274,9 @@ Attributes:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| optimization_hours | `EOS_OPTIMIZATION__OPTIMIZATION_HOURS` | `Optional[int]` | `rw` | `48` | Number of hours into the future for optimizations. |
+| hours | `EOS_OPTIMIZATION__HOURS` | `Optional[int]` | `rw` | `48` | Number of hours into the future for optimizations. |
-| optimization_penalty | `EOS_OPTIMIZATION__OPTIMIZATION_PENALTY` | `Optional[int]` | `rw` | `10` | Penalty factor used in optimization. |
+| penalty | `EOS_OPTIMIZATION__PENALTY` | `Optional[int]` | `rw` | `10` | Penalty factor used in optimization. |
-| optimization_ev_available_charge_rates_percent | `EOS_OPTIMIZATION__OPTIMIZATION_EV_AVAILABLE_CHARGE_RATES_PERCENT` | `Optional[List[float]]` | `rw` | `[0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0]` | Charge rates available for the EV in percent of maximum charge. |
+| ev_available_charge_rates_percent | `EOS_OPTIMIZATION__EV_AVAILABLE_CHARGE_RATES_PERCENT` | `Optional[List[float]]` | `rw` | `[0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0]` | Charge rates available for the EV in percent of maximum charge. |
 :::

 ### Example Input/Output
@@ -286,9 +286,9 @@ Attributes:

 {
   "optimization": {
-    "optimization_hours": 48,
+    "hours": 48,
-    "optimization_penalty": 10,
+    "penalty": 10,
-    "optimization_ev_available_charge_rates_percent": [
+    "ev_available_charge_rates_percent": [
       0.0,
       0.375,
       0.5,
@@ -309,9 +309,9 @@ Validators ensure each parameter is within a specified range. A computed propert
 determines the time zone based on latitude and longitude.

 Attributes:
-    prediction_hours (Optional[int]): Number of hours into the future for predictions.
+    hours (Optional[int]): Number of hours into the future for predictions.
         Must be non-negative.
-    prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.
+    historic_hours (Optional[int]): Number of hours into the past for historical data.
         Must be non-negative.
     latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
     longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
@@ -321,8 +321,8 @@ Properties:
         and longitude.

 Validators:
-    validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.
+    validate_hours (int): Ensures `hours` is a non-negative integer.
-    validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.
+    validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.
     validate_latitude (float): Ensures `latitude` is within the range -90 to 90.
     validate_longitude (float): Ensures `longitude` is within the range -180 to 180.

@@ -332,8 +332,8 @@ Validators:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| prediction_hours | `EOS_PREDICTION__PREDICTION_HOURS` | `Optional[int]` | `rw` | `48` | Number of hours into the future for predictions |
+| hours | `EOS_PREDICTION__HOURS` | `Optional[int]` | `rw` | `48` | Number of hours into the future for predictions |
-| prediction_historic_hours | `EOS_PREDICTION__PREDICTION_HISTORIC_HOURS` | `Optional[int]` | `rw` | `48` | Number of hours into the past for historical predictions data |
+| historic_hours | `EOS_PREDICTION__HISTORIC_HOURS` | `Optional[int]` | `rw` | `48` | Number of hours into the past for historical predictions data |
 | latitude | `EOS_PREDICTION__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°) |
 | longitude | `EOS_PREDICTION__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees, within -180 to 180 (°) |
 | timezone | | `Optional[str]` | `ro` | `N/A` | Compute timezone based on latitude and longitude. |
@@ -346,8 +346,8 @@ Validators:

 {
   "prediction": {
-    "prediction_hours": 48,
+    "hours": 48,
-    "prediction_historic_hours": 48,
+    "historic_hours": 48,
     "latitude": 52.52,
     "longitude": 13.405
   }
@@ -361,8 +361,8 @@ Validators:

 {
   "prediction": {
-    "prediction_hours": 48,
+    "hours": 48,
-    "prediction_historic_hours": 48,
+    "historic_hours": 48,
     "latitude": 52.52,
     "longitude": 13.405,
     "timezone": "Europe/Berlin"
@@ -378,8 +378,8 @@ Validators:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| elecprice_provider | `EOS_ELECPRICE__ELECPRICE_PROVIDER` | `Optional[str]` | `rw` | `None` | Electricity price provider id of provider to be used. |
+| provider | `EOS_ELECPRICE__PROVIDER` | `Optional[str]` | `rw` | `None` | Electricity price provider id of provider to be used. |
-| elecprice_charges_kwh | `EOS_ELECPRICE__ELECPRICE_CHARGES_KWH` | `Optional[float]` | `rw` | `None` | Electricity price charges (€/kWh). |
+| charges_kwh | `EOS_ELECPRICE__CHARGES_KWH` | `Optional[float]` | `rw` | `None` | Electricity price charges (€/kWh). |
 | provider_settings | `EOS_ELECPRICE__PROVIDER_SETTINGS` | `Optional[akkudoktoreos.prediction.elecpriceimport.ElecPriceImportCommonSettings]` | `rw` | `None` | Provider settings |
 :::

@@ -390,8 +390,8 @@ Validators:

 {
   "elecprice": {
-    "elecprice_provider": "ElecPriceAkkudoktor",
+    "provider": "ElecPriceAkkudoktor",
-    "elecprice_charges_kwh": 0.21,
+    "charges_kwh": 0.21,
     "provider_settings": null
   }
 }
@@ -405,8 +405,8 @@ Validators:

 | Name | Type | Read-Only | Default | Description |
 | ---- | ---- | --------- | ------- | ----------- |
-| elecpriceimport_file_path | `Union[str, pathlib.Path, NoneType]` | `rw` | `None` | Path to the file to import elecprice data from. |
+| import_file_path | `Union[str, pathlib.Path, NoneType]` | `rw` | `None` | Path to the file to import elecprice data from. |
-| elecpriceimport_json | `Optional[str]` | `rw` | `None` | JSON string, dictionary of electricity price forecast value lists. |
+| import_json | `Optional[str]` | `rw` | `None` | JSON string, dictionary of electricity price forecast value lists. |
 :::

 #### Example Input/Output
@@ -417,8 +417,8 @@ Validators:
 {
   "elecprice": {
     "provider_settings": {
-      "elecpriceimport_file_path": null,
+      "import_file_path": null,
-      "elecpriceimport_json": "{\"elecprice_marketprice_wh\": [0.0003384, 0.0003318, 0.0003284]}"
+      "import_json": "{\"elecprice_marketprice_wh\": [0.0003384, 0.0003318, 0.0003284]}"
     }
   }
 }
@@ -432,7 +432,7 @@ Validators:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| load_provider | `EOS_LOAD__LOAD_PROVIDER` | `Optional[str]` | `rw` | `None` | Load provider id of provider to be used. |
+| provider | `EOS_LOAD__PROVIDER` | `Optional[str]` | `rw` | `None` | Load provider id of provider to be used. |
 | provider_settings | `EOS_LOAD__PROVIDER_SETTINGS` | `Union[akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktorCommonSettings, akkudoktoreos.prediction.loadimport.LoadImportCommonSettings, NoneType]` | `rw` | `None` | Provider settings |
 :::

@@ -443,7 +443,7 @@ Validators:

 {
   "load": {
-    "load_provider": "LoadAkkudoktor",
+    "provider": "LoadAkkudoktor",
     "provider_settings": null
   }
 }
@@ -457,8 +457,8 @@ Validators:

 | Name | Type | Read-Only | Default | Description |
 | ---- | ---- | --------- | ------- | ----------- |
-| load_import_file_path | `Union[str, pathlib.Path, NoneType]` | `rw` | `None` | Path to the file to import load data from. |
+| import_file_path | `Union[str, pathlib.Path, NoneType]` | `rw` | `None` | Path to the file to import load data from. |
-| load_import_json | `Optional[str]` | `rw` | `None` | JSON string, dictionary of load forecast value lists. |
+| import_json | `Optional[str]` | `rw` | `None` | JSON string, dictionary of load forecast value lists. |
 :::

 #### Example Input/Output
@@ -469,8 +469,8 @@ Validators:
 {
   "load": {
     "provider_settings": {
-      "load_import_file_path": null,
+      "import_file_path": null,
-      "load_import_json": "{\"load0_mean\": [676.71, 876.19, 527.13]}"
+      "import_json": "{\"load0_mean\": [676.71, 876.19, 527.13]}"
     }
   }
 }
@@ -509,7 +509,7 @@ Validators:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| pvforecast_provider | `EOS_PVFORECAST__PVFORECAST_PROVIDER` | `Optional[str]` | `rw` | `None` | PVForecast provider id of provider to be used. |
+| provider | `EOS_PVFORECAST__PROVIDER` | `Optional[str]` | `rw` | `None` | PVForecast provider id of provider to be used. |
 | pvforecast0_surface_tilt | `EOS_PVFORECAST__PVFORECAST0_SURFACE_TILT` | `Optional[float]` | `rw` | `None` | Tilt angle from horizontal plane. Ignored for two-axis tracking. |
 | pvforecast0_surface_azimuth | `EOS_PVFORECAST__PVFORECAST0_SURFACE_AZIMUTH` | `Optional[float]` | `rw` | `None` | Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270). |
 | pvforecast0_userhorizon | `EOS_PVFORECAST__PVFORECAST0_USERHORIZON` | `Optional[List[float]]` | `rw` | `None` | Elevation of horizon in degrees, at equally spaced azimuth clockwise from north. |
@@ -622,7 +622,7 @@ Validators:

 {
   "pvforecast": {
-    "pvforecast_provider": "PVForecastAkkudoktor",
+    "provider": "PVForecastAkkudoktor",
     "pvforecast0_surface_tilt": 10.0,
     "pvforecast0_surface_azimuth": 10.0,
     "pvforecast0_userhorizon": [
@@ -739,7 +739,7 @@ Validators:

 {
   "pvforecast": {
-    "pvforecast_provider": "PVForecastAkkudoktor",
+    "provider": "PVForecastAkkudoktor",
     "pvforecast0_surface_tilt": 10.0,
     "pvforecast0_surface_azimuth": 10.0,
     "pvforecast0_userhorizon": [
@@ -916,7 +916,7 @@ Validators:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| weather_provider | `EOS_WEATHER__WEATHER_PROVIDER` | `Optional[str]` | `rw` | `None` | Weather provider id of provider to be used. |
+| provider | `EOS_WEATHER__PROVIDER` | `Optional[str]` | `rw` | `None` | Weather provider id of provider to be used. |
 | provider_settings | `EOS_WEATHER__PROVIDER_SETTINGS` | `Optional[akkudoktoreos.prediction.weatherimport.WeatherImportCommonSettings]` | `rw` | `None` | Provider settings |
 :::

@@ -927,7 +927,7 @@ Validators:

 {
   "weather": {
-    "weather_provider": "WeatherImport",
+    "provider": "WeatherImport",
     "provider_settings": null
   }
 }
@@ -941,8 +941,8 @@ Validators:

 | Name | Type | Read-Only | Default | Description |
 | ---- | ---- | --------- | ------- | ----------- |
-| weatherimport_file_path | `Union[str, pathlib.Path, NoneType]` | `rw` | `None` | Path to the file to import weather data from. |
+| import_file_path | `Union[str, pathlib.Path, NoneType]` | `rw` | `None` | Path to the file to import weather data from. |
-| weatherimport_json | `Optional[str]` | `rw` | `None` | JSON string, dictionary of weather forecast value lists. |
+| import_json | `Optional[str]` | `rw` | `None` | JSON string, dictionary of weather forecast value lists. |
 :::

 #### Example Input/Output
@@ -953,8 +953,8 @@ Validators:
 {
   "weather": {
     "provider_settings": {
-      "weatherimport_file_path": null,
+      "import_file_path": null,
-      "weatherimport_json": "{\"weather_temp_air\": [18.3, 17.8, 16.9]}"
+      "import_json": "{\"weather_temp_air\": [18.3, 17.8, 16.9]}"
     }
   }
 }
@@ -971,12 +971,12 @@ Attributes:

 | Name | Environment Variable | Type | Read-Only | Default | Description |
 | ---- | -------------------- | ---- | --------- | ------- | ----------- |
-| server_eos_host | `EOS_SERVER__SERVER_EOS_HOST` | `Optional[pydantic.networks.IPvAnyAddress]` | `rw` | `0.0.0.0` | EOS server IP address. |
+| host | `EOS_SERVER__HOST` | `Optional[pydantic.networks.IPvAnyAddress]` | `rw` | `0.0.0.0` | EOS server IP address. |
-| server_eos_port | `EOS_SERVER__SERVER_EOS_PORT` | `Optional[int]` | `rw` | `8503` | EOS server IP port number. |
+| port | `EOS_SERVER__PORT` | `Optional[int]` | `rw` | `8503` | EOS server IP port number. |
-| server_eos_verbose | `EOS_SERVER__SERVER_EOS_VERBOSE` | `Optional[bool]` | `rw` | `False` | Enable debug output |
+| verbose | `EOS_SERVER__VERBOSE` | `Optional[bool]` | `rw` | `False` | Enable debug output |
-| server_eos_startup_eosdash | `EOS_SERVER__SERVER_EOS_STARTUP_EOSDASH` | `Optional[bool]` | `rw` | `True` | EOS server to start EOSdash server. |
+| startup_eosdash | `EOS_SERVER__STARTUP_EOSDASH` | `Optional[bool]` | `rw` | `True` | EOS server to start EOSdash server. |
-| server_eosdash_host | `EOS_SERVER__SERVER_EOSDASH_HOST` | `Optional[pydantic.networks.IPvAnyAddress]` | `rw` | `0.0.0.0` | EOSdash server IP address. |
+| eosdash_host | `EOS_SERVER__EOSDASH_HOST` | `Optional[pydantic.networks.IPvAnyAddress]` | `rw` | `0.0.0.0` | EOSdash server IP address. |
-| server_eosdash_port | `EOS_SERVER__SERVER_EOSDASH_PORT` | `Optional[int]` | `rw` | `8504` | EOSdash server IP port number. |
+| eosdash_port | `EOS_SERVER__EOSDASH_PORT` | `Optional[int]` | `rw` | `8504` | EOSdash server IP port number. |
 :::

 ### Example Input/Output
@@ -986,12 +986,12 @@ Attributes:

 {
   "server": {
-    "server_eos_host": "0.0.0.0",
+    "host": "0.0.0.0",
-    "server_eos_port": 8503,
+    "port": 8503,
-    "server_eos_verbose": false,
+    "verbose": false,
-    "server_eos_startup_eosdash": true,
+    "startup_eosdash": true,
-    "server_eosdash_host": "0.0.0.0",
+    "eosdash_host": "0.0.0.0",
-    "server_eosdash_port": 8504
+    "eosdash_port": 8504
   }
 }
 ```
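Taken together, the per-section examples above describe a configuration document whose keys no longer repeat their section name. A speculative sketch that assembles those renamed sections into a single settings payload, using only the defaults and example values from the tables above (the exact file layout EOS expects is not shown in this diff):

```python
# Illustrative only: combines the renamed per-section examples from the tables above.
import json

settings = {
    "logging": {"level": "INFO"},
    "optimization": {"hours": 48, "penalty": 10},
    "prediction": {"hours": 48, "historic_hours": 48, "latitude": 52.52, "longitude": 13.405},
    "elecprice": {"provider": "ElecPriceAkkudoktor", "charges_kwh": 0.21},
    "load": {"provider": "LoadAkkudoktor"},
    "weather": {"provider": "WeatherImport"},
    "server": {"host": "0.0.0.0", "port": 8503, "eosdash_port": 8504},
    "measurement": {"load0_name": "Household"},
}

print(json.dumps(settings, indent=2))  # old prefixed keys (e.g. "elecprice_provider") are gone
```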
@@ -63,7 +63,7 @@ Args:
     year_energy (float): Yearly energy consumption in Wh.

 Note:
-    Set LoadAkkudoktor as load_provider, then update data with
+    Set LoadAkkudoktor as provider, then update data with
     '/v1/prediction/update'
     and then request data with
     '/v1/prediction/list?key=load_mean' instead.
@@ -121,7 +121,7 @@ If no forecast values are available the missing ones at the start of the series
 filled with the first available forecast value.

 Note:
-    Set PVForecastAkkudoktor as pvforecast_provider, then update data with
+    Set PVForecastAkkudoktor as provider, then update data with
     '/v1/prediction/update'
     and then request data with
     '/v1/prediction/list?key=pvforecast_ac_power' and
@@ -151,7 +151,7 @@ Note:
     Electricity price charges are added.

 Note:
-    Set ElecPriceAkkudoktor as elecprice_provider, then update data with
+    Set ElecPriceAkkudoktor as provider, then update data with
     '/v1/prediction/update'
     and then request data with
     '/v1/prediction/list?key=elecprice_marketprice_wh' or
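The notes above point to the `/v1/prediction/update` and `/v1/prediction/list` endpoints. A minimal sketch of that round trip, assuming the EOS server is reachable on the default port 8503 and that the `requests` package is installed; the HTTP method for the update call is also an assumption:

```python
# Sketch: trigger a prediction update, then read back a prediction series.
# Base URL and HTTP method are assumptions, not taken from this diff.
import requests

BASE_URL = "http://localhost:8503"

# Update the prediction providers configured via the renamed `provider` options.
requests.post(f"{BASE_URL}/v1/prediction/update", timeout=60).raise_for_status()

# Request the load prediction series mentioned in the note above.
response = requests.get(
    f"{BASE_URL}/v1/prediction/list", params={"key": "load_mean"}, timeout=60
)
response.raise_for_status()
print(response.json())
```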
@@ -55,7 +55,6 @@ following special environment variables:
 - `EOS_CONFIG_DIR`: The directory to search for an EOS configuration file.
 - `EOS_DIR`: The directory used by EOS for data, which will also be searched for an EOS
   configuration file.
-- `EOS_LOGGING_LEVEL`: The logging level to use in EOS.

 ### EOS Configuration File

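With `EOS_LOGGING_LEVEL` removed, the logging level is set like any other option, through the nested `EOS_LOGGING__LEVEL` variable from the configuration tables above, while the directory variables stay special. A short sketch of preparing such an environment in Python, reusing values that appear elsewhere in this diff:

```python
# Sketch: environment for an EOS process; variable names come from the documentation
# above, the config directory value is the one used in the compose file.
import os

os.environ["EOS_CONFIG_DIR"] = "config"           # directory searched for the EOS configuration file
os.environ["EOS_LOGGING__LEVEL"] = "INFO"         # replaces the removed EOS_LOGGING_LEVEL
os.environ["EOS_PREDICTION__LATITUDE"] = "52.2"   # renamed from plain `latitude`
os.environ["EOS_PREDICTION__LONGITUDE"] = "13.4"  # renamed from plain `longitude`
```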
@@ -56,21 +56,21 @@ A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html)
 The EOS measurement store provides for storing meter readings of loads. There are currently five loads
 foreseen. The associated `measurement key`s are:

-- `measurement_load0_mr`: Load0 meter reading [kWh]
+- `load0_mr`: Load0 meter reading [kWh]
-- `measurement_load1_mr`: Load1 meter reading [kWh]
+- `load1_mr`: Load1 meter reading [kWh]
-- `measurement_load2_mr`: Load2 meter reading [kWh]
+- `load2_mr`: Load2 meter reading [kWh]
-- `measurement_load3_mr`: Load3 meter reading [kWh]
+- `load3_mr`: Load3 meter reading [kWh]
-- `measurement_load4_mr`: Load4 meter reading [kWh]
+- `load4_mr`: Load4 meter reading [kWh]

 For ease of use, you can assign descriptive names to the `measurement key`s to represent your
 system's load sources. Use the following `configuration options` to set these names
 (e.g., 'Dish Washer', 'Heat Pump'):

-- `measurement_load0_name`: Name of the load0 source
+- `load0_name`: Name of the load0 source
-- `measurement_load1_name`: Name of the load1 source
+- `load1_name`: Name of the load1 source
-- `measurement_load2_name`: Name of the load2 source
+- `load2_name`: Name of the load2 source
-- `measurement_load3_name`: Name of the load3 source
+- `load3_name`: Name of the load3 source
-- `measurement_load4_name`: Name of the load4 source
+- `load4_name`: Name of the load4 source

 Load measurements can be stored for any datetime. The values between different meter readings are
 linearly approximated. Since optimization occurs on the hour, storing values between hours is
@@ -84,8 +84,8 @@ for specified intervals, usually one hour. This aggregated data can be used for
 The EOS measurement store also allows for the storage of meter readings for grid import and export.
 The associated `measurement key`s are:

-- `measurement_grid_export_mr`: Export to grid meter reading [kWh]
+- `grid_export_mr`: Export to grid meter reading [kWh]
-- `measurement_grid_import_mr`: Import from grid meter reading [kWh]
+- `grid_import_mr`: Import from grid meter reading [kWh]

 :::{admonition} Todo
 :class: note
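The descriptive load names above are ordinary configuration options, so they can be set through the renamed environment variables from the configuration tables, for example as sketched below (the names are the example values from this documentation):

```python
# Sketch: label the first two load measurement channels; load2..load4 stay unset.
import os

os.environ["EOS_MEASUREMENT__LOAD0_NAME"] = "Dish Washer"
os.environ["EOS_MEASUREMENT__LOAD1_NAME"] = "Heat Pump"

# Meter readings for these channels are then stored under `load0_mr` and `load1_mr`.
```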
@@ -22,7 +22,7 @@ Most predictions can be sourced from various providers. The specific provider to
 in the EOS configuration. For example:

 ```python
-weather_provider = "ClearOutside"
+provider = "ClearOutside"
 ```

 Some providers offer multiple prediction keys. For instance, a weather provider might provide data
@@ -71,7 +71,7 @@ predictions are adjusted by real data from your system's measurements if given t

 For example, the load prediction provider `LoadAkkudoktor` takes generic load data assembled by
 Akkudoktor.net, maps that to the yearly energy consumption given in the configuration option
-`loadakkudoktor_year_energy`, and finally adjusts the predicted load by the `measurement_loads`
+`loadakkudoktor_year_energy`, and finally adjusts the predicted load by the `loads`
 of your system.

 ## Prediction Updates
@@ -107,26 +107,26 @@ Prediction keys:

 Configuration options:

-- `elecprice_provider`: Electricity price provider id of provider to be used.
+- `provider`: Electricity price provider id of provider to be used.

-  - `ElecPriceAkkudoktor`: Retrieves from Akkudoktor.net.
+  - `Akkudoktor`: Retrieves from Akkudoktor.net.
-  - `ElecPriceImport`: Imports from a file or JSON string.
+  - `Import`: Imports from a file or JSON string.

-- `elecprice_charges_kwh`: Electricity price charges (€/kWh).
+- `charges_kwh`: Electricity price charges (€/kWh).
-- `elecpriceimport_file_path`: Path to the file to import electricity price forecast data from.
+- `import_file_path`: Path to the file to import electricity price forecast data from.
-- `elecpriceimport_json`: JSON string, dictionary of electricity price forecast value lists.
+- `import_json`: JSON string, dictionary of electricity price forecast value lists.

-### ElecPriceAkkudoktor Provider
+### Akkudoktor Provider

-The `ElecPriceAkkudoktor` provider retrieves electricity prices directly from **Akkudoktor.net**,
+The `Akkudoktor` provider retrieves electricity prices directly from **Akkudoktor.net**,
 which supplies price data for the next 24 hours. For periods beyond 24 hours, the provider generates
 prices by extrapolating historical price data combined with the most recent actual prices obtained
-from Akkudoktor.net. Electricity price charges given in the `elecprice_charges_kwh` configuration
+from Akkudoktor.net. Electricity price charges given in the `charges_kwh` configuration
 option are added.

-### ElecPriceImport Provider
+### Import Provider

-The `ElecPriceImport` provider is designed to import electricity prices from a file or a JSON
+The `Import` provider is designed to import electricity prices from a file or a JSON
 string. An external entity should update the file or JSON string whenever new prediction data
 becomes available.

@@ -136,7 +136,7 @@ The prediction key for the electricity price forecast data is:

 The electricity proce forecast data must be provided in one of the formats described in
 <project:#prediction-import-providers>. The data source must be given in the
-`elecpriceimport_file_path` or `elecpriceimport_json` configuration option.
+`import_file_path` or `import_json` configuration option.

 ## Load Prediction

@@ -148,7 +148,7 @@ Prediction keys:

 Configuration options:

-- `load_provider`: Load provider id of provider to be used.
+- `provider`: Load provider id of provider to be used.

   - `LoadAkkudoktor`: Retrieves from local database.
   - `LoadImport`: Imports from a file or JSON string.
@@ -183,12 +183,12 @@ or `loadimport_json` configuration option.

 Prediction keys:

-- `pvforecast_ac_power`: Total DC power (W).
+- `ac_power`: Total DC power (W).
-- `pvforecast_dc_power`: Total AC power (W).
+- `dc_power`: Total AC power (W).

 Configuration options:

-- `pvforecast_provider`: PVForecast provider id of provider to be used.
+- `provider`: PVForecast provider id of provider to be used.

   - `PVForecastAkkudoktor`: Retrieves from Akkudoktor.net.
   - `PVForecastImport`: Imports from a file or JSON string.
@@ -299,7 +299,7 @@ Example:
 {
   "latitude": 50.1234,
   "longitude": 9.7654,
-  "pvforecast_provider": "PVForecastAkkudoktor",
+  "provider": "PVForecastAkkudoktor",
   "pvforecast0_peakpower": 5.0,
   "pvforecast0_surface_azimuth": -10,
   "pvforecast0_surface_tilt": 7,
@@ -332,8 +332,8 @@ becomes available.

 The prediction keys for the PV forecast data are:

-- `pvforecast_ac_power`: Total DC power (W).
+- `ac_power`: Total DC power (W).
-- `pvforecast_dc_power`: Total AC power (W).
+- `dc_power`: Total AC power (W).

 The PV forecast data must be provided in one of the formats described in
 <project:#prediction-import-providers>. The data source must be given in the
@@ -368,14 +368,14 @@ Prediction keys:

 Configuration options:

-- `weather_provider`: Load provider id of provider to be used.
+- `provider`: Load provider id of provider to be used.

   - `BrightSky`: Retrieves from https://api.brightsky.dev.
   - `ClearOutside`: Retrieves from https://clearoutside.com/forecast.
   - `LoadImport`: Imports from a file or JSON string.

-- `weatherimport_file_path`: Path to the file to import weatherforecast data from.
+- `import_file_path`: Path to the file to import weatherforecast data from.
-- `weatherimport_json`: JSON string, dictionary of weather forecast value lists.
+- `import_json`: JSON string, dictionary of weather forecast value lists.

 ### BrightSky Provider

@@ -459,4 +459,4 @@ The prediction keys for the PV forecast data are:

 The PV forecast data must be provided in one of the formats described in
 <project:#prediction-import-providers>. The data source must be given in the
-`weatherimport_file_path` or `pvforecastimport_json` configuration option.
+`import_file_path` or `pvforecastimport_json` configuration option.
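For the import providers above, the forecast series is handed over as a JSON string in the renamed `import_json` option, or as a file referenced by `import_file_path`. A sketch of building such a string for the electricity price import, using the prediction key and sample values that appear in this documentation:

```python
# Sketch: build the JSON string for the electricity price import provider.
import json

marketprices_wh = [0.0003384, 0.0003318, 0.0003284]  # sample values from the docs above
import_json = json.dumps({"elecprice_marketprice_wh": marketprices_wh})

# The string goes into the renamed provider settings of the elecprice section:
provider_settings = {"import_json": import_json, "import_file_path": None}
print(provider_settings)
```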
openapi.json (432 changes)

@@ -244,7 +244,7 @@
 },
 "ConfigEOS": {
 "additionalProperties": false,
-"description": "Singleton configuration handler for the EOS application.\n\nConfigEOS extends `SettingsEOS` with support for default configuration paths and automatic\ninitialization.\n\n`ConfigEOS` ensures that only one instance of the class is created throughout the application,\nallowing consistent access to EOS configuration settings. This singleton instance loads\nconfiguration data from a predefined set of directories or creates a default configuration if\nnone is found.\n\nInitialization Process:\n - Upon instantiation, the singleton instance attempts to load a configuration file in this order:\n 1. The directory specified by the `EOS_CONFIG_DIR` environment variable\n 2. The directory specified by the `EOS_DIR` environment variable.\n 3. A platform specific default directory for EOS.\n 4. The current working directory.\n - The first available configuration file found in these directories is loaded.\n - If no configuration file is found, a default configuration file is created in the platform\n specific default directory, and default settings are loaded into it.\n\nAttributes from the loaded configuration are accessible directly as instance attributes of\n`ConfigEOS`, providing a centralized, shared configuration object for EOS.\n\nSingleton Behavior:\n - This class uses the `SingletonMixin` to ensure that all requests for `ConfigEOS` return\n the same instance, which contains the most up-to-date configuration. Modifying the configuration\n in one part of the application reflects across all references to this class.\n\nAttributes:\n config_folder_path (Optional[Path]): Path to the configuration directory.\n config_file_path (Optional[Path]): Path to the configuration file.\n\nRaises:\n FileNotFoundError: If no configuration file is found, and creating a default configuration fails.\n\nExample:\n To initialize and access configuration attributes (only one instance is created):\n ```python\n config_eos = ConfigEOS() # Always returns the same instance\n print(config_eos.prediction.prediction_hours) # Access a setting from the loaded configuration\n ```",
+"description": "Singleton configuration handler for the EOS application.\n\nConfigEOS extends `SettingsEOS` with support for default configuration paths and automatic\ninitialization.\n\n`ConfigEOS` ensures that only one instance of the class is created throughout the application,\nallowing consistent access to EOS configuration settings. This singleton instance loads\nconfiguration data from a predefined set of directories or creates a default configuration if\nnone is found.\n\nInitialization Process:\n - Upon instantiation, the singleton instance attempts to load a configuration file in this order:\n 1. The directory specified by the `EOS_CONFIG_DIR` environment variable\n 2. The directory specified by the `EOS_DIR` environment variable.\n 3. A platform specific default directory for EOS.\n 4. The current working directory.\n - The first available configuration file found in these directories is loaded.\n - If no configuration file is found, a default configuration file is created in the platform\n specific default directory, and default settings are loaded into it.\n\nAttributes from the loaded configuration are accessible directly as instance attributes of\n`ConfigEOS`, providing a centralized, shared configuration object for EOS.\n\nSingleton Behavior:\n - This class uses the `SingletonMixin` to ensure that all requests for `ConfigEOS` return\n the same instance, which contains the most up-to-date configuration. Modifying the configuration\n in one part of the application reflects across all references to this class.\n\nAttributes:\n config_folder_path (Optional[Path]): Path to the configuration directory.\n config_file_path (Optional[Path]): Path to the configuration file.\n\nRaises:\n FileNotFoundError: If no configuration file is found, and creating a default configuration fails.\n\nExample:\n To initialize and access configuration attributes (only one instance is created):\n ```python\n config_eos = ConfigEOS() # Always returns the same instance\n print(config_eos.prediction.hours) # Access a setting from the loaded configuration\n ```",
 "properties": {
 "devices": {
 "$ref": "#/components/schemas/DevicesCommonSettings",
@@ -268,7 +268,7 @@
 "logging": {
 "$ref": "#/components/schemas/LoggingCommonSettings-Output",
 "default": {
-"logging_level_root": "INFO"
+"root_level": "INFO"
 }
 },
 "measurement": {
@@ -278,7 +278,7 @@
 "optimization": {
 "$ref": "#/components/schemas/OptimizationCommonSettings",
 "default": {
-"optimization_ev_available_charge_rates_percent": [
+"ev_available_charge_rates_percent": [
 0.0,
 0.375,
 0.5,
@@ -287,17 +287,17 @@
 0.875,
 1.0
 ],
-"optimization_hours": 48,
+"hours": 48,
-"optimization_penalty": 10
+"penalty": 10
 }
 },
 "prediction": {
 "$ref": "#/components/schemas/PredictionCommonSettings-Output",
 "default": {
+"historic_hours": 48,
+"hours": 48,
 "latitude": 52.52,
 "longitude": 13.405,
-"prediction_historic_hours": 48,
-"prediction_hours": 48,
 "timezone": "Europe/Berlin"
 }
 },
@@ -345,12 +345,12 @@
 "server": {
 "$ref": "#/components/schemas/ServerCommonSettings",
 "default": {
-"server_eos_host": "0.0.0.0",
-"server_eos_port": 8503,
-"server_eos_startup_eosdash": true,
-"server_eos_verbose": false,
-"server_eosdash_host": "0.0.0.0",
-"server_eosdash_port": 8504
+"eosdash_host": "0.0.0.0",
+"eosdash_port": 8504,
+"host": "0.0.0.0",
+"port": 8503,
+"startup_eosdash": true,
+"verbose": false
 }
 },
 "utils": {
@@ -434,7 +434,7 @@
 "ElecPriceCommonSettings": {
 "description": "Electricity Price Prediction Configuration.",
 "properties": {
-"elecprice_charges_kwh": {
+"charges_kwh": {
 "anyOf": [
 {
 "minimum": 0.0,
@@ -448,9 +448,9 @@
 "examples": [
 0.21
 ],
-"title": "Elecprice Charges Kwh"
+"title": "Charges Kwh"
 },
-"elecprice_provider": {
+"provider": {
 "anyOf": [
 {
 "type": "string"
@@ -463,7 +463,7 @@
 "examples": [
 "ElecPriceAkkudoktor"
 ],
-"title": "Elecprice Provider"
+"title": "Provider"
 },
 "provider_settings": {
 "anyOf": [
@@ -486,7 +486,7 @@
 "ElecPriceImportCommonSettings": {
 "description": "Common settings for elecprice data import from file or JSON String.",
 "properties": {
-"elecpriceimport_file_path": {
+"import_file_path": {
 "anyOf": [
 {
 "type": "string"
@@ -504,9 +504,9 @@
 null,
 "/path/to/prices.json"
 ],
-"title": "Elecpriceimport File Path"
+"title": "Import File Path"
 },
-"elecpriceimport_json": {
+"import_json": {
 "anyOf": [
 {
 "type": "string"
@@ -519,7 +519,7 @@
 "examples": [
 "{\"elecprice_marketprice_wh\": [0.0003384, 0.0003318, 0.0003284]}"
 ],
-"title": "Elecpriceimport Json"
+"title": "Import Json"
 }
 },
 "title": "ElecPriceImportCommonSettings",
@@ -900,7 +900,7 @@
 "additionalProperties": false,
 "description": "Inverter Device Simulation Configuration.",
 "properties": {
-"battery": {
+"battery_id": {
 "anyOf": [
 {
 "type": "string"
@@ -914,7 +914,7 @@
 null,
 "battery1"
 ],
-"title": "Battery"
+"title": "Battery Id"
 },
 "device_id": {
 "description": "ID of inverter",
@@ -981,7 +981,7 @@
 "LoadCommonSettings": {
 "description": "Load Prediction Configuration.",
 "properties": {
-"load_provider": {
+"provider": {
 "anyOf": [
 {
 "type": "string"
@@ -994,7 +994,7 @@
 "examples": [
 "LoadAkkudoktor"
 ],
-"title": "Load Provider"
+"title": "Provider"
 },
 "provider_settings": {
 "anyOf": [
@@ -1021,7 +1021,7 @@
 "LoadImportCommonSettings": {
 "description": "Common settings for load data import from file or JSON string.",
 "properties": {
-"load_import_file_path": {
+"import_file_path": {
 "anyOf": [
 {
 "type": "string"
@@ -1039,9 +1039,9 @@
 null,
 "/path/to/yearly_load.json"
 ],
-"title": "Load Import File Path"
+"title": "Import File Path"
 },
-"load_import_json": {
+"import_json": {
 "anyOf": [
 {
 "type": "string"
@@ -1054,7 +1054,7 @@
 "examples": [
 "{\"load0_mean\": [676.71, 876.19, 527.13]}"
 ],
-"title": "Load Import Json"
+"title": "Import Json"
 }
 },
 "title": "LoadImportCommonSettings",
@@ -1063,7 +1063,7 @@
 "LoggingCommonSettings-Input": {
 "description": "Logging Configuration.",
 "properties": {
-"logging_level_default": {
+"level": {
 "anyOf": [
 {
 "type": "string"
@@ -1080,7 +1080,7 @@
 "ERROR",
 "CRITICAL"
 ],
-"title": "Logging Level Default"
+"title": "Level"
 }
 },
 "title": "LoggingCommonSettings",
@@ -1089,7 +1089,7 @@
 "LoggingCommonSettings-Output": {
 "description": "Logging Configuration.",
 "properties": {
-"logging_level_default": {
+"level": {
 "anyOf": [
 {
 "type": "string"
@@ -1106,17 +1106,17 @@
 "ERROR",
 "CRITICAL"
 ],
-"title": "Logging Level Default"
+"title": "Level"
 },
-"logging_level_root": {
+"root_level": {
 "description": "Root logger logging level.",
 "readOnly": true,
-"title": "Logging Level Root",
+"title": "Root Level",
 "type": "string"
 }
 },
 "required": [
-"logging_level_root"
+"root_level"
 ],
 "title": "LoggingCommonSettings",
 "type": "object"
@@ -1124,7 +1124,7 @@
 "MeasurementCommonSettings": {
 "description": "Measurement Configuration.",
 "properties": {
-"measurement_load0_name": {
+"load0_name": {
 "anyOf": [
 {
 "type": "string"
@@ -1138,9 +1138,9 @@
 "Household",
 "Heat Pump"
 ],
-"title": "Measurement Load0 Name"
+"title": "Load0 Name"
 },
-"measurement_load1_name": {
+"load1_name": {
 "anyOf": [
 {
 "type": "string"
@@ -1153,9 +1153,9 @@
 "examples": [
 null
 ],
-"title": "Measurement Load1 Name"
+"title": "Load1 Name"
 },
-"measurement_load2_name": {
+"load2_name": {
 "anyOf": [
 {
 "type": "string"
@@ -1168,9 +1168,9 @@
 "examples": [
 null
 ],
-"title": "Measurement Load2 Name"
+"title": "Load2 Name"
 },
-"measurement_load3_name": {
+"load3_name": {
 "anyOf": [
 {
 "type": "string"
@@ -1183,9 +1183,9 @@
 "examples": [
 null
 ],
-"title": "Measurement Load3 Name"
+"title": "Load3 Name"
 },
-"measurement_load4_name": {
+"load4_name": {
 "anyOf": [
 {
 "type": "string"
@@ -1198,16 +1198,16 @@
 "examples": [
 null
 ],
-"title": "Measurement Load4 Name"
+"title": "Load4 Name"
 }
 },
 "title": "MeasurementCommonSettings",
 "type": "object"
 },
 "OptimizationCommonSettings": {
-"description": "General Optimization Configuration.\n\nAttributes:\n optimization_hours (int): Number of hours for optimizations.",
+"description": "General Optimization Configuration.\n\nAttributes:\n hours (int): Number of hours for optimizations.",
 "properties": {
-"optimization_ev_available_charge_rates_percent": {
+"ev_available_charge_rates_percent": {
 "anyOf": [
 {
 "items": {
@@ -1229,9 +1229,9 @@
 1.0
 ],
 "description": "Charge rates available for the EV in percent of maximum charge.",
-"title": "Optimization Ev Available Charge Rates Percent"
+"title": "Ev Available Charge Rates Percent"
 },
-"optimization_hours": {
+"hours": {
 "anyOf": [
 {
 "minimum": 0.0,
@@ -1243,9 +1243,9 @@
 ],
 "default": 48,
 "description": "Number of hours into the future for optimizations.",
-"title": "Optimization Hours"
+"title": "Hours"
 },
-"optimization_penalty": {
+"penalty": {
 "anyOf": [
 {
 "type": "integer"
@@ -1256,7 +1256,7 @@
 ],
 "default": 10,
 "description": "Penalty factor used in optimization.",
-"title": "Optimization Penalty"
+"title": "Penalty"
 }
 },
 "title": "OptimizationCommonSettings",
@@ -1453,6 +1453,21 @@
 "PVForecastCommonSettings-Input": {
 "description": "PV Forecast Configuration.",
 "properties": {
+"provider": {
+"anyOf": [
+{
+"type": "string"
+},
+{
+"type": "null"
+}
+],
+"description": "PVForecast provider id of provider to be used.",
+"examples": [
+"PVForecastAkkudoktor"
+],
+"title": "Provider"
+},
 "provider_settings": {
 "anyOf": [
 {
@@ -2949,8 +2964,15 @@
 null
 ],
 "title": "Pvforecast5 Userhorizon"
-},
-"pvforecast_provider": {
+}
+},
+"title": "PVForecastCommonSettings",
+"type": "object"
+},
+"PVForecastCommonSettings-Output": {
+"description": "PV Forecast Configuration.",
+"properties": {
+"provider": {
 "anyOf": [
 {
 "type": "string"
@@ -2963,15 +2985,8 @@
 "examples": [
 "PVForecastAkkudoktor"
 ],
-"title": "Pvforecast Provider"
-}
-},
-"title": "PVForecastCommonSettings",
-"type": "object"
-},
-"PVForecastCommonSettings-Output": {
-"description": "PV Forecast Configuration.",
-"properties": {
+"title": "Provider"
+},
 "provider_settings": {
 "anyOf": [
 {
@@ -4514,21 +4529,6 @@
 "description": "Compute a list of the user horizon per active planes.",
|
||||||
"readOnly": true,
|
"readOnly": true,
|
||||||
"title": "Pvforecast Planes Userhorizon"
|
"title": "Pvforecast Planes Userhorizon"
|
||||||
},
|
|
||||||
"pvforecast_provider": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"description": "PVForecast provider id of provider to be used.",
|
|
||||||
"examples": [
|
|
||||||
"PVForecastAkkudoktor"
|
|
||||||
],
|
|
||||||
"title": "Pvforecast Provider"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": [
|
"required": [
|
||||||
@ -4585,8 +4585,36 @@
|
|||||||
"type": "object"
|
"type": "object"
|
||||||
},
|
},
|
||||||
"PredictionCommonSettings-Input": {
|
"PredictionCommonSettings-Input": {
|
||||||
"description": "General Prediction Configuration.\n\nThis class provides configuration for prediction settings, allowing users to specify\nparameters such as the forecast duration (in hours) and location (latitude and longitude).\nValidators ensure each parameter is within a specified range. A computed property, `timezone`,\ndetermines the time zone based on latitude and longitude.\n\nAttributes:\n prediction_hours (Optional[int]): Number of hours into the future for predictions.\n Must be non-negative.\n prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.\n Must be non-negative.\n latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.\n longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.\n\nProperties:\n timezone (Optional[str]): Computed time zone string based on the specified latitude\n and longitude.\n\nValidators:\n validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.\n validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.\n validate_latitude (float): Ensures `latitude` is within the range -90 to 90.\n validate_longitude (float): Ensures `longitude` is within the range -180 to 180.",
|
"description": "General Prediction Configuration.\n\nThis class provides configuration for prediction settings, allowing users to specify\nparameters such as the forecast duration (in hours) and location (latitude and longitude).\nValidators ensure each parameter is within a specified range. A computed property, `timezone`,\ndetermines the time zone based on latitude and longitude.\n\nAttributes:\n hours (Optional[int]): Number of hours into the future for predictions.\n Must be non-negative.\n historic_hours (Optional[int]): Number of hours into the past for historical data.\n Must be non-negative.\n latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.\n longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.\n\nProperties:\n timezone (Optional[str]): Computed time zone string based on the specified latitude\n and longitude.\n\nValidators:\n validate_hours (int): Ensures `hours` is a non-negative integer.\n validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.\n validate_latitude (float): Ensures `latitude` is within the range -90 to 90.\n validate_longitude (float): Ensures `longitude` is within the range -180 to 180.",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
"historic_hours": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"minimum": 0.0,
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": 48,
|
||||||
|
"description": "Number of hours into the past for historical predictions data",
|
||||||
|
"title": "Historic Hours"
|
||||||
|
},
|
||||||
|
"hours": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"minimum": 0.0,
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": 48,
|
||||||
|
"description": "Number of hours into the future for predictions",
|
||||||
|
"title": "Hours"
|
||||||
|
},
|
||||||
"latitude": {
|
"latitude": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
@ -4616,42 +4644,42 @@
|
|||||||
"default": 13.405,
|
"default": 13.405,
|
||||||
"description": "Longitude in decimal degrees, within -180 to 180 (\u00b0)",
|
"description": "Longitude in decimal degrees, within -180 to 180 (\u00b0)",
|
||||||
"title": "Longitude"
|
"title": "Longitude"
|
||||||
},
|
|
||||||
"prediction_historic_hours": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"minimum": 0.0,
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": 48,
|
|
||||||
"description": "Number of hours into the past for historical predictions data",
|
|
||||||
"title": "Prediction Historic Hours"
|
|
||||||
},
|
|
||||||
"prediction_hours": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"minimum": 0.0,
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": 48,
|
|
||||||
"description": "Number of hours into the future for predictions",
|
|
||||||
"title": "Prediction Hours"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"title": "PredictionCommonSettings",
|
"title": "PredictionCommonSettings",
|
||||||
"type": "object"
|
"type": "object"
|
||||||
},
|
},
|
||||||
"PredictionCommonSettings-Output": {
|
"PredictionCommonSettings-Output": {
|
||||||
"description": "General Prediction Configuration.\n\nThis class provides configuration for prediction settings, allowing users to specify\nparameters such as the forecast duration (in hours) and location (latitude and longitude).\nValidators ensure each parameter is within a specified range. A computed property, `timezone`,\ndetermines the time zone based on latitude and longitude.\n\nAttributes:\n prediction_hours (Optional[int]): Number of hours into the future for predictions.\n Must be non-negative.\n prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.\n Must be non-negative.\n latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.\n longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.\n\nProperties:\n timezone (Optional[str]): Computed time zone string based on the specified latitude\n and longitude.\n\nValidators:\n validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.\n validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.\n validate_latitude (float): Ensures `latitude` is within the range -90 to 90.\n validate_longitude (float): Ensures `longitude` is within the range -180 to 180.",
|
"description": "General Prediction Configuration.\n\nThis class provides configuration for prediction settings, allowing users to specify\nparameters such as the forecast duration (in hours) and location (latitude and longitude).\nValidators ensure each parameter is within a specified range. A computed property, `timezone`,\ndetermines the time zone based on latitude and longitude.\n\nAttributes:\n hours (Optional[int]): Number of hours into the future for predictions.\n Must be non-negative.\n historic_hours (Optional[int]): Number of hours into the past for historical data.\n Must be non-negative.\n latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.\n longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.\n\nProperties:\n timezone (Optional[str]): Computed time zone string based on the specified latitude\n and longitude.\n\nValidators:\n validate_hours (int): Ensures `hours` is a non-negative integer.\n validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.\n validate_latitude (float): Ensures `latitude` is within the range -90 to 90.\n validate_longitude (float): Ensures `longitude` is within the range -180 to 180.",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
"historic_hours": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"minimum": 0.0,
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": 48,
|
||||||
|
"description": "Number of hours into the past for historical predictions data",
|
||||||
|
"title": "Historic Hours"
|
||||||
|
},
|
||||||
|
"hours": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"minimum": 0.0,
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": 48,
|
||||||
|
"description": "Number of hours into the future for predictions",
|
||||||
|
"title": "Hours"
|
||||||
|
},
|
||||||
"latitude": {
|
"latitude": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
@ -4682,34 +4710,6 @@
|
|||||||
"description": "Longitude in decimal degrees, within -180 to 180 (\u00b0)",
|
"description": "Longitude in decimal degrees, within -180 to 180 (\u00b0)",
|
||||||
"title": "Longitude"
|
"title": "Longitude"
|
||||||
},
|
},
|
||||||
"prediction_historic_hours": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"minimum": 0.0,
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": 48,
|
|
||||||
"description": "Number of hours into the past for historical predictions data",
|
|
||||||
"title": "Prediction Historic Hours"
|
|
||||||
},
|
|
||||||
"prediction_hours": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"minimum": 0.0,
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": 48,
|
|
||||||
"description": "Number of hours into the future for predictions",
|
|
||||||
"title": "Prediction Hours"
|
|
||||||
},
|
|
||||||
"timezone": {
|
"timezone": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
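With the `prediction_` prefix gone, the general prediction options above read as a plain nested section. A minimal sketch of the renamed keys, using the same values that appear in the test settings later in this commit:

```python
# Renamed prediction section: "hours" and "historic_hours" replace
# "prediction_hours" and "prediction_historic_hours".
prediction_settings = {
    "prediction": {
        "hours": 48,           # forecast window into the future
        "historic_hours": 24,  # window into the past for historical data
        "latitude": 52.52,
        "longitude": 13.405,
    }
}
```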
@ -4838,60 +4838,7 @@
|
|||||||
"ServerCommonSettings": {
|
"ServerCommonSettings": {
|
||||||
"description": "Server Configuration.\n\nAttributes:\n To be added",
|
"description": "Server Configuration.\n\nAttributes:\n To be added",
|
||||||
"properties": {
|
"properties": {
|
||||||
"server_eos_host": {
|
"eosdash_host": {
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"format": "ipvanyaddress",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": "0.0.0.0",
|
|
||||||
"description": "EOS server IP address.",
|
|
||||||
"title": "Server Eos Host"
|
|
||||||
},
|
|
||||||
"server_eos_port": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": 8503,
|
|
||||||
"description": "EOS server IP port number.",
|
|
||||||
"title": "Server Eos Port"
|
|
||||||
},
|
|
||||||
"server_eos_startup_eosdash": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": true,
|
|
||||||
"description": "EOS server to start EOSdash server.",
|
|
||||||
"title": "Server Eos Startup Eosdash"
|
|
||||||
},
|
|
||||||
"server_eos_verbose": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"default": false,
|
|
||||||
"description": "Enable debug output",
|
|
||||||
"title": "Server Eos Verbose"
|
|
||||||
},
|
|
||||||
"server_eosdash_host": {
|
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
"format": "ipvanyaddress",
|
"format": "ipvanyaddress",
|
||||||
@ -4903,9 +4850,9 @@
|
|||||||
],
|
],
|
||||||
"default": "0.0.0.0",
|
"default": "0.0.0.0",
|
||||||
"description": "EOSdash server IP address.",
|
"description": "EOSdash server IP address.",
|
||||||
"title": "Server Eosdash Host"
|
"title": "Eosdash Host"
|
||||||
},
|
},
|
||||||
"server_eosdash_port": {
|
"eosdash_port": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
"type": "integer"
|
"type": "integer"
|
||||||
@ -4916,7 +4863,60 @@
|
|||||||
],
|
],
|
||||||
"default": 8504,
|
"default": 8504,
|
||||||
"description": "EOSdash server IP port number.",
|
"description": "EOSdash server IP port number.",
|
||||||
"title": "Server Eosdash Port"
|
"title": "Eosdash Port"
|
||||||
|
},
|
||||||
|
"host": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"format": "ipvanyaddress",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": "0.0.0.0",
|
||||||
|
"description": "EOS server IP address.",
|
||||||
|
"title": "Host"
|
||||||
|
},
|
||||||
|
"port": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": 8503,
|
||||||
|
"description": "EOS server IP port number.",
|
||||||
|
"title": "Port"
|
||||||
|
},
|
||||||
|
"startup_eosdash": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": true,
|
||||||
|
"description": "EOS server to start EOSdash server.",
|
||||||
|
"title": "Startup Eosdash"
|
||||||
|
},
|
||||||
|
"verbose": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default": false,
|
||||||
|
"description": "Enable debug output",
|
||||||
|
"title": "Verbose"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"title": "ServerCommonSettings",
|
"title": "ServerCommonSettings",
|
||||||
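The server options lose their `server_eos_` and `server_eosdash_` prefixes in the same way: host, port and the EOSdash fields become plain names inside the `server` section. A sketch of the renamed section with the defaults shown in the schema above:

```python
# Defaults taken from the ServerCommonSettings schema in this diff.
server_settings = {
    "server": {
        "host": "0.0.0.0",          # was server_eos_host
        "port": 8503,               # was server_eos_port
        "eosdash_host": "0.0.0.0",  # was server_eosdash_host
        "eosdash_port": 8504,       # was server_eosdash_port
        "startup_eosdash": True,    # was server_eos_startup_eosdash
        "verbose": False,           # was server_eos_verbose
    }
}
```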
@ -5390,6 +5390,21 @@
|
|||||||
"WeatherCommonSettings": {
|
"WeatherCommonSettings": {
|
||||||
"description": "Weather Forecast Configuration.",
|
"description": "Weather Forecast Configuration.",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
"provider": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Weather provider id of provider to be used.",
|
||||||
|
"examples": [
|
||||||
|
"WeatherImport"
|
||||||
|
],
|
||||||
|
"title": "Provider"
|
||||||
|
},
|
||||||
"provider_settings": {
|
"provider_settings": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
@ -5403,21 +5418,6 @@
|
|||||||
"examples": [
|
"examples": [
|
||||||
null
|
null
|
||||||
]
|
]
|
||||||
},
|
|
||||||
"weather_provider": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"description": "Weather provider id of provider to be used.",
|
|
||||||
"examples": [
|
|
||||||
"WeatherImport"
|
|
||||||
],
|
|
||||||
"title": "Weather Provider"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"title": "WeatherCommonSettings",
|
"title": "WeatherCommonSettings",
|
||||||
@ -5426,7 +5426,7 @@
|
|||||||
"WeatherImportCommonSettings": {
|
"WeatherImportCommonSettings": {
|
||||||
"description": "Common settings for weather data import from file or JSON string.",
|
"description": "Common settings for weather data import from file or JSON string.",
|
||||||
"properties": {
|
"properties": {
|
||||||
"weatherimport_file_path": {
|
"import_file_path": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
"type": "string"
|
"type": "string"
|
||||||
@ -5444,9 +5444,9 @@
|
|||||||
null,
|
null,
|
||||||
"/path/to/weather_data.json"
|
"/path/to/weather_data.json"
|
||||||
],
|
],
|
||||||
"title": "Weatherimport File Path"
|
"title": "Import File Path"
|
||||||
},
|
},
|
||||||
"weatherimport_json": {
|
"import_json": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
"type": "string"
|
"type": "string"
|
||||||
@ -5459,7 +5459,7 @@
|
|||||||
"examples": [
|
"examples": [
|
||||||
"{\"weather_temp_air\": [18.3, 17.8, 16.9]}"
|
"{\"weather_temp_air\": [18.3, 17.8, 16.9]}"
|
||||||
],
|
],
|
||||||
"title": "Weatherimport Json"
|
"title": "Import Json"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"title": "WeatherImportCommonSettings",
|
"title": "WeatherImportCommonSettings",
|
||||||
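For the weather forecast the provider id likewise moves to `weather.provider`, and the import helper drops its `weatherimport_` prefix. The sketch below assumes the import options are passed through `provider_settings`, which the schema suggests here but does not spell out:

```python
import json

# Provider id moves from "weather_provider" to "provider".
weather_settings = {
    "weather": {
        "provider": "WeatherImport",
        # Assumption: WeatherImportCommonSettings is nested under provider_settings.
        "provider_settings": {
            "import_json": json.dumps({"weather_temp_air": [18.3, 17.8, 16.9]}),
        },
    }
}
```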
@@ -5519,7 +5519,7 @@
 },
 "/gesamtlast_simple": {
 "get": {
-"description": "Deprecated: Total Load Prediction.\n\nEndpoint to handle total load prediction.\n\nTotal load prediction starts at 00.00.00 today and is provided for 48 hours.\nIf no prediction values are available the missing ones at the start of the series are\nfilled with the first available prediction value.\n\nArgs:\n year_energy (float): Yearly energy consumption in Wh.\n\nNote:\n Set LoadAkkudoktor as load_provider, then update data with\n '/v1/prediction/update'\n and then request data with\n '/v1/prediction/list?key=load_mean' instead.",
+"description": "Deprecated: Total Load Prediction.\n\nEndpoint to handle total load prediction.\n\nTotal load prediction starts at 00.00.00 today and is provided for 48 hours.\nIf no prediction values are available the missing ones at the start of the series are\nfilled with the first available prediction value.\n\nArgs:\n year_energy (float): Yearly energy consumption in Wh.\n\nNote:\n Set LoadAkkudoktor as provider, then update data with\n '/v1/prediction/update'\n and then request data with\n '/v1/prediction/list?key=load_mean' instead.",
 "operationId": "fastapi_gesamtlast_simple_gesamtlast_simple_get",
 "parameters": [
 {
@@ -5621,7 +5621,7 @@
 },
 "/pvforecast": {
 "get": {
-"description": "Deprecated: PV Forecast Prediction.\n\nEndpoint to handle PV forecast prediction.\n\nPVForecast starts at 00.00.00 today and is provided for 48 hours.\nIf no forecast values are available the missing ones at the start of the series are\nfilled with the first available forecast value.\n\nNote:\n Set PVForecastAkkudoktor as pvforecast_provider, then update data with\n '/v1/prediction/update'\n and then request data with\n '/v1/prediction/list?key=pvforecast_ac_power' and\n '/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.",
+"description": "Deprecated: PV Forecast Prediction.\n\nEndpoint to handle PV forecast prediction.\n\nPVForecast starts at 00.00.00 today and is provided for 48 hours.\nIf no forecast values are available the missing ones at the start of the series are\nfilled with the first available forecast value.\n\nNote:\n Set PVForecastAkkudoktor as provider, then update data with\n '/v1/prediction/update'\n and then request data with\n '/v1/prediction/list?key=pvforecast_ac_power' and\n '/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.",
 "operationId": "fastapi_pvforecast_pvforecast_get",
 "responses": {
 "200": {
@@ -5640,7 +5640,7 @@
 },
 "/strompreis": {
 "get": {
-"description": "Deprecated: Electricity Market Price Prediction per Wh (\u20ac/Wh).\n\nElectricity prices start at 00.00.00 today and are provided for 48 hours.\nIf no prices are available the missing ones at the start of the series are\nfilled with the first available price.\n\nNote:\n Electricity price charges are added.\n\nNote:\n Set ElecPriceAkkudoktor as elecprice_provider, then update data with\n '/v1/prediction/update'\n and then request data with\n '/v1/prediction/list?key=elecprice_marketprice_wh' or\n '/v1/prediction/list?key=elecprice_marketprice_kwh' instead.",
+"description": "Deprecated: Electricity Market Price Prediction per Wh (\u20ac/Wh).\n\nElectricity prices start at 00.00.00 today and are provided for 48 hours.\nIf no prices are available the missing ones at the start of the series are\nfilled with the first available price.\n\nNote:\n Electricity price charges are added.\n\nNote:\n Set ElecPriceAkkudoktor as provider, then update data with\n '/v1/prediction/update'\n and then request data with\n '/v1/prediction/list?key=elecprice_marketprice_wh' or\n '/v1/prediction/list?key=elecprice_marketprice_kwh' instead.",
 "operationId": "fastapi_strompreis_strompreis_get",
 "responses": {
 "200": {
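The deprecation notes above all describe the same migration path: configure a provider, trigger `/v1/prediction/update`, then read `/v1/prediction/list` with the desired key. A rough client sketch of that flow; the base URL, the use of `requests`, and the HTTP verb for the update call are assumptions for illustration only.

```python
import requests

BASE_URL = "http://localhost:8503"  # default EOS server port from the schema

# Trigger a prediction update (HTTP method assumed; check the OpenAPI schema).
requests.post(f"{BASE_URL}/v1/prediction/update", timeout=30)

# Read the electricity price series instead of the deprecated /strompreis endpoint.
resp = requests.get(
    f"{BASE_URL}/v1/prediction/list",
    params={"key": "elecprice_marketprice_kwh"},
    timeout=30,
)
print(resp.json())
```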
@@ -31,14 +31,14 @@ def prepare_optimization_real_parameters() -> OptimizationParameters:
     # Make a config
     settings = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         },
         # PV Forecast
         "pvforecast": {
-            "pvforecast_provider": "PVForecastAkkudoktor",
+            "provider": "PVForecastAkkudoktor",
             "pvforecast0_peakpower": 5.0,
             "pvforecast0_surface_azimuth": -10,
             "pvforecast0_surface_tilt": 7,
@@ -63,15 +63,15 @@ def prepare_optimization_real_parameters() -> OptimizationParameters:
         },
         # Weather Forecast
         "weather": {
-            "weather_provider": "ClearOutside",
+            "provider": "ClearOutside",
         },
         # Electricity Price Forecast
         "elecprice": {
-            "elecprice_provider": "ElecPriceAkkudoktor",
+            "provider": "Akkudoktor",
         },
         # Load Forecast
         "load": {
-            "load_provider": "LoadAkkudoktor",
+            "provider": "LoadAkkudoktor",
             "provider_settings": {
                 "loadakkudoktor_year_energy": 5000,  # Energy consumption per year in kWh
             },
@@ -144,7 +144,7 @@ def prepare_optimization_real_parameters() -> OptimizationParameters:
             "initial_soc_percentage": 15,
             "min_soc_percentage": 15,
         },
-        "inverter": {"device_id": "iv1", "max_power_wh": 10000, "battery": "battery1"},
+        "inverter": {"device_id": "iv1", "max_power_wh": 10000, "battery_id": "battery1"},
         "eauto": {
             "device_id": "ev1",
             "min_soc_percentage": 50,
@@ -341,7 +341,7 @@ def run_optimization(
     # Initialize the optimization problem using the default configuration
     config_eos = get_config()
     config_eos.merge_settings_from_dict(
-        {"prediction": {"prediction_hours": 48}, "optimization": {"optimization_hours": 48}}
+        {"prediction": {"hours": 48}, "optimization": {"hours": 48}}
     )
     opt_class = optimization_problem(verbose=verbose, fixed_seed=seed)
@@ -17,13 +17,13 @@ def config_pvforecast() -> dict:
     """Configure settings for PV forecast."""
     settings = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         },
         "pvforecast": {
-            "pvforecast_provider": "PVForecastAkkudoktor",
+            "provider": "PVForecastAkkudoktor",
             "pvforecast0_peakpower": 5.0,
             "pvforecast0_surface_azimuth": -10,
             "pvforecast0_surface_tilt": 7,
@@ -54,8 +54,8 @@ def config_weather() -> dict:
     """Configure settings for weather forecast."""
     settings = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         },
@@ -68,8 +68,8 @@ def config_elecprice() -> dict:
     """Configure settings for electricity price forecast."""
     settings = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         },
@@ -82,8 +82,8 @@ def config_load() -> dict:
     """Configure settings for load forecast."""
     settings = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         }
@@ -108,17 +108,17 @@ def run_prediction(provider_id: str, verbose: bool = False) -> str:
     print(f"\nProvider ID: {provider_id}")
     if provider_id in ("PVForecastAkkudoktor",):
         settings = config_pvforecast()
-        settings["pvforecast"]["pvforecast_provider"] = provider_id
+        settings["pvforecast"]["provider"] = provider_id
     elif provider_id in ("BrightSky", "ClearOutside"):
         settings = config_weather()
-        settings["weather"]["weather_provider"] = provider_id
+        settings["weather"]["provider"] = provider_id
-    elif provider_id in ("ElecPriceAkkudoktor",):
+    elif provider_id in ("Akkudoktor",):
         settings = config_elecprice()
-        settings["elecprice"]["elecprice_provider"] = provider_id
+        settings["elecprice"]["provider"] = provider_id
     elif provider_id in ("LoadAkkudoktor",):
         settings = config_elecprice()
         settings["load"]["loadakkudoktor_year_energy"] = 1000
-        settings["load"]["load_provider"] = provider_id
+        settings["load"]["provider"] = provider_id
     else:
         raise ValueError(f"Unknown provider '{provider_id}'.")
     config_eos.merge_settings_from_dict(settings)
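The hunks above show the pattern of the rename: the section-specific prefix disappears from the key while the section name stays the same. A small sketch of that mapping, useful when updating an existing settings dict by hand; the mapping only lists keys that appear in this diff.

```python
# Old flat key -> (section, new key), taken from the renames in this commit.
RENAMES = {
    "prediction_hours": ("prediction", "hours"),
    "prediction_historic_hours": ("prediction", "historic_hours"),
    "pvforecast_provider": ("pvforecast", "provider"),
    "weather_provider": ("weather", "provider"),
    "elecprice_provider": ("elecprice", "provider"),
    "load_provider": ("load", "provider"),
    "optimization_hours": ("optimization", "hours"),
    "optimization_penalty": ("optimization", "penalty"),
}


def migrate_flat_settings(old: dict) -> dict:
    """Rebuild a nested settings dict from old prefixed keys (illustration only)."""
    new: dict = {}
    for key, value in old.items():
        section, field = RENAMES.get(key, (None, key))
        if section is None:
            new[key] = value  # key unknown to this sketch: keep as-is
        else:
            new.setdefault(section, {})[field] = value
    return new


print(migrate_flat_settings({"prediction_hours": 48, "elecprice_provider": "Akkudoktor"}))
```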
@@ -179,7 +179,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
     To initialize and access configuration attributes (only one instance is created):
     ```python
     config_eos = ConfigEOS() # Always returns the same instance
-    print(config_eos.prediction.prediction_hours) # Access a setting from the loaded configuration
+    print(config_eos.prediction.hours) # Access a setting from the loaded configuration
     ```
 
     """
@@ -328,7 +328,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
 
         Example:
            >>> config = get_config()
-           >>> new_data = {"prediction": {"prediction_hours": 24}, "server": {"server_eos_port": 8000}}
+           >>> new_data = {"prediction": {"hours": 24}, "server": {"port": 8000}}
            >>> config.merge_settings_from_dict(new_data)
         """
         self._setup(**merge_models(self, data))
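Read together, the two docstring hunks above give the complete new-style workflow: obtain the singleton, merge a nested dict, and read the value back under its new name via attribute access that mirrors the nested sections. A compact sketch of that round trip; the import path is an assumption based on the package layout, the rest follows the docstrings.

```python
# Import path assumed; the diff only shows get_config() being called.
from akkudoktoreos.config.config import get_config

config_eos = get_config()  # singleton, always the same instance
config_eos.merge_settings_from_dict(
    {"prediction": {"hours": 24}, "server": {"port": 8000}}
)
print(config_eos.prediction.hours)  # -> 24
print(config_eos.server.port)       # -> 8000
```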
@@ -198,9 +198,9 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
         self.ev = ev
         self.home_appliance = home_appliance
         self.inverter = inverter
-        self.ac_charge_hours = np.full(self.config.prediction.prediction_hours, 0.0)
-        self.dc_charge_hours = np.full(self.config.prediction.prediction_hours, 1.0)
-        self.ev_charge_hours = np.full(self.config.prediction.prediction_hours, 0.0)
+        self.ac_charge_hours = np.full(self.config.prediction.hours, 0.0)
+        self.dc_charge_hours = np.full(self.config.prediction.hours, 1.0)
+        self.ev_charge_hours = np.full(self.config.prediction.hours, 0.0)
 
     def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
         if self.battery is not None:
@@ -251,7 +251,7 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
             error_msg = "Start datetime unknown."
             logger.error(error_msg)
             raise ValueError(error_msg)
-        if self.config.prediction.prediction_hours is None:
+        if self.config.prediction.hours is None:
             error_msg = "Prediction hours unknown."
             logger.error(error_msg)
             raise ValueError(error_msg)
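The renamed `prediction.hours` value directly sizes the hourly charge arrays above. A tiny sketch of that sizing with plain NumPy, using the default of 48 hours:

```python
import numpy as np

hours = 48  # config.prediction.hours default
ac_charge_hours = np.full(hours, 0.0)
dc_charge_hours = np.full(hours, 1.0)
ev_charge_hours = np.full(hours, 0.0)
print(ac_charge_hours.shape)  # (48,)
```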
@@ -4,7 +4,6 @@ Kept in an extra module to avoid cyclic dependencies on package import.
 """
 
 import logging
-import os
 from typing import Optional
 
 from pydantic import Field, computed_field, field_validator
@@ -16,21 +15,18 @@ from akkudoktoreos.core.logabc import logging_str_to_level
 class LoggingCommonSettings(SettingsBaseModel):
     """Logging Configuration."""
 
-    logging_level_default: Optional[str] = Field(
+    level: Optional[str] = Field(
         default=None,
         description="EOS default logging level.",
         examples=["INFO", "DEBUG", "WARNING", "ERROR", "CRITICAL"],
     )
 
     # Validators
-    @field_validator("logging_level_default", mode="after")
+    @field_validator("level", mode="after")
     @classmethod
     def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
         if isinstance(value, str) and value.upper() == "NONE":
             value = None
-        if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
-            # Take default logging level from special environment variable
-            value = env_level
         if value is None:
             return None
         level = logging_str_to_level(value)
@@ -40,7 +36,7 @@ class LoggingCommonSettings(SettingsBaseModel):
     # Computed fields
     @computed_field  # type: ignore[prop-decorator]
     @property
-    def logging_level_root(self) -> str:
+    def root_level(self) -> str:
         """Root logger logging level."""
         level = logging.getLogger().getEffectiveLevel()
         level_name = logging.getLevelName(level)
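The computed `root_level` property simply reports the effective level of Python's root logger, and the removal of the `os.getenv("EOS_LOGGING_LEVEL")` fallback means the default level now arrives through the regular settings path. A standard-library sketch of what the property returns:

```python
import logging

logging.basicConfig(level=logging.INFO)
root_level = logging.getLevelName(logging.getLogger().getEffectiveLevel())
print(root_level)  # -> "INFO"
```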
@@ -51,16 +51,16 @@ class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
     @computed_field  # type: ignore[prop-decorator]
     @property
     def end_datetime(self) -> Optional[DateTime]:
-        """Compute the end datetime based on the `start_datetime` and `prediction_hours`.
+        """Compute the end datetime based on the `start_datetime` and `hours`.
 
         Ajusts the calculated end time if DST transitions occur within the prediction window.
 
         Returns:
             Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
         """
-        if self.ems.start_datetime and self.config.prediction.prediction_hours:
+        if self.ems.start_datetime and self.config.prediction.hours:
             end_datetime = self.ems.start_datetime + to_duration(
-                f"{self.config.prediction.prediction_hours} hours"
+                f"{self.config.prediction.hours} hours"
             )
             dst_change = end_datetime.offset_hours - self.ems.start_datetime.offset_hours
             logger.debug(
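The `end_datetime` property adds `prediction.hours` to the start time and then compares UTC offsets to detect a DST jump inside the window. A sketch of the same arithmetic with pendulum; both the use of pendulum here and the concrete dates are assumptions for illustration.

```python
import pendulum

hours = 48
start = pendulum.datetime(2025, 3, 29, 0, 0, tz="Europe/Berlin")  # day before DST starts
end = start.add(hours=hours)

# A non-zero offset change means the window crossed a DST transition.
dst_change = end.offset_hours - start.offset_hours
print(end, dst_change)  # offset grows from +1 to +2, so dst_change == 1
```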
@@ -18,9 +18,9 @@ class Heatpump:
     COP_COEFFICIENT = 0.1
     """COP increase per degree"""
 
-    def __init__(self, max_heat_output: int, prediction_hours: int):
+    def __init__(self, max_heat_output: int, hours: int):
         self.max_heat_output = max_heat_output
-        self.prediction_hours = prediction_hours
+        self.hours = hours
         self.log = logging.getLogger(__name__)
 
     def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:
@@ -117,9 +117,9 @@ class Heatpump:
         """Simulate power data for 24 hours based on provided temperatures."""
         power_data: List[float] = []
 
-        if len(temperatures) != self.prediction_hours:
+        if len(temperatures) != self.hours:
             raise ValueError(
-                f"The temperature array must contain exactly {self.prediction_hours} entries, "
+                f"The temperature array must contain exactly {self.hours} entries, "
                 "one for each hour of the day."
             )
 
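After the rename the temperature series passed to the heat pump simulation must match `hours` exactly, one value per hour. A standalone sketch of that length check, kept independent of the project class:

```python
from typing import List


def check_temperature_series(temperatures: List[float], hours: int) -> None:
    """Raise if the series does not provide one temperature per simulated hour."""
    if len(temperatures) != hours:
        raise ValueError(
            f"The temperature array must contain exactly {hours} entries, "
            "one for each hour of the day."
        )


check_temperature_series([5.0] * 24, hours=24)  # passes
```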
@@ -14,7 +14,7 @@ class InverterParameters(DeviceParameters):
 
     device_id: str = Field(description="ID of inverter", examples=["inverter1"])
     max_power_wh: float = Field(gt=0, examples=[10000])
-    battery: Optional[str] = Field(
+    battery_id: Optional[str] = Field(
         default=None, description="ID of battery", examples=[None, "battery1"]
     )
 
@@ -29,7 +29,7 @@ class Inverter(DeviceBase):
 
     def _setup(self) -> None:
         assert self.parameters is not None
-        if self.parameters.battery is None:
+        if self.parameters.battery_id is None:
             # For the moment raise exception
             # TODO: Make battery configurable by config
             error_msg = "Battery for PV inverter is mandatory."
@@ -42,7 +42,7 @@ class Inverter(DeviceBase):
 
     def _post_setup(self) -> None:
         assert self.parameters is not None
-        self.battery = self.devices.get_device_by_id(self.parameters.battery)
+        self.battery = self.devices.get_device_by_id(self.parameters.battery_id)
 
     def process_energy(
         self, generation: float, consumption: float, hour: int
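The inverter now references its battery through `battery_id`, matching the `device_id` convention used elsewhere in the device parameters. A minimal pydantic sketch that mirrors the fields shown above; it is a stand-in for illustration, not the project class.

```python
from typing import Optional

from pydantic import BaseModel, Field


class InverterParametersSketch(BaseModel):
    device_id: str = Field(description="ID of inverter")
    max_power_wh: float = Field(gt=0)
    battery_id: Optional[str] = Field(default=None, description="ID of battery")


params = InverterParametersSketch(device_id="iv1", max_power_wh=10000, battery_id="battery1")
print(params.battery_id)  # -> "battery1"
```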
@ -25,19 +25,19 @@ logger = get_logger(__name__)
|
|||||||
class MeasurementCommonSettings(SettingsBaseModel):
|
class MeasurementCommonSettings(SettingsBaseModel):
|
||||||
"""Measurement Configuration."""
|
"""Measurement Configuration."""
|
||||||
|
|
||||||
measurement_load0_name: Optional[str] = Field(
|
load0_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load0 source", examples=["Household", "Heat Pump"]
|
default=None, description="Name of the load0 source", examples=["Household", "Heat Pump"]
|
||||||
)
|
)
|
||||||
measurement_load1_name: Optional[str] = Field(
|
load1_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load1 source", examples=[None]
|
default=None, description="Name of the load1 source", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load2_name: Optional[str] = Field(
|
load2_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load2 source", examples=[None]
|
default=None, description="Name of the load2 source", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load3_name: Optional[str] = Field(
|
load3_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load3 source", examples=[None]
|
default=None, description="Name of the load3 source", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load4_name: Optional[str] = Field(
|
load4_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load4 source", examples=[None]
|
default=None, description="Name of the load4 source", examples=[None]
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -50,42 +50,42 @@ class MeasurementDataRecord(DataRecord):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# Single loads, to be aggregated to total load
|
# Single loads, to be aggregated to total load
|
||||||
measurement_load0_mr: Optional[float] = Field(
|
load0_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load0 meter reading [kWh]", examples=[40421]
|
default=None, ge=0, description="Load0 meter reading [kWh]", examples=[40421]
|
||||||
)
|
)
|
||||||
measurement_load1_mr: Optional[float] = Field(
|
load1_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load1 meter reading [kWh]", examples=[None]
|
default=None, ge=0, description="Load1 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load2_mr: Optional[float] = Field(
|
load2_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load2 meter reading [kWh]", examples=[None]
|
default=None, ge=0, description="Load2 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load3_mr: Optional[float] = Field(
|
load3_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load3 meter reading [kWh]", examples=[None]
|
default=None, ge=0, description="Load3 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load4_mr: Optional[float] = Field(
|
load4_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load4 meter reading [kWh]", examples=[None]
|
default=None, ge=0, description="Load4 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
|
|
||||||
measurement_max_loads: ClassVar[int] = 5 # Maximum number of loads that can be set
|
max_loads: ClassVar[int] = 5 # Maximum number of loads that can be set
|
||||||
|
|
||||||
measurement_grid_export_mr: Optional[float] = Field(
|
grid_export_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Export to grid meter reading [kWh]", examples=[1000]
|
default=None, ge=0, description="Export to grid meter reading [kWh]", examples=[1000]
|
||||||
)
|
)
|
||||||
|
|
||||||
measurement_grid_import_mr: Optional[float] = Field(
|
grid_import_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Import from grid meter reading [kWh]", examples=[1000]
|
default=None, ge=0, description="Import from grid meter reading [kWh]", examples=[1000]
|
||||||
)
|
)
|
||||||
|
|
||||||
# Computed fields
|
# Computed fields
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def measurement_loads(self) -> List[str]:
|
def loads(self) -> List[str]:
|
||||||
"""Compute a list of active loads."""
|
"""Compute a list of active loads."""
|
||||||
active_loads = []
|
active_loads = []
|
||||||
|
|
||||||
# Loop through measurement_loadx
|
# Loop through loadx
|
||||||
for i in range(self.measurement_max_loads):
|
for i in range(self.max_loads):
|
||||||
load_attr = f"measurement_load{i}_mr"
|
load_attr = f"load{i}_mr"
|
||||||
|
|
||||||
# Check if either attribute is set and add to active loads
|
# Check if either attribute is set and add to active loads
|
||||||
if getattr(self, load_attr, None):
|
if getattr(self, load_attr, None):
|
||||||
@ -105,7 +105,7 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
|||||||
)
|
)
|
||||||
|
|
||||||
topics: ClassVar[List[str]] = [
|
topics: ClassVar[List[str]] = [
|
||||||
"measurement_load",
|
"load",
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
@ -147,14 +147,16 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
|||||||
"""Provides measurement key for given name and topic."""
|
"""Provides measurement key for given name and topic."""
|
||||||
topic = topic.lower()
|
topic = topic.lower()
|
||||||
|
|
||||||
|
print(self.topics)
|
||||||
if topic not in self.topics:
|
if topic not in self.topics:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
topic_keys = [
|
topic_keys = [
|
||||||
key for key in self.config.measurement.model_fields.keys() if key.startswith(topic)
|
key for key in self.config.measurement.model_fields.keys() if key.startswith(topic)
|
||||||
]
|
]
|
||||||
|
print(topic_keys)
|
||||||
key = None
|
key = None
|
||||||
if topic == "measurement_load":
|
if topic == "load":
|
||||||
for config_key in topic_keys:
|
for config_key in topic_keys:
|
||||||
if (
|
if (
|
||||||
config_key.endswith("_name")
|
config_key.endswith("_name")
|
||||||
@ -255,9 +257,9 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
|||||||
end_datetime = self[-1].date_time
|
end_datetime = self[-1].date_time
|
||||||
size = self._interval_count(start_datetime, end_datetime, interval)
|
size = self._interval_count(start_datetime, end_datetime, interval)
|
||||||
load_total_array = np.zeros(size)
|
load_total_array = np.zeros(size)
|
||||||
# Loop through measurement_load<x>_mr
|
# Loop through load<x>_mr
|
||||||
for i in range(self.record_class().measurement_max_loads):
|
for i in range(self.record_class().max_loads):
|
||||||
key = f"measurement_load{i}_mr"
|
key = f"load{i}_mr"
|
||||||
# Calculate load per interval
|
# Calculate load per interval
|
||||||
load_array = self._energy_from_meter_readings(
|
load_array = self._energy_from_meter_readings(
|
||||||
key=key, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
|
key=key, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
|
||||||
|
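The measurement hunks above follow the same rename pattern: `measurement_load0_mr` becomes `load0_mr`, `measurement_grid_export_mr` becomes `grid_export_mr`, and the topic used for key lookup shrinks from `measurement_load` to `load`. A sketch of a record using the new field names; the values are placeholders taken from the examples in the schema.

```python
# New measurement record field names from this commit (old names carried the
# "measurement_" prefix). Values are placeholders for illustration.
measurement_record = {
    "load0_mr": 40421,       # load0 meter reading [kWh]
    "grid_export_mr": 1000,  # export to grid meter reading [kWh]
    "grid_import_mr": 1000,  # import from grid meter reading [kWh]
}
```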
@ -110,12 +110,8 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
):
|
):
|
||||||
"""Initialize the optimization problem with the required parameters."""
|
"""Initialize the optimization problem with the required parameters."""
|
||||||
self.opti_param: dict[str, Any] = {}
|
self.opti_param: dict[str, Any] = {}
|
||||||
self.fixed_eauto_hours = (
|
self.fixed_eauto_hours = self.config.prediction.hours - self.config.optimization.hours
|
||||||
self.config.prediction.prediction_hours - self.config.optimization.optimization_hours
|
self.possible_charge_values = self.config.optimization.ev_available_charge_rates_percent
|
||||||
)
|
|
||||||
self.possible_charge_values = (
|
|
||||||
self.config.optimization.optimization_ev_available_charge_rates_percent
|
|
||||||
)
|
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
self.fix_seed = fixed_seed
|
self.fix_seed = fixed_seed
|
||||||
self.optimize_ev = True
|
self.optimize_ev = True
|
||||||
@ -182,27 +178,25 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
total_states = 3 * len_ac
|
total_states = 3 * len_ac
|
||||||
|
|
||||||
# 1. Mutating the charge_discharge part
|
# 1. Mutating the charge_discharge part
|
||||||
charge_discharge_part = individual[: self.config.prediction.prediction_hours]
|
charge_discharge_part = individual[: self.config.prediction.hours]
|
||||||
(charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)
|
(charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)
|
||||||
|
|
||||||
# Instead of a fixed clamping to 0..8 or 0..6 dynamically:
|
# Instead of a fixed clamping to 0..8 or 0..6 dynamically:
|
||||||
charge_discharge_mutated = np.clip(charge_discharge_mutated, 0, total_states - 1)
|
charge_discharge_mutated = np.clip(charge_discharge_mutated, 0, total_states - 1)
|
||||||
individual[: self.config.prediction.prediction_hours] = charge_discharge_mutated
|
individual[: self.config.prediction.hours] = charge_discharge_mutated
|
||||||
|
|
||||||
# 2. Mutating the EV charge part, if active
|
# 2. Mutating the EV charge part, if active
|
||||||
if self.optimize_ev:
|
if self.optimize_ev:
|
||||||
ev_charge_part = individual[
|
ev_charge_part = individual[
|
||||||
self.config.prediction.prediction_hours : self.config.prediction.prediction_hours
|
self.config.prediction.hours : self.config.prediction.hours * 2
|
||||||
* 2
|
|
||||||
]
|
]
|
||||||
(ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)
|
(ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)
|
||||||
ev_charge_part_mutated[
|
ev_charge_part_mutated[self.config.prediction.hours - self.fixed_eauto_hours :] = [
|
||||||
self.config.prediction.prediction_hours - self.fixed_eauto_hours :
|
0
|
||||||
] = [0] * self.fixed_eauto_hours
|
] * self.fixed_eauto_hours
|
||||||
individual[
|
individual[self.config.prediction.hours : self.config.prediction.hours * 2] = (
|
||||||
self.config.prediction.prediction_hours : self.config.prediction.prediction_hours
|
ev_charge_part_mutated
|
||||||
* 2
|
)
|
||||||
] = ev_charge_part_mutated
|
|
||||||
|
|
||||||
# 3. Mutating the appliance start time, if applicable
|
# 3. Mutating the appliance start time, if applicable
|
||||||
if self.opti_param["home_appliance"] > 0:
|
if self.opti_param["home_appliance"] > 0:
|
||||||
@ -216,15 +210,13 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
def create_individual(self) -> list[int]:
|
def create_individual(self) -> list[int]:
|
||||||
# Start with discharge states for the individual
|
# Start with discharge states for the individual
|
||||||
individual_components = [
|
individual_components = [
|
||||||
self.toolbox.attr_discharge_state()
|
self.toolbox.attr_discharge_state() for _ in range(self.config.prediction.hours)
|
||||||
for _ in range(self.config.prediction.prediction_hours)
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add EV charge index values if optimize_ev is True
|
# Add EV charge index values if optimize_ev is True
|
||||||
if self.optimize_ev:
|
if self.optimize_ev:
|
||||||
individual_components += [
|
individual_components += [
|
||||||
self.toolbox.attr_ev_charge_index()
|
self.toolbox.attr_ev_charge_index() for _ in range(self.config.prediction.hours)
|
||||||
for _ in range(self.config.prediction.prediction_hours)
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add the start time of the household appliance if it's being optimized
|
# Add the start time of the household appliance if it's being optimized
|
||||||
@ -257,7 +249,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
individual.extend(eautocharge_hours_index.tolist())
|
individual.extend(eautocharge_hours_index.tolist())
|
||||||
elif self.optimize_ev:
|
elif self.optimize_ev:
|
||||||
# Falls optimize_ev aktiv ist, aber keine EV-Daten vorhanden sind, fügen wir Nullen hinzu
|
# Falls optimize_ev aktiv ist, aber keine EV-Daten vorhanden sind, fügen wir Nullen hinzu
|
||||||
individual.extend([0] * self.config.prediction.prediction_hours)
|
individual.extend([0] * self.config.prediction.hours)
|
||||||
|
|
||||||
# Add dishwasher start time if applicable
|
# Add dishwasher start time if applicable
|
||||||
if self.opti_param.get("home_appliance", 0) > 0 and washingstart_int is not None:
|
if self.opti_param.get("home_appliance", 0) > 0 and washingstart_int is not None:
|
||||||
@@ -279,17 +271,12 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
             3. Dishwasher start time (integer if applicable).
         """
         # Discharge hours as a NumPy array of ints
-        discharge_hours_bin = np.array(
-            individual[: self.config.prediction.prediction_hours], dtype=int
-        )
+        discharge_hours_bin = np.array(individual[: self.config.prediction.hours], dtype=int)

         # EV charge hours as a NumPy array of ints (if optimize_ev is True)
         eautocharge_hours_index = (
             np.array(
-                individual[
-                    self.config.prediction.prediction_hours : self.config.prediction.prediction_hours
-                    * 2
-                ],
+                individual[self.config.prediction.hours : self.config.prediction.hours * 2],
                 dtype=int,
             )
             if self.optimize_ev
@@ -401,7 +388,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
             )
             self.ems.set_ev_charge_hours(eautocharge_hours_float)
         else:
-            self.ems.set_ev_charge_hours(np.full(self.config.prediction.prediction_hours, 0))
+            self.ems.set_ev_charge_hours(np.full(self.config.prediction.hours, 0))

         return self.ems.simulate(self.ems.start_datetime.hour)

@@ -463,7 +450,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
         # min_length = min(battery_soc_per_hour.size, discharge_hours_bin.size)
         # battery_soc_per_hour_tail = battery_soc_per_hour[-min_length:]
         # discharge_hours_bin_tail = discharge_hours_bin[-min_length:]
-        # len_ac = len(self.config.optimization.optimization_ev_available_charge_rates_percent)
+        # len_ac = len(self.config.optimization.ev_available_charge_rates_percent)

         # # # Find hours where battery SoC is 0
         # # zero_soc_mask = battery_soc_per_hour_tail == 0
@@ -512,7 +499,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
                 if parameters.eauto and self.ems.ev
                 else 0
             )
-            * self.config.optimization.optimization_penalty,
+            * self.config.optimization.penalty,
         )

         return (gesamtbilanz,)
@@ -580,7 +567,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
         start_hour = self.ems.start_datetime.hour

         einspeiseverguetung_euro_pro_wh = np.full(
-            self.config.prediction.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
+            self.config.prediction.hours, parameters.ems.einspeiseverguetung_euro_pro_wh
         )

         # TODO: Refactor device setup phase out
@@ -591,7 +578,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
         if parameters.pv_akku:
             akku = Battery(parameters.pv_akku)
             self.devices.add_device(akku)
-            akku.set_charge_per_hour(np.full(self.config.prediction.prediction_hours, 1))
+            akku.set_charge_per_hour(np.full(self.config.prediction.hours, 1))

         eauto: Optional[Battery] = None
         if parameters.eauto:
@@ -599,7 +586,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
                 parameters.eauto,
             )
             self.devices.add_device(eauto)
-            eauto.set_charge_per_hour(np.full(self.config.prediction.prediction_hours, 1))
+            eauto.set_charge_per_hour(np.full(self.config.prediction.hours, 1))
             self.optimize_ev = (
                 parameters.eauto.min_soc_percentage - parameters.eauto.initial_soc_percentage >= 0
             )
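For orientation, the genome layout that these slices assume can be reproduced in isolation. A minimal sketch, with `hours` standing in for `config.prediction.hours` and purely made-up gene values:

```python
import numpy as np

hours = 48  # stands in for config.prediction.hours; value assumed for the sketch
rng = np.random.default_rng(0)

# First `hours` genes: battery discharge flags; next `hours` genes: EV charge-rate
# indices (only present when optimize_ev is True), mirroring the slicing above.
individual = list(rng.integers(0, 2, size=hours)) + list(rng.integers(0, 6, size=hours))

discharge_hours_bin = np.array(individual[:hours], dtype=int)
eautocharge_hours_index = np.array(individual[hours : hours * 2], dtype=int)
print(discharge_hours_bin.shape, eautocharge_hours_index.shape)  # (48,) (48,)
```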
@@ -12,18 +12,16 @@ class OptimizationCommonSettings(SettingsBaseModel):
     """General Optimization Configuration.

     Attributes:
-        optimization_hours (int): Number of hours for optimizations.
+        hours (int): Number of hours for optimizations.
     """

-    optimization_hours: Optional[int] = Field(
+    hours: Optional[int] = Field(
         default=48, ge=0, description="Number of hours into the future for optimizations."
     )

-    optimization_penalty: Optional[int] = Field(
-        default=10, description="Penalty factor used in optimization."
-    )
+    penalty: Optional[int] = Field(default=10, description="Penalty factor used in optimization.")

-    optimization_ev_available_charge_rates_percent: Optional[List[float]] = Field(
+    ev_available_charge_rates_percent: Optional[List[float]] = Field(
         default=[
             0.0,
             6.0 / 16.0,
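The section name now carries the namespace that the dropped `optimization_` prefix used to provide. A minimal sketch of constructing the renamed settings directly; the import path is an assumption:

```python
# Assumed module path for OptimizationCommonSettings; adjust to the actual package layout.
from akkudoktoreos.optimization.optimization import OptimizationCommonSettings

settings = OptimizationCommonSettings(hours=24, penalty=10)
print(settings.hours, settings.penalty)                # 24 10
print(settings.ev_available_charge_rates_percent[:2])  # defaults start [0.0, 0.375]
```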
@@ -9,12 +9,12 @@ from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
 class ElecPriceCommonSettings(SettingsBaseModel):
     """Electricity Price Prediction Configuration."""

-    elecprice_provider: Optional[str] = Field(
+    provider: Optional[str] = Field(
         default=None,
         description="Electricity price provider id of provider to be used.",
         examples=["ElecPriceAkkudoktor"],
     )
-    elecprice_charges_kwh: Optional[float] = Field(
+    charges_kwh: Optional[float] = Field(
         default=None, ge=0, description="Electricity price charges (€/kWh).", examples=[0.21]
     )

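Callers now set `provider` and `charges_kwh` on the `elecprice` section. A short sketch mirroring the server code further down; the `SettingsEOS` import path is an assumption:

```python
from akkudoktoreos.config.config import SettingsEOS  # assumed import path
from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings

settings = SettingsEOS(
    elecprice=ElecPriceCommonSettings(
        provider="ElecPriceAkkudoktor",  # was elecprice_provider
        charges_kwh=0.21,                # was elecprice_charges_kwh
    )
)
# config_eos.merge_settings(settings=settings)  # applied as in the endpoints below
```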
@@ -49,15 +49,15 @@ class ElecPriceProvider(PredictionProvider):
         electricity price_provider (str): Prediction provider for electricity price.

     Attributes:
-        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
-        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
+        hours (int, optional): The number of hours into the future for which predictions are generated.
+        historic_hours (int, optional): The number of past hours for which historical data is retained.
         latitude (float, optional): The latitude in degrees, must be within -90 to 90.
         longitude (float, optional): The longitude in degrees, must be within -180 to 180.
         start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
         end_datetime (datetime, computed): The datetime representing the end of the prediction range,
-            calculated based on `start_datetime` and `prediction_hours`.
+            calculated based on `start_datetime` and `hours`.
         keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
-            based on `start_datetime` and `prediction_historic_hours`.
+            based on `start_datetime` and `historic_hours`.
     """

     # overload
@@ -71,4 +71,4 @@ class ElecPriceProvider(PredictionProvider):
         return "ElecPriceProvider"

     def enabled(self) -> bool:
-        return self.provider_id() == self.config.elecprice.elecprice_provider
+        return self.provider_id() == self.config.elecprice.provider
@@ -54,11 +54,11 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
     of hours into the future and retains historical data.

     Attributes:
-        prediction_hours (int, optional): Number of hours in the future for the forecast.
-        prediction_historic_hours (int, optional): Number of past hours for retaining data.
+        hours (int, optional): Number of hours in the future for the forecast.
+        historic_hours (int, optional): Number of past hours for retaining data.
         start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
-        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
-        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
+        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
+        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.

     Methods:
         provider_id(): Returns a unique identifier for the provider.
@@ -125,18 +125,16 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
         capped_data = data.clip(min=lower_bound, max=upper_bound)
         return capped_data

-    def _predict_ets(
-        self, history: np.ndarray, seasonal_periods: int, prediction_hours: int
-    ) -> np.ndarray:
+    def _predict_ets(self, history: np.ndarray, seasonal_periods: int, hours: int) -> np.ndarray:
         clean_history = self._cap_outliers(history)
         model = ExponentialSmoothing(
             clean_history, seasonal="add", seasonal_periods=seasonal_periods
         ).fit()
-        return model.forecast(prediction_hours)
+        return model.forecast(hours)

-    def _predict_median(self, history: np.ndarray, prediction_hours: int) -> np.ndarray:
+    def _predict_median(self, history: np.ndarray, hours: int) -> np.ndarray:
         clean_history = self._cap_outliers(history)
-        return np.full(prediction_hours, np.median(clean_history))
+        return np.full(hours, np.median(clean_history))

     def _update_data(
         self, force_update: Optional[bool] = False
@@ -155,8 +153,8 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
         # Assumption that all lists are the same length and are ordered chronologically
         # in ascending order and have the same timestamps.

-        # Get elecprice_charges_kwh in wh
-        charges_wh = (self.config.elecprice.elecprice_charges_kwh or 0) / 1000
+        # Get charges_kwh in wh
+        charges_wh = (self.config.elecprice.charges_kwh or 0) / 1000

         highest_orig_datetime = None  # newest datetime from the api after that we want to update.
         series_data = pd.Series(dtype=float)  # Initialize an empty series
@@ -183,27 +181,23 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
         assert highest_orig_datetime  # mypy fix

         # some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
-        needed_prediction_hours = int(
-            self.config.prediction.prediction_hours
+        needed_hours = int(
+            self.config.prediction.hours
             - ((highest_orig_datetime - self.start_datetime).total_seconds() // 3600)
         )

-        if needed_prediction_hours <= 0:
+        if needed_hours <= 0:
             logger.warning(
-                f"No prediction needed. needed_prediction_hours={needed_prediction_hours}, prediction_hours={self.config.prediction.prediction_hours},highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
-            )  # this might keep data longer than self.start_datetime + self.config.prediction.prediction_hours in the records
+                f"No prediction needed. needed_hours={needed_hours}, hours={self.config.prediction.hours},highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
+            )  # this might keep data longer than self.start_datetime + self.config.prediction.hours in the records
             return

         if amount_datasets > 800:  # we do the full ets with seasons of 1 week
-            prediction = self._predict_ets(
-                history, seasonal_periods=168, prediction_hours=needed_prediction_hours
-            )
+            prediction = self._predict_ets(history, seasonal_periods=168, hours=needed_hours)
         elif amount_datasets > 168:  # not enough data to do seasons of 1 week, but enough for 1 day
-            prediction = self._predict_ets(
-                history, seasonal_periods=24, prediction_hours=needed_prediction_hours
-            )
+            prediction = self._predict_ets(history, seasonal_periods=24, hours=needed_hours)
         elif amount_datasets > 0:  # not enough data for ets, do median
-            prediction = self._predict_median(history, prediction_hours=needed_prediction_hours)
+            prediction = self._predict_median(history, hours=needed_hours)
         else:
             logger.error("No data available for prediction")
             raise ValueError("No data available")
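The fallback cascade above (weekly seasonality, then daily, then a median forecast) can be reproduced standalone with statsmodels. A sketch under the same thresholds, not the provider's actual implementation:

```python
import numpy as np
from statsmodels.tsa.holtwinters import ExponentialSmoothing


def predict(history: np.ndarray, hours: int) -> np.ndarray:
    """Forecast `hours` values: ETS with weekly/daily seasonality, median as last resort."""
    if history.size > 800:
        seasonal_periods = 168  # one week of hourly data
    elif history.size > 168:
        seasonal_periods = 24  # one day of hourly data
    else:
        return np.full(hours, np.median(history))
    model = ExponentialSmoothing(
        history, seasonal="add", seasonal_periods=seasonal_periods
    ).fit()
    return np.asarray(model.forecast(hours))


history = 10 + np.sin(np.linspace(0, 60, 900))  # synthetic hourly series
print(predict(history, hours=48).shape)  # (48,)
```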
@@ -22,24 +22,22 @@ logger = get_logger(__name__)
 class ElecPriceImportCommonSettings(SettingsBaseModel):
     """Common settings for elecprice data import from file or JSON String."""

-    elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
+    import_file_path: Optional[Union[str, Path]] = Field(
         default=None,
         description="Path to the file to import elecprice data from.",
         examples=[None, "/path/to/prices.json"],
     )

-    elecpriceimport_json: Optional[str] = Field(
+    import_json: Optional[str] = Field(
         default=None,
         description="JSON string, dictionary of electricity price forecast value lists.",
         examples=['{"elecprice_marketprice_wh": [0.0003384, 0.0003318, 0.0003284]}'],
     )

     # Validators
-    @field_validator("elecpriceimport_file_path", mode="after")
+    @field_validator("import_file_path", mode="after")
     @classmethod
-    def validate_elecpriceimport_file_path(
-        cls, value: Optional[Union[str, Path]]
-    ) -> Optional[Path]:
+    def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
         if value is None:
             return None
         if isinstance(value, str):
@@ -65,12 +63,12 @@ class ElecPriceImport(ElecPriceProvider, PredictionImportProvider):
         return "ElecPriceImport"

     def _update_data(self, force_update: Optional[bool] = False) -> None:
-        if self.config.elecprice.provider_settings.elecpriceimport_file_path is not None:
+        if self.config.elecprice.provider_settings.import_file_path is not None:
             self.import_from_file(
-                self.config.elecprice.provider_settings.elecpriceimport_file_path,
+                self.config.elecprice.provider_settings.import_file_path,
                 key_prefix="elecprice",
             )
-        if self.config.elecprice.provider_settings.elecpriceimport_json is not None:
+        if self.config.elecprice.provider_settings.import_json is not None:
             self.import_from_json(
-                self.config.elecprice.provider_settings.elecpriceimport_json, key_prefix="elecprice"
+                self.config.elecprice.provider_settings.import_json, key_prefix="elecprice"
             )
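With the prefixes gone, supplying imported prices looks as follows; the JSON payload reuses the field's example above:

```python
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings

provider_settings = ElecPriceImportCommonSettings(
    import_json='{"elecprice_marketprice_wh": [0.0003384, 0.0003318, 0.0003284]}'
)
print(provider_settings.import_file_path)  # None unless a file path is configured
```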
@@ -15,7 +15,7 @@ logger = get_logger(__name__)
 class LoadCommonSettings(SettingsBaseModel):
     """Load Prediction Configuration."""

-    load_provider: Optional[str] = Field(
+    provider: Optional[str] = Field(
         default=None,
         description="Load provider id of provider to be used.",
         examples=["LoadAkkudoktor"],
@@ -33,18 +33,18 @@ class LoadProvider(PredictionProvider):
     LoadProvider is a thread-safe singleton, ensuring only one instance of this class is created.

     Configuration variables:
-        load_provider (str): Prediction provider for load.
+        provider (str): Prediction provider for load.

     Attributes:
-        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
-        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
+        hours (int, optional): The number of hours into the future for which predictions are generated.
+        historic_hours (int, optional): The number of past hours for which historical data is retained.
         latitude (float, optional): The latitude in degrees, must be within -90 to 90.
         longitude (float, optional): The longitude in degrees, must be within -180 to 180.
         start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
         end_datetime (datetime, computed): The datetime representing the end of the prediction range,
-            calculated based on `start_datetime` and `prediction_hours`.
+            calculated based on `start_datetime` and `hours`.
         keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
-            based on `start_datetime` and `prediction_historic_hours`.
+            based on `start_datetime` and `historic_hours`.
     """

     # overload
@@ -58,4 +58,4 @@ class LoadProvider(PredictionProvider):
         return "LoadProvider"

     def enabled(self) -> bool:
-        return self.provider_id() == self.config.load.load_provider
+        return self.provider_id() == self.config.load.provider
@@ -111,7 +111,7 @@ class LoadAkkudoktor(LoadProvider):
         # We provide prediction starting at start of day, to be compatible to old system.
         # End date for prediction is prediction hours from now.
         date = self.start_datetime.start_of("day")
-        end_date = self.start_datetime.add(hours=self.config.prediction.prediction_hours)
+        end_date = self.start_datetime.add(hours=self.config.prediction.hours)
         while compare_datetimes(date, end_date).lt:
             # Extract mean (index 0) and standard deviation (index 1) for the given day and hour
             # Day indexing starts at 0, -1 because of that
@@ -22,19 +22,19 @@ logger = get_logger(__name__)
 class LoadImportCommonSettings(SettingsBaseModel):
     """Common settings for load data import from file or JSON string."""

-    load_import_file_path: Optional[Union[str, Path]] = Field(
+    import_file_path: Optional[Union[str, Path]] = Field(
         default=None,
         description="Path to the file to import load data from.",
         examples=[None, "/path/to/yearly_load.json"],
     )
-    load_import_json: Optional[str] = Field(
+    import_json: Optional[str] = Field(
         default=None,
         description="JSON string, dictionary of load forecast value lists.",
         examples=['{"load0_mean": [676.71, 876.19, 527.13]}'],
     )

     # Validators
-    @field_validator("load_import_file_path", mode="after")
+    @field_validator("import_file_path", mode="after")
     @classmethod
     def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
         if value is None:
@@ -62,11 +62,7 @@ class LoadImport(LoadProvider, PredictionImportProvider):
         return "LoadImport"

     def _update_data(self, force_update: Optional[bool] = False) -> None:
-        if self.config.load.provider_settings.load_import_file_path is not None:
-            self.import_from_file(
-                self.config.provider_settings.load_import_file_path, key_prefix="load"
-            )
-        if self.config.load.provider_settings.load_import_json is not None:
-            self.import_from_json(
-                self.config.load.provider_settings.load_import_json, key_prefix="load"
-            )
+        if self.config.load.provider_settings.import_file_path is not None:
+            self.import_from_file(self.config.provider_settings.import_file_path, key_prefix="load")
+        if self.config.load.provider_settings.import_json is not None:
+            self.import_from_json(self.config.load.provider_settings.import_json, key_prefix="load")
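The load import settings follow the same pattern; a sketch with the renamed fields, the module path being an assumption:

```python
from akkudoktoreos.prediction.loadimport import LoadImportCommonSettings  # assumed module path

provider_settings = LoadImportCommonSettings(
    import_json='{"load0_mean": [676.71, 876.19, 527.13]}'
)
```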
@@ -53,9 +53,9 @@ class PredictionCommonSettings(SettingsBaseModel):
     determines the time zone based on latitude and longitude.

     Attributes:
-        prediction_hours (Optional[int]): Number of hours into the future for predictions.
+        hours (Optional[int]): Number of hours into the future for predictions.
             Must be non-negative.
-        prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.
+        historic_hours (Optional[int]): Number of hours into the past for historical data.
             Must be non-negative.
         latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
         longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
@@ -65,16 +65,16 @@ class PredictionCommonSettings(SettingsBaseModel):
     and longitude.

     Validators:
-        validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.
-        validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.
+        validate_hours (int): Ensures `hours` is a non-negative integer.
+        validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.
         validate_latitude (float): Ensures `latitude` is within the range -90 to 90.
         validate_longitude (float): Ensures `longitude` is within the range -180 to 180.
     """

-    prediction_hours: Optional[int] = Field(
+    hours: Optional[int] = Field(
         default=48, ge=0, description="Number of hours into the future for predictions"
     )
-    prediction_historic_hours: Optional[int] = Field(
+    historic_hours: Optional[int] = Field(
         default=48,
         ge=0,
         description="Number of hours into the past for historical predictions data",
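A minimal sketch of the renamed prediction section, using the coordinates from the PV forecast example further down:

```python
from akkudoktoreos.prediction.prediction import PredictionCommonSettings

prediction = PredictionCommonSettings(
    hours=48,           # was prediction_hours
    historic_hours=24,  # was prediction_historic_hours
    latitude=52.52,
    longitude=13.405,
)
print(prediction.hours, prediction.historic_hours)
```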
@@ -114,16 +114,16 @@ class PredictionStartEndKeepMixin(PredictionBase):
     @computed_field  # type: ignore[prop-decorator]
     @property
     def end_datetime(self) -> Optional[DateTime]:
-        """Compute the end datetime based on the `start_datetime` and `prediction_hours`.
+        """Compute the end datetime based on the `start_datetime` and `hours`.

         Ajusts the calculated end time if DST transitions occur within the prediction window.

         Returns:
             Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
         """
-        if self.start_datetime and self.config.prediction.prediction_hours:
+        if self.start_datetime and self.config.prediction.hours:
             end_datetime = self.start_datetime + to_duration(
-                f"{self.config.prediction.prediction_hours} hours"
+                f"{self.config.prediction.hours} hours"
             )
             dst_change = end_datetime.offset_hours - self.start_datetime.offset_hours
             logger.debug(f"Pre: {self.start_datetime}..{end_datetime}: DST change: {dst_change}")
@@ -147,10 +147,10 @@ class PredictionStartEndKeepMixin(PredictionBase):
             return None
         historic_hours = self.historic_hours_min()
         if (
-            self.config.prediction.prediction_historic_hours
-            and self.config.prediction.prediction_historic_hours > historic_hours
+            self.config.prediction.historic_hours
+            and self.config.prediction.historic_hours > historic_hours
         ):
-            historic_hours = int(self.config.prediction.prediction_historic_hours)
+            historic_hours = int(self.config.prediction.historic_hours)
         return self.start_datetime - to_duration(f"{historic_hours} hours")

     @computed_field  # type: ignore[prop-decorator]
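The `dst_change` bookkeeping above compares the UTC offsets of the window's start and end. A sketch of that comparison with pendulum, which is assumed here only to make the example self-contained:

```python
import pendulum  # assumed for the sketch; EOS works with comparable DateTime objects

start = pendulum.datetime(2025, 3, 29, 12, tz="Europe/Berlin")  # day before the spring DST change
end = start.add(hours=48)
dst_change = end.offset_hours - start.offset_hours
print(dst_change)  # 1.0: the prediction window crosses the spring-forward transition
```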
@@ -19,7 +19,7 @@ class PVForecastCommonSettings(SettingsBaseModel):
     # Inverter Parameters
     # https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/inverter.html

-    pvforecast_provider: Optional[str] = Field(
+    provider: Optional[str] = Field(
         default=None,
         description="PVForecast provider id of provider to be used.",
         examples=["PVForecastAkkudoktor"],
@@ -28,18 +28,18 @@ class PVForecastProvider(PredictionProvider):
     PVForecastProvider is a thread-safe singleton, ensuring only one instance of this class is created.

     Configuration variables:
-        pvforecast_provider (str): Prediction provider for pvforecast.
+        provider (str): Prediction provider for pvforecast.

     Attributes:
-        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
-        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
+        hours (int, optional): The number of hours into the future for which predictions are generated.
+        historic_hours (int, optional): The number of past hours for which historical data is retained.
         latitude (float, optional): The latitude in degrees, must be within -90 to 90.
         longitude (float, optional): The longitude in degrees, must be within -180 to 180.
         start_datetime (datetime, optional): The starting datetime for predictions (inlcusive), defaults to the current datetime if unspecified.
         end_datetime (datetime, computed): The datetime representing the end of the prediction range (exclusive),
-            calculated based on `start_datetime` and `prediction_hours`.
+            calculated based on `start_datetime` and `hours`.
         keep_datetime (datetime, computed): The earliest datetime for retaining historical data (inclusive), calculated
-            based on `start_datetime` and `prediction_historic_hours`.
+            based on `start_datetime` and `historic_hours`.
     """

     # overload
@@ -54,6 +54,6 @@ class PVForecastProvider(PredictionProvider):

     def enabled(self) -> bool:
         logger.debug(
-            f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast.pvforecast_provider}"
+            f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast.provider}"
         )
-        return self.provider_id() == self.config.pvforecast.pvforecast_provider
+        return self.provider_id() == self.config.pvforecast.provider
@@ -14,21 +14,25 @@ Classes:
 Example:
     # Set up the configuration with necessary fields for URL generation
     settings_data = {
-        "prediction_hours": 48,
-        "prediction_historic_hours": 24,
-        "latitude": 52.52,
-        "longitude": 13.405,
-        "pvforecast_provider": "Akkudoktor",
-        "pvforecast0_peakpower": 5.0,
-        "pvforecast0_surface_azimuth": -10,
-        "pvforecast0_surface_tilt": 7,
-        "pvforecast0_userhorizon": [20, 27, 22, 20],
-        "pvforecast0_inverter_paco": 10000,
-        "pvforecast1_peakpower": 4.8,
-        "pvforecast1_surface_azimuth": -90,
-        "pvforecast1_surface_tilt": 7,
-        "pvforecast1_userhorizon": [30, 30, 30, 50],
-        "pvforecast1_inverter_paco": 10000,
+        "prediction": {
+            "hours": 48,
+            "historic_hours": 24,
+            "latitude": 52.52,
+            "longitude": 13.405,
+        },
+        "pvforecast": {
+            "provider": "PVForecastAkkudoktor",
+            "pvforecast0_peakpower": 5.0,
+            "pvforecast0_surface_azimuth": -10,
+            "pvforecast0_surface_tilt": 7,
+            "pvforecast0_userhorizon": [20, 27, 22, 20],
+            "pvforecast0_inverter_paco": 10000,
+            "pvforecast1_peakpower": 4.8,
+            "pvforecast1_surface_azimuth": -90,
+            "pvforecast1_surface_tilt": 7,
+            "pvforecast1_userhorizon": [30, 30, 30, 50],
+            "pvforecast1_inverter_paco": 10000,
+        }
     }

     # Create the config instance from the provided data
@@ -47,12 +51,12 @@ Example:
     print(forecast.report_ac_power_and_measurement())

 Attributes:
-    prediction_hours (int): Number of hours into the future to forecast. Default is 48.
-    prediction_historic_hours (int): Number of past hours to retain for analysis. Default is 24.
+    hours (int): Number of hours into the future to forecast. Default is 48.
+    historic_hours (int): Number of past hours to retain for analysis. Default is 24.
     latitude (float): Latitude for the forecast location.
     longitude (float): Longitude for the forecast location.
     start_datetime (datetime): Start time for the forecast, defaulting to current datetime.
-    end_datetime (datetime): Computed end datetime based on `start_datetime` and `prediction_hours`.
+    end_datetime (datetime): Computed end datetime based on `start_datetime` and `hours`.
     keep_datetime (datetime): Computed threshold datetime for retaining historical data.

 Methods:
@@ -159,13 +163,13 @@ class PVForecastAkkudoktor(PVForecastProvider):
     of hours into the future and retains historical data.

     Attributes:
-        prediction_hours (int, optional): Number of hours in the future for the forecast.
-        prediction_historic_hours (int, optional): Number of past hours for retaining data.
+        hours (int, optional): Number of hours in the future for the forecast.
+        historic_hours (int, optional): Number of past hours for retaining data.
         latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
         longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
         start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
-        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
-        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
+        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
+        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.

     Methods:
         provider_id(): Returns a unique identifier for the provider.
@@ -286,10 +290,10 @@ class PVForecastAkkudoktor(PVForecastProvider):

         # Assumption that all lists are the same length and are ordered chronologically
         # in ascending order and have the same timestamps.
-        if len(akkudoktor_data.values[0]) < self.config.prediction.prediction_hours:
+        if len(akkudoktor_data.values[0]) < self.config.prediction.hours:
             # Expect one value set per prediction hour
             error_msg = (
-                f"The forecast must cover at least {self.config.prediction.prediction_hours} hours, "
+                f"The forecast must cover at least {self.config.prediction.hours} hours, "
                 f"but only {len(akkudoktor_data.values[0])} data sets are given in forecast data."
             )
             logger.error(f"Akkudoktor schema change: {error_msg}")
@@ -318,9 +322,9 @@ class PVForecastAkkudoktor(PVForecastProvider):

             self.update_value(dt, data)

-        if len(self) < self.config.prediction.prediction_hours:
+        if len(self) < self.config.prediction.hours:
             raise ValueError(
-                f"The forecast must cover at least {self.config.prediction.prediction_hours} hours, "
+                f"The forecast must cover at least {self.config.prediction.hours} hours, "
                 f"but only {len(self)} hours starting from {self.start_datetime} "
                 f"were predicted."
             )
@@ -370,13 +374,13 @@ if __name__ == "__main__":
     # Set up the configuration with necessary fields for URL generation
     settings_data = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         },
         "pvforecast": {
-            "pvforecast_provider": "PVForecastAkkudoktor",
+            "provider": "PVForecastAkkudoktor",
             "pvforecast0_peakpower": 5.0,
             "pvforecast0_surface_azimuth": -10,
             "pvforecast0_surface_tilt": 7,
@@ -11,7 +11,7 @@ from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
 class WeatherCommonSettings(SettingsBaseModel):
     """Weather Forecast Configuration."""

-    weather_provider: Optional[str] = Field(
+    provider: Optional[str] = Field(
         default=None,
         description="Weather provider id of provider to be used.",
         examples=["WeatherImport"],
@@ -101,18 +101,18 @@ class WeatherProvider(PredictionProvider):
     WeatherProvider is a thread-safe singleton, ensuring only one instance of this class is created.

     Configuration variables:
-        weather_provider (str): Prediction provider for weather.
+        provider (str): Prediction provider for weather.

     Attributes:
-        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
-        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
+        hours (int, optional): The number of hours into the future for which predictions are generated.
+        historic_hours (int, optional): The number of past hours for which historical data is retained.
         latitude (float, optional): The latitude in degrees, must be within -90 to 90.
         longitude (float, optional): The longitude in degrees, must be within -180 to 180.
         start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
         end_datetime (datetime, computed): The datetime representing the end of the prediction range,
-            calculated based on `start_datetime` and `prediction_hours`.
+            calculated based on `start_datetime` and `hours`.
         keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
-            based on `start_datetime` and `prediction_historic_hours`.
+            based on `start_datetime` and `historic_hours`.
     """

     # overload
@@ -126,7 +126,7 @@ class WeatherProvider(PredictionProvider):
         return "WeatherProvider"

     def enabled(self) -> bool:
-        return self.provider_id() == self.config.weather.weather_provider
+        return self.provider_id() == self.config.weather.provider

     @classmethod
     def estimate_irradiance_from_cloud_cover(
@@ -62,13 +62,13 @@ class WeatherBrightSky(WeatherProvider):
     of hours into the future and retains historical data.

     Attributes:
-        prediction_hours (int, optional): Number of hours in the future for the forecast.
-        prediction_historic_hours (int, optional): Number of past hours for retaining data.
+        hours (int, optional): Number of hours in the future for the forecast.
+        historic_hours (int, optional): Number of past hours for retaining data.
         latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
         longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
         start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
-        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
-        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
+        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
+        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.

     Methods:
         provider_id(): Returns a unique identifier for the provider.
@@ -68,15 +68,15 @@ class WeatherClearOutside(WeatherProvider):
     WeatherClearOutside is a thread-safe singleton, ensuring only one instance of this class is created.

     Attributes:
-        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
-        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
+        hours (int, optional): The number of hours into the future for which predictions are generated.
+        historic_hours (int, optional): The number of past hours for which historical data is retained.
         latitude (float, optional): The latitude in degrees, must be within -90 to 90.
         longitude (float, optional): The longitude in degrees, must be within -180 to 180.
         start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
         end_datetime (datetime, computed): The datetime representing the end of the prediction range,
-            calculated based on `start_datetime` and `prediction_hours`.
+            calculated based on `start_datetime` and `hours`.
         keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
-            based on `start_datetime` and `prediction_historic_hours`.
+            based on `start_datetime` and `historic_hours`.
     """

     @classmethod
@@ -22,22 +22,22 @@ logger = get_logger(__name__)
 class WeatherImportCommonSettings(SettingsBaseModel):
     """Common settings for weather data import from file or JSON string."""

-    weatherimport_file_path: Optional[Union[str, Path]] = Field(
+    import_file_path: Optional[Union[str, Path]] = Field(
         default=None,
         description="Path to the file to import weather data from.",
         examples=[None, "/path/to/weather_data.json"],
     )

-    weatherimport_json: Optional[str] = Field(
+    import_json: Optional[str] = Field(
         default=None,
         description="JSON string, dictionary of weather forecast value lists.",
         examples=['{"weather_temp_air": [18.3, 17.8, 16.9]}'],
     )

     # Validators
-    @field_validator("weatherimport_file_path", mode="after")
+    @field_validator("import_file_path", mode="after")
     @classmethod
-    def validate_weatherimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
+    def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
         if value is None:
             return None
         if isinstance(value, str):
@@ -63,11 +63,11 @@ class WeatherImport(WeatherProvider, PredictionImportProvider):
         return "WeatherImport"

     def _update_data(self, force_update: Optional[bool] = False) -> None:
-        if self.config.weather.provider_settings.weatherimport_file_path is not None:
+        if self.config.weather.provider_settings.import_file_path is not None:
             self.import_from_file(
-                self.config.weather.provider_settings.weatherimport_file_path, key_prefix="weather"
+                self.config.weather.provider_settings.import_file_path, key_prefix="weather"
             )
-        if self.config.weather.provider_settings.weatherimport_json is not None:
+        if self.config.weather.provider_settings.import_json is not None:
             self.import_from_json(
-                self.config.weather.provider_settings.weatherimport_json, key_prefix="weather"
+                self.config.weather.provider_settings.import_json, key_prefix="weather"
             )
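The weather import settings complete the pattern; a sketch with the renamed fields and the example payload from above:

```python
from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings

provider_settings = WeatherImportCommonSettings(
    import_json='{"weather_temp_air": [18.3, 17.8, 16.9]}'
)
```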
@@ -33,6 +33,7 @@ from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
 from akkudoktoreos.prediction.load import LoadCommonSettings
 from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
 from akkudoktoreos.prediction.prediction import PredictionCommonSettings, get_prediction
+from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
 from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration

 logger = get_logger(__name__)
@@ -152,20 +153,16 @@ def start_eosdash() -> subprocess.Popen:

     if args is None:
         # No command line arguments
-        host = config_eos.server.server_eosdash_host
-        port = config_eos.server.server_eosdash_port
-        eos_host = config_eos.server.server_eos_host
-        eos_port = config_eos.server.server_eos_port
+        host = config_eos.server.eosdash_host
+        port = config_eos.server.eosdash_port
+        eos_host = config_eos.server.host
+        eos_port = config_eos.server.port
         log_level = "info"
         access_log = False
         reload = False
     else:
         host = args.host
-        port = (
-            config_eos.server.server_eosdash_port
-            if config_eos.server.server_eosdash_port
-            else (args.port + 1)
-        )
+        port = config_eos.server.eosdash_port if config_eos.server.eosdash_port else (args.port + 1)
         eos_host = args.host
         eos_port = args.port
         log_level = args.log_level
|
|||||||
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||||
"""Lifespan manager for the app."""
|
"""Lifespan manager for the app."""
|
||||||
# On startup
|
# On startup
|
||||||
if config_eos.server.server_eos_startup_eosdash:
|
if config_eos.server.startup_eosdash:
|
||||||
try:
|
try:
|
||||||
eosdash_process = start_eosdash()
|
eosdash_process = start_eosdash()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -235,7 +232,7 @@ app = FastAPI(
|
|||||||
|
|
||||||
|
|
||||||
# That's the problem
|
# That's the problem
|
||||||
opt_class = optimization_problem(verbose=bool(config_eos.server.server_eos_verbose))
|
opt_class = optimization_problem(verbose=bool(config_eos.server.verbose))
|
||||||
|
|
||||||
server_dir = Path(__file__).parent.resolve()
|
server_dir = Path(__file__).parent.resolve()
|
||||||
|
|
||||||
@@ -610,7 +607,7 @@ def fastapi_strompreis() -> list[float]:
     Electricity price charges are added.

     Note:
-        Set ElecPriceAkkudoktor as elecprice_provider, then update data with
+        Set ElecPriceAkkudoktor as provider, then update data with
         '/v1/prediction/update'
         and then request data with
         '/v1/prediction/list?key=elecprice_marketprice_wh' or
@@ -618,7 +615,7 @@ def fastapi_strompreis() -> list[float]:
     """
     settings = SettingsEOS(
         elecprice=ElecPriceCommonSettings(
-            elecprice_provider="ElecPriceAkkudoktor",
+            provider="ElecPriceAkkudoktor",
         )
     )
     config_eos.merge_settings(settings=settings)
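For HTTP clients the rename is invisible; the endpoints named in the docstring keep working. A hedged sketch of driving them with `requests`; the base URL and the HTTP method for the update call are assumptions:

```python
import requests

BASE_URL = "http://localhost:8503"  # assumed; use your EOS server's host and port

# '/v1/prediction/update' refreshes the data (POST assumed here),
# '/v1/prediction/list' returns the values for a given key.
requests.post(f"{BASE_URL}/v1/prediction/update")
prices = requests.get(
    f"{BASE_URL}/v1/prediction/list", params={"key": "elecprice_marketprice_wh"}
).json()
print(len(prices))
```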
@@ -670,10 +667,10 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
     """
     settings = SettingsEOS(
         prediction=PredictionCommonSettings(
-            prediction_hours=request.hours,
+            hours=request.hours,
         ),
         load=LoadCommonSettings(
-            load_provider="LoadAkkudoktor",
+            provider="LoadAkkudoktor",
             provider_settings=LoadAkkudoktorCommonSettings(
                 loadakkudoktor_year_energy=request.year_energy,
             ),
@@ -684,7 +681,7 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:

     # Insert measured data into EOS measurement
     # Convert from energy per interval to dummy energy meter readings
-    measurement_key = "measurement_load0_mr"
+    measurement_key = "load0_mr"
     measurement_eos.key_delete_by_datetime(key=measurement_key)  # delete all load0_mr measurements
     energy = {}
     try:
@@ -747,14 +744,14 @@ def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
         year_energy (float): Yearly energy consumption in Wh.

     Note:
-        Set LoadAkkudoktor as load_provider, then update data with
+        Set LoadAkkudoktor as provider, then update data with
         '/v1/prediction/update'
         and then request data with
         '/v1/prediction/list?key=load_mean' instead.
     """
     settings = SettingsEOS(
         load=LoadCommonSettings(
-            load_provider="LoadAkkudoktor",
+            provider="LoadAkkudoktor",
             provider_settings=LoadAkkudoktorCommonSettings(
                 loadakkudoktor_year_energy=year_energy / 1000,  # Convert to kWh
             ),
@ -800,21 +797,25 @@ def fastapi_pvforecast() -> ForecastResponse:
|
|||||||
filled with the first available forecast value.
|
filled with the first available forecast value.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set PVForecastAkkudoktor as pvforecast_provider, then update data with
|
Set PVForecastAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=pvforecast_ac_power' and
|
'/v1/prediction/list?key=pvforecast_ac_power' and
|
||||||
'/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.
|
'/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.
|
||||||
"""
|
"""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(pvforecast=PVForecastCommonSettings(provider="PVForecastAkkudoktor"))
|
||||||
elecprice_provider="PVForecastAkkudoktor",
|
|
||||||
)
|
|
||||||
config_eos.merge_settings(settings=settings)
|
config_eos.merge_settings(settings=settings)
|
||||||
|
|
||||||
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
||||||
|
|
||||||
# Create PV forecast
|
# Create PV forecast
|
||||||
prediction_eos.update_data(force_update=True)
|
try:
|
||||||
|
prediction_eos.update_data(force_update=True)
|
||||||
|
except ValueError as e:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=404,
|
||||||
|
detail=f"Can not get the PV forecast: {e}",
|
||||||
|
)
|
||||||
|
|
||||||
# Get the forecast starting at start of day
|
# Get the forecast starting at start of day
|
||||||
start_datetime = to_datetime().start_of("day")
|
start_datetime = to_datetime().start_of("day")
|
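For reference, the workflow the docstring recommends instead of this legacy endpoint could look roughly like this from a client; the base URL, the port, and the HTTP methods are assumptions about a concrete deployment:

```python
import requests

base = "http://localhost:8503"  # host/port are deployment-specific assumptions
requests.post(f"{base}/v1/prediction/update")  # HTTP method assumed
ac_power = requests.get(
    f"{base}/v1/prediction/list", params={"key": "pvforecast_ac_power"}
).json()
```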
||||||
@ -901,9 +902,9 @@ async def proxy_put(request: Request, path: str) -> Response:
|
|||||||
|
|
||||||
|
|
||||||
async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse | HTMLResponse]:
|
async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse | HTMLResponse]:
|
||||||
if config_eos.server.server_eosdash_host and config_eos.server.server_eosdash_port:
|
if config_eos.server.eosdash_host and config_eos.server.eosdash_port:
|
||||||
# Proxy to EOSdash server
|
# Proxy to EOSdash server
|
||||||
url = f"http://{config_eos.server.server_eosdash_host}:{config_eos.server.server_eosdash_port}/{path}"
|
url = f"http://{config_eos.server.eosdash_host}:{config_eos.server.eosdash_port}/{path}"
|
||||||
headers = dict(request.headers)
|
headers = dict(request.headers)
|
||||||
|
|
||||||
data = await request.body()
|
data = await request.body()
|
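A hedged sketch of the forwarding step with the renamed `eosdash_host`/`eosdash_port` settings; `httpx` is an assumption about the HTTP client, not necessarily what EOS uses internally, and the snippet is meant to run inside the async `proxy` handler shown above:

```python
import httpx  # client library assumed

url = f"http://{config_eos.server.eosdash_host}:{config_eos.server.eosdash_port}/{path}"
async with httpx.AsyncClient() as client:
    upstream = await client.get(url, headers=dict(request.headers))
```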
||||||
@ -925,9 +926,9 @@ async def proxy(request: Request, path: str) -> Union[Response | RedirectRespons
|
|||||||
error_message=f"""<pre>
|
error_message=f"""<pre>
|
||||||
EOSdash server not reachable: '{url}'
|
EOSdash server not reachable: '{url}'
|
||||||
Did you start the EOSdash server
|
Did you start the EOSdash server
|
||||||
or set 'server_eos_startup_eosdash'?
|
or set 'startup_eosdash'?
|
||||||
If no application server is intended, please
|
If no application server is intended, please
|
||||||
set 'server_eosdash_host' or 'server_eosdash_port' to None.
|
set 'eosdash_host' or 'eosdash_port' to None.
|
||||||
</pre>
|
</pre>
|
||||||
""",
|
""",
|
||||||
error_details=f"{e}",
|
error_details=f"{e}",
|
||||||
@ -991,8 +992,8 @@ def main() -> None:
|
|||||||
it starts the EOS server with the specified configurations.
|
it starts the EOS server with the specified configurations.
|
||||||
|
|
||||||
Command-line Arguments:
|
Command-line Arguments:
|
||||||
--host (str): Host for the EOS server (default: value from config_eos).
|
--host (str): Host for the EOS server (default: value from config).
|
||||||
--port (int): Port for the EOS server (default: value from config_eos).
|
--port (int): Port for the EOS server (default: value from config).
|
||||||
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
||||||
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
||||||
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
||||||
@ -1003,13 +1004,13 @@ def main() -> None:
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--host",
|
"--host",
|
||||||
type=str,
|
type=str,
|
||||||
default=str(config_eos.server.server_eos_host),
|
default=str(config_eos.server.host),
|
||||||
help="Host for the EOS server (default: value from config)",
|
help="Host for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--port",
|
"--port",
|
||||||
type=int,
|
type=int,
|
||||||
default=config_eos.server.server_eos_port,
|
default=config_eos.server.port,
|
||||||
help="Port for the EOS server (default: value from config)",
|
help="Port for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -1038,7 +1039,7 @@ def main() -> None:
|
|||||||
try:
|
try:
|
||||||
run_eos(args.host, args.port, args.log_level, args.access_log, args.reload)
|
run_eos(args.host, args.port, args.log_level, args.access_log, args.reload)
|
||||||
except:
|
except:
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
from functools import reduce
|
from functools import reduce
|
||||||
from typing import Any, Union
|
from typing import Any, Union
|
||||||
|
|
||||||
@ -165,10 +166,10 @@ def main() -> None:
|
|||||||
it starts the EOSdash server with the specified configurations.
|
it starts the EOSdash server with the specified configurations.
|
||||||
|
|
||||||
Command-line Arguments:
|
Command-line Arguments:
|
||||||
--host (str): Host for the EOSdash server (default: value from config_eos).
|
--host (str): Host for the EOSdash server (default: value from config).
|
||||||
--port (int): Port for the EOSdash server (default: value from config_eos).
|
--port (int): Port for the EOSdash server (default: value from config).
|
||||||
--eos-host (str): Host for the EOS server (default: value from config_eos).
|
--eos-host (str): Host for the EOS server (default: value from config).
|
||||||
--eos-port (int): Port for the EOS server (default: value from config_eos).
|
--eos-port (int): Port for the EOS server (default: value from config).
|
||||||
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
||||||
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
||||||
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
||||||
@ -179,28 +180,28 @@ def main() -> None:
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--host",
|
"--host",
|
||||||
type=str,
|
type=str,
|
||||||
default=str(config_eos.server.server_eosdash_host),
|
default=str(config_eos.server.eosdash_host),
|
||||||
help="Host for the EOSdash server (default: value from config_eos)",
|
help="Host for the EOSdash server (default: value from config)",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--port",
|
"--port",
|
||||||
type=int,
|
type=int,
|
||||||
default=config_eos.server.server_eosdash_port,
|
default=config_eos.server.eosdash_port,
|
||||||
help="Port for the EOSdash server (default: value from config_eos)",
|
help="Port for the EOSdash server (default: value from config)",
|
||||||
)
|
)
|
||||||
|
|
||||||
# EOS Host and port arguments with defaults from config_eos
|
# EOS Host and port arguments with defaults from config_eos
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--eos-host",
|
"--eos-host",
|
||||||
type=str,
|
type=str,
|
||||||
default=str(config_eos.server.server_eos_host),
|
default=str(config_eos.server.host),
|
||||||
help="Host for the EOS server (default: value from config_eos)",
|
help="Host for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--eos-port",
|
"--eos-port",
|
||||||
type=int,
|
type=int,
|
||||||
default=config_eos.server.server_eos_port,
|
default=config_eos.server.port,
|
||||||
help="Port for the EOS server (default: value from config_eos)",
|
help="Port for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Optional arguments for log_level, access_log, and reload
|
# Optional arguments for log_level, access_log, and reload
|
||||||
@ -228,7 +229,7 @@ def main() -> None:
|
|||||||
try:
|
try:
|
||||||
run_eosdash(args.host, args.port, args.log_level, args.access_log, args.reload)
|
run_eosdash(args.host, args.port, args.log_level, args.access_log, args.reload)
|
||||||
except:
|
except:
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@ -17,22 +17,18 @@ class ServerCommonSettings(SettingsBaseModel):
|
|||||||
To be added
|
To be added
|
||||||
"""
|
"""
|
||||||
|
|
||||||
server_eos_host: Optional[IPvAnyAddress] = Field(
|
host: Optional[IPvAnyAddress] = Field(default="0.0.0.0", description="EOS server IP address.")
|
||||||
default="0.0.0.0", description="EOS server IP address."
|
port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
|
||||||
)
|
verbose: Optional[bool] = Field(default=False, description="Enable debug output")
|
||||||
server_eos_port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
|
startup_eosdash: Optional[bool] = Field(
|
||||||
server_eos_verbose: Optional[bool] = Field(default=False, description="Enable debug output")
|
|
||||||
server_eos_startup_eosdash: Optional[bool] = Field(
|
|
||||||
default=True, description="EOS server to start EOSdash server."
|
default=True, description="EOS server to start EOSdash server."
|
||||||
)
|
)
|
||||||
server_eosdash_host: Optional[IPvAnyAddress] = Field(
|
eosdash_host: Optional[IPvAnyAddress] = Field(
|
||||||
default="0.0.0.0", description="EOSdash server IP address."
|
default="0.0.0.0", description="EOSdash server IP address."
|
||||||
)
|
)
|
||||||
server_eosdash_port: Optional[int] = Field(
|
eosdash_port: Optional[int] = Field(default=8504, description="EOSdash server IP port number.")
|
||||||
default=8504, description="EOSdash server IP port number."
|
|
||||||
)
|
|
||||||
|
|
||||||
@field_validator("server_eos_port", "server_eosdash_port")
|
@field_validator("port", "eosdash_port")
|
||||||
def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
|
def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
|
||||||
if value is not None and not (1024 <= value <= 49151):
|
if value is not None and not (1024 <= value <= 49151):
|
||||||
raise ValueError("Server port number must be between 1024 and 49151.")
|
raise ValueError("Server port number must be between 1024 and 49151.")
|
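A short usage sketch of the renamed server settings and the port validator above; the import path is an assumption:

```python
from pydantic import ValidationError
from akkudoktoreos.server.server import ServerCommonSettings  # path assumed

settings = ServerCommonSettings()       # host "0.0.0.0", port 8503, eosdash_port 8504
assert settings.startup_eosdash is True

try:
    ServerCommonSettings(port=80)       # outside the allowed 1024..49151 range
except ValidationError as err:
    print(err)                          # "Server port number must be between 1024 and 49151."
```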
||||||
|
@ -57,6 +57,6 @@ class NumpyEncoder(json.JSONEncoder):
|
|||||||
# # Example usage
|
# # Example usage
|
||||||
# start_date = datetime.datetime(2024, 3, 31) # Date of the DST change
|
# start_date = datetime.datetime(2024, 3, 31) # Date of the DST change
|
||||||
# if ist_dst_wechsel(start_date):
|
# if ist_dst_wechsel(start_date):
|
||||||
# prediction_hours = 23 # Adjust to 23 hours for DST change days
|
# hours = 23 # Adjust to 23 hours for DST change days
|
||||||
# else:
|
# else:
|
||||||
# prediction_hours = 24 # Default value for days without DST change
|
# hours = 24 # Default value for days without DST change
|
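The commented example above shrinks `hours` to 23 on DST-change days. A self-contained way to derive the day length with the standard library; the helper name is hypothetical:

```python
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

def hours_in_day(year: int, month: int, day: int, tz: str = "Europe/Berlin") -> int:
    """Length of the local calendar day in hours: 23 or 25 around DST changes, else 24."""
    start = datetime(year, month, day, tzinfo=ZoneInfo(tz))
    end = start + timedelta(days=1)            # same wall-clock time on the next day
    return round((end - start).total_seconds() / 3600)

hours = hours_in_day(2024, 3, 31)              # -> 23 (spring DST change in Germany)
```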
||||||
|
@ -24,9 +24,9 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
||||||
# Ensure configuration holds the correct values
|
# Ensure configuration holds the correct values
|
||||||
config_eos.merge_settings_from_dict(
|
config_eos.merge_settings_from_dict(
|
||||||
{"prediction": {"prediction_hours": 48}, "optimization": {"optimization_hours": 24}}
|
{"prediction": {"hours": 48}, "optimization": {"hours": 24}}
|
||||||
)
|
)
|
||||||
assert config_eos.prediction.prediction_hours == 48
|
assert config_eos.prediction.hours == 48
|
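The same renamed fields can also be overridden through environment variables, following the `EOS_<SECTION>__<FIELD>` pattern used elsewhere in the tests; whether the optimization section accepts `EOS_OPTIMIZATION__HOURS` in the same way is an assumption:

```python
def test_hours_via_env(monkeypatch, config_eos):
    monkeypatch.setenv("EOS_PREDICTION__HOURS", "48")
    monkeypatch.setenv("EOS_OPTIMIZATION__HOURS", "24")  # section prefix assumed
    config_eos.reset_settings()
    assert config_eos.prediction.hours == 48
```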
||||||
|
|
||||||
# Initialize the battery and the inverter
|
# Initialize the battery and the inverter
|
||||||
akku = Battery(
|
akku = Battery(
|
||||||
@ -41,7 +41,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
devices_eos.add_device(akku)
|
devices_eos.add_device(akku)
|
||||||
|
|
||||||
inverter = Inverter(
|
inverter = Inverter(
|
||||||
InverterParameters(device_id="inverter1", max_power_wh=10000, battery=akku.device_id)
|
InverterParameters(device_id="inverter1", max_power_wh=10000, battery_id=akku.device_id)
|
||||||
)
|
)
|
||||||
devices_eos.add_device(inverter)
|
devices_eos.add_device(inverter)
|
||||||
|
|
||||||
@ -62,7 +62,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
device_id="ev1", capacity_wh=26400, initial_soc_percentage=10, min_soc_percentage=10
|
device_id="ev1", capacity_wh=26400, initial_soc_percentage=10, min_soc_percentage=10
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
eauto.set_charge_per_hour(np.full(config_eos.prediction.prediction_hours, 1))
|
eauto.set_charge_per_hour(np.full(config_eos.prediction.hours, 1))
|
||||||
devices_eos.add_device(eauto)
|
devices_eos.add_device(eauto)
|
||||||
|
|
||||||
devices_eos.post_setup()
|
devices_eos.post_setup()
|
||||||
|
@ -23,9 +23,9 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
||||||
# Ensure configuration holds the correct values
|
# Ensure configuration holds the correct values
|
||||||
config_eos.merge_settings_from_dict(
|
config_eos.merge_settings_from_dict(
|
||||||
{"prediction": {"prediction_hours": 48}, "optimization": {"optimization_hours": 24}}
|
{"prediction": {"hours": 48}, "optimization": {"hours": 24}}
|
||||||
)
|
)
|
||||||
assert config_eos.prediction.prediction_hours == 48
|
assert config_eos.prediction.hours == 48
|
||||||
|
|
||||||
# Initialize the battery and the inverter
|
# Initialize the battery and the inverter
|
||||||
akku = Battery(
|
akku = Battery(
|
||||||
@ -37,7 +37,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
devices_eos.add_device(akku)
|
devices_eos.add_device(akku)
|
||||||
|
|
||||||
inverter = Inverter(
|
inverter = Inverter(
|
||||||
InverterParameters(device_id="iv1", max_power_wh=10000, battery=akku.device_id)
|
InverterParameters(device_id="iv1", max_power_wh=10000, battery_id=akku.device_id)
|
||||||
)
|
)
|
||||||
devices_eos.add_device(inverter)
|
devices_eos.add_device(inverter)
|
||||||
|
|
||||||
@ -63,11 +63,11 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
devices_eos.post_setup()
|
devices_eos.post_setup()
|
||||||
|
|
||||||
# Parameters based on previous example data
|
# Parameters based on previous example data
|
||||||
pv_prognose_wh = [0.0] * config_eos.prediction.prediction_hours
|
pv_prognose_wh = [0.0] * config_eos.prediction.hours
|
||||||
pv_prognose_wh[10] = 5000.0
|
pv_prognose_wh[10] = 5000.0
|
||||||
pv_prognose_wh[11] = 5000.0
|
pv_prognose_wh[11] = 5000.0
|
||||||
|
|
||||||
strompreis_euro_pro_wh = [0.001] * config_eos.prediction.prediction_hours
|
strompreis_euro_pro_wh = [0.001] * config_eos.prediction.hours
|
||||||
strompreis_euro_pro_wh[0:10] = [0.00001] * 10
|
strompreis_euro_pro_wh[0:10] = [0.00001] * 10
|
||||||
strompreis_euro_pro_wh[11:15] = [0.00005] * 4
|
strompreis_euro_pro_wh[11:15] = [0.00005] * 4
|
||||||
strompreis_euro_pro_wh[20] = 0.00001
|
strompreis_euro_pro_wh[20] = 0.00001
|
||||||
@ -141,10 +141,10 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
|||||||
home_appliance=home_appliance,
|
home_appliance=home_appliance,
|
||||||
)
|
)
|
||||||
|
|
||||||
ac = np.full(config_eos.prediction.prediction_hours, 0.0)
|
ac = np.full(config_eos.prediction.hours, 0.0)
|
||||||
ac[20] = 1
|
ac[20] = 1
|
||||||
ems.set_akku_ac_charge_hours(ac)
|
ems.set_akku_ac_charge_hours(ac)
|
||||||
dc = np.full(config_eos.prediction.prediction_hours, 0.0)
|
dc = np.full(config_eos.prediction.hours, 0.0)
|
||||||
dc[11] = 1
|
dc[11] = 1
|
||||||
ems.set_akku_dc_charge_hours(dc)
|
ems.set_akku_dc_charge_hours(dc)
|
||||||
|
|
||||||
|
@ -50,7 +50,7 @@ def test_optimize(
|
|||||||
"""Test optimierung_ems."""
|
"""Test optimierung_ems."""
|
||||||
# Ensure configuration holds the correct values
|
# Ensure configuration holds the correct values
|
||||||
config_eos.merge_settings_from_dict(
|
config_eos.merge_settings_from_dict(
|
||||||
{"prediction": {"prediction_hours": 48}, "optimization": {"optimization_hours": 48}}
|
{"prediction": {"hours": 48}, "optimization": {"hours": 48}}
|
||||||
)
|
)
|
||||||
|
|
||||||
# Load input and output data
|
# Load input and output data
|
||||||
|
@ -23,7 +23,7 @@ FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON = DIR_TESTDATA.joinpath(
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def elecprice_provider(monkeypatch, config_eos):
|
def provider(monkeypatch, config_eos):
|
||||||
"""Fixture to create a ElecPriceProvider instance."""
|
"""Fixture to create a ElecPriceProvider instance."""
|
||||||
monkeypatch.setenv("EOS_ELECPRICE__ELECPRICE_PROVIDER", "ElecPriceAkkudoktor")
|
monkeypatch.setenv("EOS_ELECPRICE__ELECPRICE_PROVIDER", "ElecPriceAkkudoktor")
|
||||||
config_eos.reset_settings()
|
config_eos.reset_settings()
|
||||||
@ -49,17 +49,17 @@ def cache_store():
|
|||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
def test_singleton_instance(elecprice_provider):
|
def test_singleton_instance(provider):
|
||||||
"""Test that ElecPriceForecast behaves as a singleton."""
|
"""Test that ElecPriceForecast behaves as a singleton."""
|
||||||
another_instance = ElecPriceAkkudoktor()
|
another_instance = ElecPriceAkkudoktor()
|
||||||
assert elecprice_provider is another_instance
|
assert provider is another_instance
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_provider(elecprice_provider, monkeypatch):
|
def test_invalid_provider(provider, monkeypatch):
|
||||||
"""Test requesting an unsupported elecprice_provider."""
|
"""Test requesting an unsupported provider."""
|
||||||
monkeypatch.setenv("EOS_ELECPRICE__ELECPRICE_PROVIDER", "<invalid>")
|
monkeypatch.setenv("EOS_ELECPRICE__ELECPRICE_PROVIDER", "<invalid>")
|
||||||
elecprice_provider.config.reset_settings()
|
provider.config.reset_settings()
|
||||||
assert not elecprice_provider.enabled()
|
assert not provider.enabled()
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
@ -68,16 +68,16 @@ def test_invalid_provider(elecprice_provider, monkeypatch):
|
|||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.prediction.elecpriceakkudoktor.logger.error")
|
@patch("akkudoktoreos.prediction.elecpriceakkudoktor.logger.error")
|
||||||
def test_validate_data_invalid_format(mock_logger, elecprice_provider):
|
def test_validate_data_invalid_format(mock_logger, provider):
|
||||||
"""Test validation for invalid Akkudoktor data."""
|
"""Test validation for invalid Akkudoktor data."""
|
||||||
invalid_data = '{"invalid": "data"}'
|
invalid_data = '{"invalid": "data"}'
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
elecprice_provider._validate_data(invalid_data)
|
provider._validate_data(invalid_data)
|
||||||
mock_logger.assert_called_once_with(mock_logger.call_args[0][0])
|
mock_logger.assert_called_once_with(mock_logger.call_args[0][0])
|
||||||
|
|
||||||
|
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json):
|
def test_request_forecast(mock_get, provider, sample_akkudoktor_1_json):
|
||||||
"""Test requesting forecast from Akkudoktor."""
|
"""Test requesting forecast from Akkudoktor."""
|
||||||
# Mock response object
|
# Mock response object
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@ -86,10 +86,10 @@ def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json
|
|||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
|
|
||||||
# Preset, as this is usually done by update()
|
# Preset, as this is usually done by update()
|
||||||
elecprice_provider.config.update()
|
provider.config.update()
|
||||||
|
|
||||||
# Test function
|
# Test function
|
||||||
akkudoktor_data = elecprice_provider._request_forecast()
|
akkudoktor_data = provider._request_forecast()
|
||||||
|
|
||||||
assert isinstance(akkudoktor_data, AkkudoktorElecPrice)
|
assert isinstance(akkudoktor_data, AkkudoktorElecPrice)
|
||||||
assert akkudoktor_data.values[0] == AkkudoktorElecPriceValue(
|
assert akkudoktor_data.values[0] == AkkudoktorElecPriceValue(
|
||||||
@ -104,7 +104,7 @@ def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json
|
|||||||
|
|
||||||
|
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_update_data(mock_get, elecprice_provider, sample_akkudoktor_1_json, cache_store):
|
def test_update_data(mock_get, provider, sample_akkudoktor_1_json, cache_store):
|
||||||
"""Test fetching forecast from Akkudoktor."""
|
"""Test fetching forecast from Akkudoktor."""
|
||||||
# Mock response object
|
# Mock response object
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@ -117,28 +117,28 @@ def test_update_data(mock_get, elecprice_provider, sample_akkudoktor_1_json, cac
|
|||||||
# Call the method
|
# Call the method
|
||||||
ems_eos = get_ems()
|
ems_eos = get_ems()
|
||||||
ems_eos.set_start_datetime(to_datetime("2024-12-11 00:00:00", in_timezone="Europe/Berlin"))
|
ems_eos.set_start_datetime(to_datetime("2024-12-11 00:00:00", in_timezone="Europe/Berlin"))
|
||||||
elecprice_provider.update_data(force_enable=True, force_update=True)
|
provider.update_data(force_enable=True, force_update=True)
|
||||||
|
|
||||||
# Assert: Verify the result is as expected
|
# Assert: Verify the result is as expected
|
||||||
mock_get.assert_called_once()
|
mock_get.assert_called_once()
|
||||||
assert (
|
assert (
|
||||||
len(elecprice_provider) == 73
|
len(provider) == 73
|
||||||
) # The API response contains 48 datasets and we want 48 h into the future; the data already reaches 23 h ahead, so only 25 more hours are needed: 48 + 25 = 73.
|
) # The API response contains 48 datasets and we want 48 h into the future; the data already reaches 23 h ahead, so only 25 more hours are needed: 48 + 25 = 73.
|
||||||
|
|
||||||
# Assert we get prediction_hours prioce values by resampling
|
# Assert we get hours prioce values by resampling
|
||||||
np_price_array = elecprice_provider.key_to_array(
|
np_price_array = provider.key_to_array(
|
||||||
key="elecprice_marketprice_wh",
|
key="elecprice_marketprice_wh",
|
||||||
start_datetime=elecprice_provider.start_datetime,
|
start_datetime=provider.start_datetime,
|
||||||
end_datetime=elecprice_provider.end_datetime,
|
end_datetime=provider.end_datetime,
|
||||||
)
|
)
|
||||||
assert len(np_price_array) == elecprice_provider.total_hours
|
assert len(np_price_array) == provider.total_hours
|
||||||
|
|
||||||
# with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_2_JSON, "w") as f_out:
|
# with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_2_JSON, "w") as f_out:
|
||||||
# f_out.write(elecprice_provider.to_json())
|
# f_out.write(provider.to_json())
|
||||||
|
|
||||||
|
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_update_data_with_incomplete_forecast(mock_get, elecprice_provider):
|
def test_update_data_with_incomplete_forecast(mock_get, provider):
|
||||||
"""Test `_update_data` with incomplete or missing forecast data."""
|
"""Test `_update_data` with incomplete or missing forecast data."""
|
||||||
incomplete_data: dict = {"meta": {}, "values": []}
|
incomplete_data: dict = {"meta": {}, "values": []}
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@ -146,7 +146,7 @@ def test_update_data_with_incomplete_forecast(mock_get, elecprice_provider):
|
|||||||
mock_response.content = json.dumps(incomplete_data)
|
mock_response.content = json.dumps(incomplete_data)
|
||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
elecprice_provider._update_data(force_update=True)
|
provider._update_data(force_update=True)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@ -155,7 +155,7 @@ def test_update_data_with_incomplete_forecast(mock_get, elecprice_provider):
|
|||||||
)
|
)
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_request_forecast_status_codes(
|
def test_request_forecast_status_codes(
|
||||||
mock_get, elecprice_provider, sample_akkudoktor_1_json, status_code, exception
|
mock_get, provider, sample_akkudoktor_1_json, status_code, exception
|
||||||
):
|
):
|
||||||
"""Test handling of various API status codes."""
|
"""Test handling of various API status codes."""
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@ -167,31 +167,31 @@ def test_request_forecast_status_codes(
|
|||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
if exception:
|
if exception:
|
||||||
with pytest.raises(exception):
|
with pytest.raises(exception):
|
||||||
elecprice_provider._request_forecast()
|
provider._request_forecast()
|
||||||
else:
|
else:
|
||||||
elecprice_provider._request_forecast()
|
provider._request_forecast()
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.utils.cacheutil.CacheFileStore")
|
@patch("akkudoktoreos.utils.cacheutil.CacheFileStore")
|
||||||
def test_cache_integration(mock_cache, elecprice_provider):
|
def test_cache_integration(mock_cache, provider):
|
||||||
"""Test caching of 8-day electricity price data."""
|
"""Test caching of 8-day electricity price data."""
|
||||||
mock_cache_instance = mock_cache.return_value
|
mock_cache_instance = mock_cache.return_value
|
||||||
mock_cache_instance.get.return_value = None # Simulate no cache
|
mock_cache_instance.get.return_value = None # Simulate no cache
|
||||||
elecprice_provider._update_data(force_update=True)
|
provider._update_data(force_update=True)
|
||||||
mock_cache_instance.create.assert_called_once()
|
mock_cache_instance.create.assert_called_once()
|
||||||
mock_cache_instance.get.assert_called_once()
|
mock_cache_instance.get.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
def test_key_to_array_resampling(elecprice_provider):
|
def test_key_to_array_resampling(provider):
|
||||||
"""Test resampling of forecast data to NumPy array."""
|
"""Test resampling of forecast data to NumPy array."""
|
||||||
elecprice_provider.update_data(force_update=True)
|
provider.update_data(force_update=True)
|
||||||
array = elecprice_provider.key_to_array(
|
array = provider.key_to_array(
|
||||||
key="elecprice_marketprice_wh",
|
key="elecprice_marketprice_wh",
|
||||||
start_datetime=elecprice_provider.start_datetime,
|
start_datetime=provider.start_datetime,
|
||||||
end_datetime=elecprice_provider.end_datetime,
|
end_datetime=provider.end_datetime,
|
||||||
)
|
)
|
||||||
assert isinstance(array, np.ndarray)
|
assert isinstance(array, np.ndarray)
|
||||||
assert len(array) == elecprice_provider.total_hours
|
assert len(array) == provider.total_hours
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
@ -200,12 +200,12 @@ def test_key_to_array_resampling(elecprice_provider):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.skip(reason="For development only")
|
@pytest.mark.skip(reason="For development only")
|
||||||
def test_akkudoktor_development_forecast_data(elecprice_provider):
|
def test_akkudoktor_development_forecast_data(provider):
|
||||||
"""Fetch data from real Akkudoktor server."""
|
"""Fetch data from real Akkudoktor server."""
|
||||||
# Preset, as this is usually done by update_data()
|
# Preset, as this is usually done by update_data()
|
||||||
elecprice_provider.start_datetime = to_datetime("2024-10-26 00:00:00")
|
provider.start_datetime = to_datetime("2024-10-26 00:00:00")
|
||||||
|
|
||||||
akkudoktor_data = elecprice_provider._request_forecast()
|
akkudoktor_data = provider._request_forecast()
|
||||||
|
|
||||||
with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
|
with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
|
||||||
json.dump(akkudoktor_data, f_out, indent=4)
|
json.dump(akkudoktor_data, f_out, indent=4)
|
||||||
|
@ -13,14 +13,14 @@ FILE_TESTDATA_ELECPRICEIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.jso
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def elecprice_provider(sample_import_1_json, config_eos):
|
def provider(sample_import_1_json, config_eos):
|
||||||
"""Fixture to create a ElecPriceProvider instance."""
|
"""Fixture to create a ElecPriceProvider instance."""
|
||||||
settings = {
|
settings = {
|
||||||
"elecprice": {
|
"elecprice": {
|
||||||
"elecprice_provider": "ElecPriceImport",
|
"provider": "ElecPriceImport",
|
||||||
"provider_settings": {
|
"provider_settings": {
|
||||||
"elecpriceimport_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
"import_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
||||||
"elecpriceimport_json": json.dumps(sample_import_1_json),
|
"import_json": json.dumps(sample_import_1_json),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
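With the renamed `provider_settings`, either `import_file_path` or `import_json` supplies the data. A hypothetical minimal payload; only the `elecprice_marketprice_wh` key is taken from the assertions below, the rest of the schema and the values are assumptions:

```python
import json

sample_import = {"elecprice_marketprice_wh": [0.0003, 0.00028, 0.00025]}  # values assumed

settings = {
    "elecprice": {
        "provider": "ElecPriceImport",
        "provider_settings": {"import_json": json.dumps(sample_import)},
    }
}
config_eos.merge_settings_from_dict(settings)
```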
||||||
@ -43,24 +43,24 @@ def sample_import_1_json():
|
|||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
def test_singleton_instance(elecprice_provider):
|
def test_singleton_instance(provider):
|
||||||
"""Test that ElecPriceForecast behaves as a singleton."""
|
"""Test that ElecPriceForecast behaves as a singleton."""
|
||||||
another_instance = ElecPriceImport()
|
another_instance = ElecPriceImport()
|
||||||
assert elecprice_provider is another_instance
|
assert provider is another_instance
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_provider(elecprice_provider, config_eos):
|
def test_invalid_provider(provider, config_eos):
|
||||||
"""Test requesting an unsupported elecprice_provider."""
|
"""Test requesting an unsupported provider."""
|
||||||
settings = {
|
settings = {
|
||||||
"elecprice": {
|
"elecprice": {
|
||||||
"elecprice_provider": "<invalid>",
|
"provider": "<invalid>",
|
||||||
"provider_settings": {
|
"provider_settings": {
|
||||||
"elecpriceimport_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
"import_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
config_eos.merge_settings_from_dict(settings)
|
config_eos.merge_settings_from_dict(settings)
|
||||||
assert not elecprice_provider.enabled()
|
assert not provider.enabled()
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
@ -81,35 +81,33 @@ def test_invalid_provider(elecprice_provider, config_eos):
|
|||||||
("2024-10-27 00:00:00", False), # DST change in Germany (25 hours/ day)
|
("2024-10-27 00:00:00", False), # DST change in Germany (25 hours/ day)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_import(elecprice_provider, sample_import_1_json, start_datetime, from_file, config_eos):
|
def test_import(provider, sample_import_1_json, start_datetime, from_file, config_eos):
|
||||||
"""Test fetching forecast from Import."""
|
"""Test fetching forecast from Import."""
|
||||||
ems_eos = get_ems()
|
ems_eos = get_ems()
|
||||||
ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
|
ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
|
||||||
if from_file:
|
if from_file:
|
||||||
config_eos.elecprice.provider_settings.elecpriceimport_json = None
|
config_eos.elecprice.provider_settings.import_json = None
|
||||||
assert config_eos.elecprice.provider_settings.elecpriceimport_json is None
|
assert config_eos.elecprice.provider_settings.import_json is None
|
||||||
else:
|
else:
|
||||||
config_eos.elecprice.provider_settings.elecpriceimport_file_path = None
|
config_eos.elecprice.provider_settings.import_file_path = None
|
||||||
assert config_eos.elecprice.provider_settings.elecpriceimport_file_path is None
|
assert config_eos.elecprice.provider_settings.import_file_path is None
|
||||||
elecprice_provider.clear()
|
provider.clear()
|
||||||
|
|
||||||
# Call the method
|
# Call the method
|
||||||
elecprice_provider.update_data()
|
provider.update_data()
|
||||||
|
|
||||||
# Assert: Verify the result is as expected
|
# Assert: Verify the result is as expected
|
||||||
assert elecprice_provider.start_datetime is not None
|
assert provider.start_datetime is not None
|
||||||
assert elecprice_provider.total_hours is not None
|
assert provider.total_hours is not None
|
||||||
assert compare_datetimes(elecprice_provider.start_datetime, ems_eos.start_datetime).equal
|
assert compare_datetimes(provider.start_datetime, ems_eos.start_datetime).equal
|
||||||
values = sample_import_1_json["elecprice_marketprice_wh"]
|
values = sample_import_1_json["elecprice_marketprice_wh"]
|
||||||
value_datetime_mapping = elecprice_provider.import_datetimes(
|
value_datetime_mapping = provider.import_datetimes(ems_eos.start_datetime, len(values))
|
||||||
ems_eos.start_datetime, len(values)
|
|
||||||
)
|
|
||||||
for i, mapping in enumerate(value_datetime_mapping):
|
for i, mapping in enumerate(value_datetime_mapping):
|
||||||
assert i < len(elecprice_provider.records)
|
assert i < len(provider.records)
|
||||||
expected_datetime, expected_value_index = mapping
|
expected_datetime, expected_value_index = mapping
|
||||||
expected_value = values[expected_value_index]
|
expected_value = values[expected_value_index]
|
||||||
result_datetime = elecprice_provider.records[i].date_time
|
result_datetime = provider.records[i].date_time
|
||||||
result_value = elecprice_provider.records[i]["elecprice_marketprice_wh"]
|
result_value = provider.records[i]["elecprice_marketprice_wh"]
|
||||||
|
|
||||||
# print(f"{i}: Expected: {expected_datetime}:{expected_value}")
|
# print(f"{i}: Expected: {expected_datetime}:{expected_value}")
|
||||||
# print(f"{i}: Result: {result_datetime}:{result_value}")
|
# print(f"{i}: Result: {result_datetime}:{result_value}")
|
||||||
|
@ -24,7 +24,9 @@ def inverter(mock_battery, devices_eos) -> Inverter:
|
|||||||
return_value=mock_self_consumption_predictor,
|
return_value=mock_self_consumption_predictor,
|
||||||
):
|
):
|
||||||
iv = Inverter(
|
iv = Inverter(
|
||||||
InverterParameters(device_id="iv1", max_power_wh=500.0, battery=mock_battery.device_id),
|
InverterParameters(
|
||||||
|
device_id="iv1", max_power_wh=500.0, battery_id=mock_battery.device_id
|
||||||
|
),
|
||||||
)
|
)
|
||||||
devices_eos.add_device(iv)
|
devices_eos.add_device(iv)
|
||||||
devices_eos.post_setup()
|
devices_eos.post_setup()
|
||||||
|
@ -14,11 +14,11 @@ from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def load_provider(config_eos):
|
def provider(config_eos):
|
||||||
"""Fixture to initialise the LoadAkkudoktor instance."""
|
"""Fixture to initialise the LoadAkkudoktor instance."""
|
||||||
settings = {
|
settings = {
|
||||||
"load": {
|
"load": {
|
||||||
"load_provider": "LoadAkkudoktor",
|
"provider": "LoadAkkudoktor",
|
||||||
"provider_settings": {
|
"provider_settings": {
|
||||||
"load_name": "Akkudoktor Profile",
|
"load_name": "Akkudoktor Profile",
|
||||||
"loadakkudoktor_year_energy": "1000",
|
"loadakkudoktor_year_energy": "1000",
|
||||||
@ -41,8 +41,8 @@ def measurement_eos():
|
|||||||
measurement.records.append(
|
measurement.records.append(
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=dt,
|
date_time=dt,
|
||||||
measurement_load0_mr=load0_mr,
|
load0_mr=load0_mr,
|
||||||
measurement_load1_mr=load1_mr,
|
load1_mr=load1_mr,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
dt += interval
|
dt += interval
|
||||||
@ -76,13 +76,13 @@ def test_loadakkudoktor_settings_validator():
|
|||||||
assert settings.loadakkudoktor_year_energy == 1234.56
|
assert settings.loadakkudoktor_year_energy == 1234.56
|
||||||
|
|
||||||
|
|
||||||
def test_loadakkudoktor_provider_id(load_provider):
|
def test_loadakkudoktor_provider_id(provider):
|
||||||
"""Test the `provider_id` class method."""
|
"""Test the `provider_id` class method."""
|
||||||
assert load_provider.provider_id() == "LoadAkkudoktor"
|
assert provider.provider_id() == "LoadAkkudoktor"
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.prediction.loadakkudoktor.np.load")
|
@patch("akkudoktoreos.prediction.loadakkudoktor.np.load")
|
||||||
def test_load_data_from_mock(mock_np_load, mock_load_profiles_file, load_provider):
|
def test_load_data_from_mock(mock_np_load, mock_load_profiles_file, provider):
|
||||||
"""Test the `load_data` method."""
|
"""Test the `load_data` method."""
|
||||||
# Mock numpy load to return data similar to what would be in the file
|
# Mock numpy load to return data similar to what would be in the file
|
||||||
mock_np_load.return_value = {
|
mock_np_load.return_value = {
|
||||||
@ -91,19 +91,19 @@ def test_load_data_from_mock(mock_np_load, mock_load_profiles_file, load_provide
|
|||||||
}
|
}
|
||||||
|
|
||||||
# Test data loading
|
# Test data loading
|
||||||
data_year_energy = load_provider.load_data()
|
data_year_energy = provider.load_data()
|
||||||
assert data_year_energy is not None
|
assert data_year_energy is not None
|
||||||
assert data_year_energy.shape == (365, 2, 24)
|
assert data_year_energy.shape == (365, 2, 24)
|
||||||
|
|
||||||
|
|
||||||
def test_load_data_from_file(load_provider):
|
def test_load_data_from_file(provider):
|
||||||
"""Test `load_data` loads data from the profiles file."""
|
"""Test `load_data` loads data from the profiles file."""
|
||||||
data_year_energy = load_provider.load_data()
|
data_year_energy = provider.load_data()
|
||||||
assert data_year_energy is not None
|
assert data_year_energy is not None
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor.load_data")
|
@patch("akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor.load_data")
|
||||||
def test_update_data(mock_load_data, load_provider):
|
def test_update_data(mock_load_data, provider):
|
||||||
"""Test the `_update` method."""
|
"""Test the `_update` method."""
|
||||||
mock_load_data.return_value = np.random.rand(365, 2, 24)
|
mock_load_data.return_value = np.random.rand(365, 2, 24)
|
||||||
|
|
||||||
@ -112,27 +112,27 @@ def test_update_data(mock_load_data, load_provider):
|
|||||||
ems_eos.set_start_datetime(pendulum.datetime(2024, 1, 1))
|
ems_eos.set_start_datetime(pendulum.datetime(2024, 1, 1))
|
||||||
|
|
||||||
# Ensure there are no prediction records
|
# Ensure there are no prediction records
|
||||||
load_provider.clear()
|
provider.clear()
|
||||||
assert len(load_provider) == 0
|
assert len(provider) == 0
|
||||||
|
|
||||||
# Execute the method
|
# Execute the method
|
||||||
load_provider._update_data()
|
provider._update_data()
|
||||||
|
|
||||||
# Validate that update_value is called
|
# Validate that update_value is called
|
||||||
assert len(load_provider) > 0
|
assert len(provider) > 0
|
||||||
|
|
||||||
|
|
||||||
def test_calculate_adjustment(load_provider, measurement_eos):
|
def test_calculate_adjustment(provider, measurement_eos):
|
||||||
"""Test `_calculate_adjustment` for various scenarios."""
|
"""Test `_calculate_adjustment` for various scenarios."""
|
||||||
data_year_energy = np.random.rand(365, 2, 24)
|
data_year_energy = np.random.rand(365, 2, 24)
|
||||||
|
|
||||||
# Call the method and validate results
|
# Call the method and validate results
|
||||||
weekday_adjust, weekend_adjust = load_provider._calculate_adjustment(data_year_energy)
|
weekday_adjust, weekend_adjust = provider._calculate_adjustment(data_year_energy)
|
||||||
assert weekday_adjust.shape == (24,)
|
assert weekday_adjust.shape == (24,)
|
||||||
assert weekend_adjust.shape == (24,)
|
assert weekend_adjust.shape == (24,)
|
||||||
|
|
||||||
data_year_energy = np.zeros((365, 2, 24))
|
data_year_energy = np.zeros((365, 2, 24))
|
||||||
weekday_adjust, weekend_adjust = load_provider._calculate_adjustment(data_year_energy)
|
weekday_adjust, weekend_adjust = provider._calculate_adjustment(data_year_energy)
|
||||||
|
|
||||||
assert weekday_adjust.shape == (24,)
|
assert weekday_adjust.shape == (24,)
|
||||||
expected = np.array(
|
expected = np.array(
|
||||||
@ -197,7 +197,7 @@ def test_calculate_adjustment(load_provider, measurement_eos):
|
|||||||
np.testing.assert_array_equal(weekend_adjust, expected)
|
np.testing.assert_array_equal(weekend_adjust, expected)
|
||||||
|
|
||||||
|
|
||||||
def test_load_provider_adjustments_with_mock_data(load_provider):
|
def test_provider_adjustments_with_mock_data(provider):
|
||||||
"""Test full integration of adjustments with mock data."""
|
"""Test full integration of adjustments with mock data."""
|
||||||
with patch(
|
with patch(
|
||||||
"akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor._calculate_adjustment"
|
"akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor._calculate_adjustment"
|
||||||
@ -205,5 +205,5 @@ def test_load_provider_adjustments_with_mock_data(load_provider):
|
|||||||
mock_adjust.return_value = (np.zeros(24), np.zeros(24))
|
mock_adjust.return_value = (np.zeros(24), np.zeros(24))
|
||||||
|
|
||||||
# Test execution
|
# Test execution
|
||||||
load_provider._update_data()
|
provider._update_data()
|
||||||
assert mock_adjust.called
|
assert mock_adjust.called
|
||||||
|
@ -17,33 +17,33 @@ def measurement_eos():
|
|||||||
measurement.records = [
|
measurement.records = [
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=0),
|
date_time=datetime(2023, 1, 1, hour=0),
|
||||||
measurement_load0_mr=100,
|
load0_mr=100,
|
||||||
measurement_load1_mr=200,
|
load1_mr=200,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=1),
|
date_time=datetime(2023, 1, 1, hour=1),
|
||||||
measurement_load0_mr=150,
|
load0_mr=150,
|
||||||
measurement_load1_mr=250,
|
load1_mr=250,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=2),
|
date_time=datetime(2023, 1, 1, hour=2),
|
||||||
measurement_load0_mr=200,
|
load0_mr=200,
|
||||||
measurement_load1_mr=300,
|
load1_mr=300,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=3),
|
date_time=datetime(2023, 1, 1, hour=3),
|
||||||
measurement_load0_mr=250,
|
load0_mr=250,
|
||||||
measurement_load1_mr=350,
|
load1_mr=350,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=4),
|
date_time=datetime(2023, 1, 1, hour=4),
|
||||||
measurement_load0_mr=300,
|
load0_mr=300,
|
||||||
measurement_load1_mr=400,
|
load1_mr=400,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=5),
|
date_time=datetime(2023, 1, 1, hour=5),
|
||||||
measurement_load0_mr=350,
|
load0_mr=350,
|
||||||
measurement_load1_mr=450,
|
load1_mr=450,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
return measurement
|
return measurement
|
||||||
@ -79,7 +79,7 @@ def test_interval_count_invalid_non_positive_interval(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_valid_input(measurement_eos):
|
def test_energy_from_meter_readings_valid_input(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with valid inputs and proper alignment of load data."""
|
"""Test _energy_from_meter_readings with valid inputs and proper alignment of load data."""
|
||||||
key = "measurement_load0_mr"
|
key = "load0_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@ -94,7 +94,7 @@ def test_energy_from_meter_readings_valid_input(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_empty_array(measurement_eos):
|
def test_energy_from_meter_readings_empty_array(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with no data (empty array)."""
|
"""Test _energy_from_meter_readings with no data (empty array)."""
|
||||||
key = "measurement_load0_mr"
|
key = "load0_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@ -116,7 +116,7 @@ def test_energy_from_meter_readings_empty_array(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_misaligned_array(measurement_eos):
|
def test_energy_from_meter_readings_misaligned_array(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with misaligned array size."""
|
"""Test _energy_from_meter_readings with misaligned array size."""
|
||||||
key = "measurement_load1_mr"
|
key = "load1_mr"
|
||||||
start_datetime = measurement_eos.min_datetime
|
start_datetime = measurement_eos.min_datetime
|
||||||
end_datetime = measurement_eos.max_datetime
|
end_datetime = measurement_eos.max_datetime
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@ -134,7 +134,7 @@ def test_energy_from_meter_readings_misaligned_array(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_partial_data(measurement_eos, caplog):
|
def test_energy_from_meter_readings_partial_data(measurement_eos, caplog):
|
||||||
"""Test _energy_from_meter_readings with partial data (misaligned but empty array)."""
|
"""Test _energy_from_meter_readings with partial data (misaligned but empty array)."""
|
||||||
key = "measurement_load2_mr"
|
key = "load2_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@ -153,7 +153,7 @@ def test_energy_from_meter_readings_partial_data(measurement_eos, caplog):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_negative_interval(measurement_eos):
|
def test_energy_from_meter_readings_negative_interval(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with a negative interval."""
|
"""Test _energy_from_meter_readings with a negative interval."""
|
||||||
key = "measurement_load3_mr"
|
key = "load3_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=-1)
|
interval = duration(hours=-1)
|
||||||
@ -191,23 +191,23 @@ def test_name_to_key(measurement_eos):
|
|||||||
"""Test name_to_key functionality."""
|
"""Test name_to_key functionality."""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
measurement=MeasurementCommonSettings(
|
measurement=MeasurementCommonSettings(
|
||||||
measurement_load0_name="Household",
|
load0_name="Household",
|
||||||
measurement_load1_name="Heat Pump",
|
load1_name="Heat Pump",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
measurement_eos.config.merge_settings(settings)
|
measurement_eos.config.merge_settings(settings)
|
||||||
|
|
||||||
assert measurement_eos.name_to_key("Household", "measurement_load") == "measurement_load0_mr"
|
assert measurement_eos.name_to_key("Household", "load") == "load0_mr"
|
||||||
assert measurement_eos.name_to_key("Heat Pump", "measurement_load") == "measurement_load1_mr"
|
assert measurement_eos.name_to_key("Heat Pump", "load") == "load1_mr"
|
||||||
assert measurement_eos.name_to_key("Unknown", "measurement_load") is None
|
assert measurement_eos.name_to_key("Unknown", "load") is None
|
||||||
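With the prefixes removed, the `load0_name`/`load1_name` settings map display names to the `load0_mr`/`load1_mr` record keys. A hedged sketch of resolving a name before touching measurements; the `key_delete_by_datetime` call is only an example reuse of the server code above:

```python
key = measurement_eos.name_to_key("Heat Pump", "load")    # -> "load1_mr"
if key is None:
    raise ValueError("unknown measurement name")
measurement_eos.key_delete_by_datetime(key=key)           # e.g. drop stale readings
```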
|
|
||||||
|
|
||||||
def test_name_to_key_invalid_topic(measurement_eos):
|
def test_name_to_key_invalid_topic(measurement_eos):
|
||||||
"""Test name_to_key with an invalid topic."""
|
"""Test name_to_key with an invalid topic."""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
MeasurementCommonSettings(
|
MeasurementCommonSettings(
|
||||||
measurement_load0_name="Household",
|
load0_name="Household",
|
||||||
measurement_load1_name="Heat Pump",
|
load1_name="Heat Pump",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
measurement_eos.config.merge_settings(settings)
|
measurement_eos.config.merge_settings(settings)
|
||||||
|
@ -17,25 +17,6 @@ from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
|
|||||||
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def sample_settings(config_eos):
|
|
||||||
"""Fixture that adds settings data to the global config."""
|
|
||||||
settings = {
|
|
||||||
"prediction_hours": 48,
|
|
||||||
"prediction_historic_hours": 24,
|
|
||||||
"latitude": 52.52,
|
|
||||||
"longitude": 13.405,
|
|
||||||
"weather_provider": None,
|
|
||||||
"pvforecast_provider": None,
|
|
||||||
"load_provider": None,
|
|
||||||
"elecprice_provider": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Merge settings to config
|
|
||||||
config_eos.merge_settings_from_dict(settings)
|
|
||||||
return config_eos
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def prediction():
|
def prediction():
|
||||||
"""All EOS predictions."""
|
"""All EOS predictions."""
|
||||||
@ -59,7 +40,7 @@ def forecast_providers():
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"prediction_hours, prediction_historic_hours, latitude, longitude, expected_timezone",
|
"hours, historic_hours, latitude, longitude, expected_timezone",
|
||||||
[
|
[
|
||||||
(48, 24, 40.7128, -74.0060, "America/New_York"), # Valid latitude/longitude
|
(48, 24, 40.7128, -74.0060, "America/New_York"), # Valid latitude/longitude
|
||||||
(0, 0, None, None, None), # No location
|
(0, 0, None, None, None), # No location
|
||||||
@ -67,17 +48,17 @@ def forecast_providers():
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_prediction_common_settings_valid(
|
def test_prediction_common_settings_valid(
|
||||||
prediction_hours, prediction_historic_hours, latitude, longitude, expected_timezone
|
hours, historic_hours, latitude, longitude, expected_timezone
|
||||||
):
|
):
|
||||||
"""Test valid settings for PredictionCommonSettings."""
|
"""Test valid settings for PredictionCommonSettings."""
|
||||||
settings = PredictionCommonSettings(
|
settings = PredictionCommonSettings(
|
||||||
prediction_hours=prediction_hours,
|
hours=hours,
|
||||||
prediction_historic_hours=prediction_historic_hours,
|
historic_hours=historic_hours,
|
||||||
latitude=latitude,
|
latitude=latitude,
|
||||||
longitude=longitude,
|
longitude=longitude,
|
||||||
)
|
)
|
||||||
assert settings.prediction_hours == prediction_hours
|
assert settings.hours == hours
|
||||||
assert settings.prediction_historic_hours == prediction_historic_hours
|
assert settings.historic_hours == historic_hours
|
||||||
assert settings.latitude == latitude
|
assert settings.latitude == latitude
|
||||||
assert settings.longitude == longitude
|
assert settings.longitude == longitude
|
||||||
assert settings.timezone == expected_timezone
|
assert settings.timezone == expected_timezone
|
||||||
@ -86,8 +67,8 @@ def test_prediction_common_settings_valid(
|
|||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"field_name, invalid_value, expected_error",
|
"field_name, invalid_value, expected_error",
|
||||||
[
|
[
|
||||||
("prediction_hours", -1, "Input should be greater than or equal to 0"),
|
("hours", -1, "Input should be greater than or equal to 0"),
|
||||||
("prediction_historic_hours", -5, "Input should be greater than or equal to 0"),
|
("historic_hours", -5, "Input should be greater than or equal to 0"),
|
||||||
("latitude", -91.0, "Input should be greater than or equal to -90"),
|
("latitude", -91.0, "Input should be greater than or equal to -90"),
|
||||||
("latitude", 91.0, "Input should be less than or equal to 90"),
|
("latitude", 91.0, "Input should be less than or equal to 90"),
|
||||||
("longitude", -181.0, "Input should be greater than or equal to -180"),
|
("longitude", -181.0, "Input should be greater than or equal to -180"),
|
||||||
@ -97,11 +78,12 @@ def test_prediction_common_settings_valid(
|
|||||||
def test_prediction_common_settings_invalid(field_name, invalid_value, expected_error):
|
def test_prediction_common_settings_invalid(field_name, invalid_value, expected_error):
|
||||||
"""Test invalid settings for PredictionCommonSettings."""
|
"""Test invalid settings for PredictionCommonSettings."""
|
||||||
valid_data = {
|
valid_data = {
|
||||||
"prediction_hours": 48,
|
"hours": 48,
|
||||||
"prediction_historic_hours": 24,
|
"historic_hours": 24,
|
||||||
"latitude": 40.7128,
|
"latitude": 40.7128,
|
||||||
"longitude": -74.0060,
|
"longitude": -74.0060,
|
||||||
}
|
}
|
||||||
|
assert PredictionCommonSettings(**valid_data) is not None
|
||||||
valid_data[field_name] = invalid_value
|
valid_data[field_name] = invalid_value
|
||||||
|
|
||||||
with pytest.raises(ValidationError, match=expected_error):
|
with pytest.raises(ValidationError, match=expected_error):
|
||||||
@ -110,16 +92,14 @@ def test_prediction_common_settings_invalid(field_name, invalid_value, expected_
|
|||||||
|
|
||||||
def test_prediction_common_settings_no_location():
|
def test_prediction_common_settings_no_location():
|
||||||
"""Test that timezone is None when latitude and longitude are not provided."""
|
"""Test that timezone is None when latitude and longitude are not provided."""
|
||||||
settings = PredictionCommonSettings(
|
settings = PredictionCommonSettings(hours=48, historic_hours=24, latitude=None, longitude=None)
|
||||||
prediction_hours=48, prediction_historic_hours=24, latitude=None, longitude=None
|
|
||||||
)
|
|
||||||
assert settings.timezone is None
|
assert settings.timezone is None
|
||||||
|
|
||||||
|
|
||||||
def test_prediction_common_settings_with_location():
|
def test_prediction_common_settings_with_location():
|
||||||
"""Test that timezone is correctly computed when latitude and longitude are provided."""
|
"""Test that timezone is correctly computed when latitude and longitude are provided."""
|
||||||
settings = PredictionCommonSettings(
|
settings = PredictionCommonSettings(
|
||||||
prediction_hours=48, prediction_historic_hours=24, latitude=34.0522, longitude=-118.2437
|
hours=48, historic_hours=24, latitude=34.0522, longitude=-118.2437
|
||||||
)
|
)
|
||||||
assert settings.timezone == "America/Los_Angeles"
|
assert settings.timezone == "America/Los_Angeles"
|
||||||
|
|
||||||
|
@@ -101,14 +101,14 @@ class TestPredictionBase:
         assert base.config.prediction.latitude == 2.5

     def test_config_value_from_field_default(self, base, monkeypatch):
-        assert base.config.prediction.model_fields["prediction_hours"].default == 48
-        assert base.config.prediction.prediction_hours == 48
-        monkeypatch.setenv("EOS_PREDICTION__PREDICTION_HOURS", "128")
+        assert base.config.prediction.model_fields["hours"].default == 48
+        assert base.config.prediction.hours == 48
+        monkeypatch.setenv("EOS_PREDICTION__HOURS", "128")
         base.config.reset_settings()
-        assert base.config.prediction.prediction_hours == 128
-        monkeypatch.delenv("EOS_PREDICTION__PREDICTION_HOURS")
+        assert base.config.prediction.hours == 128
+        monkeypatch.delenv("EOS_PREDICTION__HOURS")
         base.config.reset_settings()
-        assert base.config.prediction.prediction_hours == 48
+        assert base.config.prediction.hours == 48

     def test_get_config_value_key_error(self, base):
         with pytest.raises(AttributeError):
@@ -159,14 +159,14 @@ class TestPredictionProvider:
         """Test that computed fields `end_datetime` and `keep_datetime` are correctly calculated."""
         ems_eos = get_ems()
         ems_eos.set_start_datetime(sample_start_datetime)
-        provider.config.prediction.prediction_hours = 24  # 24 hours into the future
-        provider.config.prediction.prediction_historic_hours = 48  # 48 hours into the past
+        provider.config.prediction.hours = 24  # 24 hours into the future
+        provider.config.prediction.historic_hours = 48  # 48 hours into the past

         expected_end_datetime = sample_start_datetime + to_duration(
-            provider.config.prediction.prediction_hours * 3600
+            provider.config.prediction.hours * 3600
         )
         expected_keep_datetime = sample_start_datetime - to_duration(
-            provider.config.prediction.prediction_historic_hours * 3600
+            provider.config.prediction.historic_hours * 3600
         )

         assert (
@@ -183,8 +183,8 @@ class TestPredictionProvider:
         # EOS config supersedes
         ems_eos = get_ems()
         # The following values are currently not set in EOS config, we can override
-        monkeypatch.setenv("EOS_PREDICTION__PREDICTION_HISTORIC_HOURS", "2")
-        assert os.getenv("EOS_PREDICTION__PREDICTION_HISTORIC_HOURS") == "2"
+        monkeypatch.setenv("EOS_PREDICTION__HISTORIC_HOURS", "2")
+        assert os.getenv("EOS_PREDICTION__HISTORIC_HOURS") == "2"
         monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "37.7749")
         assert os.getenv("EOS_PREDICTION__LATITUDE") == "37.7749"
         monkeypatch.setenv("EOS_PREDICTION__LONGITUDE", "-122.4194")
@@ -194,13 +194,13 @@ class TestPredictionProvider:
         ems_eos.set_start_datetime(sample_start_datetime)
         provider.update_data()

-        assert provider.config.prediction.prediction_hours == config_eos.prediction.prediction_hours
-        assert provider.config.prediction.prediction_historic_hours == 2
+        assert provider.config.prediction.hours == config_eos.prediction.hours
+        assert provider.config.prediction.historic_hours == 2
         assert provider.config.prediction.latitude == 37.7749
         assert provider.config.prediction.longitude == -122.4194
         assert provider.start_datetime == sample_start_datetime
         assert provider.end_datetime == sample_start_datetime + to_duration(
-            f"{provider.config.prediction.prediction_hours} hours"
+            f"{provider.config.prediction.hours} hours"
         )
         assert provider.keep_datetime == sample_start_datetime - to_duration("2 hours")

@@ -290,7 +290,7 @@ class TestPredictionContainer:
         ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
         settings = {
             "prediction": {
-                "prediction_hours": hours,
+                "hours": hours,
             }
         }
         container.config.merge_settings_from_dict(settings)
@@ -320,7 +320,7 @@ class TestPredictionContainer:
         ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
         settings = {
             "prediction": {
-                "prediction_historic_hours": historic_hours,
+                "historic_hours": historic_hours,
             }
         }
         container.config.merge_settings_from_dict(settings)
@@ -328,7 +328,7 @@ class TestPredictionContainer:
         assert compare_datetimes(container.keep_datetime, expected).equal

     @pytest.mark.parametrize(
-        "start, prediction_hours, expected_hours",
+        "start, hours, expected_hours",
         [
             ("2024-11-10 00:00:00", 24, 24),  # No DST in Germany
             ("2024-08-10 00:00:00", 24, 24),  # DST in Germany
@@ -336,13 +336,13 @@ class TestPredictionContainer:
             ("2024-10-27 00:00:00", 24, 25),  # DST change in Germany (25 hours/ day)
         ],
     )
-    def test_total_hours(self, container, start, prediction_hours, expected_hours):
+    def test_total_hours(self, container, start, hours, expected_hours):
         """Test the `total_hours` property."""
         ems_eos = get_ems()
         ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
         settings = {
             "prediction": {
-                "prediction_hours": prediction_hours,
+                "hours": hours,
             }
         }
         container.config.merge_settings_from_dict(settings)
@@ -363,7 +363,7 @@ class TestPredictionContainer:
         ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
         settings = {
             "prediction": {
-                "prediction_historic_hours": historic_hours,
+                "historic_hours": historic_hours,
             }
         }
         container.config.merge_settings_from_dict(settings)
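The hunks above exercise the renamed nested environment variables (`EOS_PREDICTION__HOURS` rather than `EOS_PREDICTION__PREDICTION_HOURS`). A minimal sketch of how such double-underscore names map onto nested settings, assuming a pydantic-settings model with `env_prefix="EOS_"` and `env_nested_delimiter="__"`; the class names here are illustrative stand-ins, not EOS's actual configuration classes:

```python
import os
from typing import Optional

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class PredictionCommon(BaseModel):
    # Illustrative stand-in for the prediction settings group.
    hours: int = 48
    historic_hours: int = 48
    latitude: Optional[float] = None
    longitude: Optional[float] = None


class Settings(BaseSettings):
    # The "EOS_" prefix plus the "__" delimiter turn EOS_PREDICTION__HOURS
    # into the nested field prediction.hours.
    model_config = SettingsConfigDict(env_prefix="EOS_", env_nested_delimiter="__")
    prediction: PredictionCommon = PredictionCommon()


os.environ["EOS_PREDICTION__HOURS"] = "128"
assert Settings().prediction.hours == 128  # overridden from the environment
```

This is the same override-then-reset behaviour the `monkeypatch.setenv` / `reset_settings` assertions above check against the real EOS config.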
@@ -26,13 +26,13 @@ def sample_settings(config_eos):
     """Fixture that adds settings data to the global config."""
     settings = {
         "prediction": {
-            "prediction_hours": 48,
-            "prediction_historic_hours": 24,
+            "hours": 48,
+            "historic_hours": 24,
             "latitude": 52.52,
             "longitude": 13.405,
         },
         "pvforecast": {
-            "pvforecast_provider": "PVForecastAkkudoktor",
+            "provider": "PVForecastAkkudoktor",
             "pvforecast0_peakpower": 5.0,
             "pvforecast0_surface_azimuth": -10,
             "pvforecast0_surface_tilt": 7,
@@ -59,7 +59,7 @@ def sample_settings(config_eos):

     # Merge settings to config
     config_eos.merge_settings_from_dict(settings)
-    assert config_eos.pvforecast.pvforecast_provider == "PVForecastAkkudoktor"
+    assert config_eos.pvforecast.provider == "PVForecastAkkudoktor"
     return config_eos


@@ -147,13 +147,13 @@ sample_value = AkkudoktorForecastValue(
 )
 sample_config_data = {
     "prediction": {
-        "prediction_hours": 48,
-        "prediction_historic_hours": 24,
+        "hours": 48,
+        "historic_hours": 24,
         "latitude": 52.52,
         "longitude": 13.405,
     },
     "pvforecast": {
-        "pvforecast_provider": "PVForecastAkkudoktor",
+        "provider": "PVForecastAkkudoktor",
         "pvforecast0_peakpower": 5.0,
         "pvforecast0_surface_azimuth": 180,
         "pvforecast0_surface_tilt": 30,
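The fixtures above feed a plain nested dict with the shortened keys into the global config via `merge_settings_from_dict`. As a rough illustration of what that kind of helper does conceptually, here is a self-contained sketch of a recursive overlay of a partial settings dict onto defaults; this is only a sketch under that assumption, not EOS's actual implementation:

```python
from typing import Any


def merge_nested(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
    """Recursively overlay `override` onto `base` (illustrative sketch only)."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_nested(merged[key], value)
        else:
            merged[key] = value
    return merged


defaults = {"prediction": {"hours": 48, "historic_hours": 48}}
settings = {
    "prediction": {"historic_hours": 24},
    "pvforecast": {"provider": "PVForecastAkkudoktor"},
}
merged = merge_nested(defaults, settings)
assert merged["prediction"] == {"hours": 48, "historic_hours": 24}
assert merged["pvforecast"]["provider"] == "PVForecastAkkudoktor"
```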
@@ -13,11 +13,11 @@ FILE_TESTDATA_PVFORECASTIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.js


 @pytest.fixture
-def pvforecast_provider(sample_import_1_json, config_eos):
+def provider(sample_import_1_json, config_eos):
     """Fixture to create a PVForecastProvider instance."""
     settings = {
         "pvforecast": {
-            "pvforecast_provider": "PVForecastImport",
+            "provider": "PVForecastImport",
             "provider_settings": {
                 "pvforecastimport_file_path": str(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON),
                 "pvforecastimport_json": json.dumps(sample_import_1_json),
@@ -43,24 +43,24 @@ def sample_import_1_json():
 # ------------------------------------------------


-def test_singleton_instance(pvforecast_provider):
+def test_singleton_instance(provider):
     """Test that PVForecastForecast behaves as a singleton."""
     another_instance = PVForecastImport()
-    assert pvforecast_provider is another_instance
+    assert provider is another_instance


-def test_invalid_provider(pvforecast_provider, config_eos):
-    """Test requesting an unsupported pvforecast_provider."""
+def test_invalid_provider(provider, config_eos):
+    """Test requesting an unsupported provider."""
     settings = {
         "pvforecast": {
-            "pvforecast_provider": "<invalid>",
+            "provider": "<invalid>",
             "provider_settings": {
                 "pvforecastimport_file_path": str(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON),
             },
         }
     }
     config_eos.merge_settings_from_dict(settings)
-    assert not pvforecast_provider.enabled()
+    assert not provider.enabled()


 # ------------------------------------------------
@@ -81,7 +81,7 @@ def test_invalid_provider(pvforecast_provider, config_eos):
         ("2024-10-27 00:00:00", False),  # DST change in Germany (25 hours/ day)
     ],
 )
-def test_import(pvforecast_provider, sample_import_1_json, start_datetime, from_file, config_eos):
+def test_import(provider, sample_import_1_json, start_datetime, from_file, config_eos):
     """Test fetching forecast from import."""
     ems_eos = get_ems()
     ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
@@ -91,25 +91,23 @@ def test_import(pvforecast_provider, sample_import_1_json, start_datetime, from_
     else:
         config_eos.pvforecast.provider_settings.pvforecastimport_file_path = None
         assert config_eos.pvforecast.provider_settings.pvforecastimport_file_path is None
-    pvforecast_provider.clear()
+    provider.clear()

     # Call the method
-    pvforecast_provider.update_data()
+    provider.update_data()

     # Assert: Verify the result is as expected
-    assert pvforecast_provider.start_datetime is not None
-    assert pvforecast_provider.total_hours is not None
-    assert compare_datetimes(pvforecast_provider.start_datetime, ems_eos.start_datetime).equal
+    assert provider.start_datetime is not None
+    assert provider.total_hours is not None
+    assert compare_datetimes(provider.start_datetime, ems_eos.start_datetime).equal
     values = sample_import_1_json["pvforecast_ac_power"]
-    value_datetime_mapping = pvforecast_provider.import_datetimes(
-        ems_eos.start_datetime, len(values)
-    )
+    value_datetime_mapping = provider.import_datetimes(ems_eos.start_datetime, len(values))
     for i, mapping in enumerate(value_datetime_mapping):
-        assert i < len(pvforecast_provider.records)
+        assert i < len(provider.records)
         expected_datetime, expected_value_index = mapping
         expected_value = values[expected_value_index]
-        result_datetime = pvforecast_provider.records[i].date_time
-        result_value = pvforecast_provider.records[i]["pvforecast_ac_power"]
+        result_datetime = provider.records[i].date_time
+        result_value = provider.records[i]["pvforecast_ac_power"]

         # print(f"{i}: Expected: {expected_datetime}:{expected_value}")
         # print(f"{i}: Result: {result_datetime}:{result_value}")
@@ -17,7 +17,7 @@ FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON = DIR_TESTDATA.joinpath("weatherforecast_b


 @pytest.fixture
-def weather_provider(monkeypatch):
+def provider(monkeypatch):
     """Fixture to create a WeatherProvider instance."""
     monkeypatch.setenv("EOS_WEATHER__WEATHER_PROVIDER", "BrightSky")
     monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "50.0")
@@ -52,27 +52,27 @@ def cache_store():
 # ------------------------------------------------


-def test_singleton_instance(weather_provider):
+def test_singleton_instance(provider):
     """Test that WeatherForecast behaves as a singleton."""
     another_instance = WeatherBrightSky()
-    assert weather_provider is another_instance
+    assert provider is another_instance


-def test_invalid_provider(weather_provider, monkeypatch):
-    """Test requesting an unsupported weather_provider."""
+def test_invalid_provider(provider, monkeypatch):
+    """Test requesting an unsupported provider."""
     monkeypatch.setenv("EOS_WEATHER__WEATHER_PROVIDER", "<invalid>")
-    weather_provider.config.reset_settings()
-    assert not weather_provider.enabled()
+    provider.config.reset_settings()
+    assert not provider.enabled()


-def test_invalid_coordinates(weather_provider, monkeypatch):
+def test_invalid_coordinates(provider, monkeypatch):
     """Test invalid coordinates raise ValueError."""
     monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "1000")
     monkeypatch.setenv("EOS_PREDICTION__LONGITUDE", "1000")
     with pytest.raises(
         ValueError,  # match="Latitude '1000' and/ or longitude `1000` out of valid range."
     ):
-        weather_provider.config.reset_settings()
+        provider.config.reset_settings()


 # ------------------------------------------------
@@ -80,15 +80,13 @@ def test_invalid_coordinates(weather_provider, monkeypatch):
 # ------------------------------------------------


-def test_irridiance_estimate_from_cloud_cover(weather_provider):
+def test_irridiance_estimate_from_cloud_cover(provider):
     """Test cloud cover to irradiance estimation."""
     cloud_cover_data = pd.Series(
         data=[20, 50, 80], index=pd.date_range("2023-10-22", periods=3, freq="h")
     )

-    ghi, dni, dhi = weather_provider.estimate_irradiance_from_cloud_cover(
-        50.0, 10.0, cloud_cover_data
-    )
+    ghi, dni, dhi = provider.estimate_irradiance_from_cloud_cover(50.0, 10.0, cloud_cover_data)

     assert ghi == [0, 0, 0]
     assert dhi == [0, 0, 0]
@@ -101,7 +99,7 @@ def test_irridiance_estimate_from_cloud_cover(weather_provider):


 @patch("requests.get")
-def test_request_forecast(mock_get, weather_provider, sample_brightsky_1_json):
+def test_request_forecast(mock_get, provider, sample_brightsky_1_json):
     """Test requesting forecast from BrightSky."""
     # Mock response object
     mock_response = Mock()
@@ -110,10 +108,10 @@ def test_request_forecast(mock_get, weather_provider, sample_brightsky_1_json):
     mock_get.return_value = mock_response

     # Preset, as this is usually done by update()
-    weather_provider.config.update()
+    provider.config.update()

     # Test function
-    brightsky_data = weather_provider._request_forecast()
+    brightsky_data = provider._request_forecast()

     assert isinstance(brightsky_data, dict)
     assert brightsky_data["weather"][0] == {
@@ -150,7 +148,7 @@ def test_request_forecast(mock_get, weather_provider, sample_brightsky_1_json):


 @patch("requests.get")
-def test_update_data(mock_get, weather_provider, sample_brightsky_1_json, cache_store):
+def test_update_data(mock_get, provider, sample_brightsky_1_json, cache_store):
     """Test fetching forecast from BrightSky."""
     # Mock response object
     mock_response = Mock()
@@ -163,14 +161,14 @@ def test_update_data(mock_get, weather_provider, sample_brightsky_1_json, cache_
     # Call the method
     ems_eos = get_ems()
     ems_eos.set_start_datetime(to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin"))
-    weather_provider.update_data(force_enable=True, force_update=True)
+    provider.update_data(force_enable=True, force_update=True)

     # Assert: Verify the result is as expected
     mock_get.assert_called_once()
-    assert len(weather_provider) == 338
+    assert len(provider) == 338

     # with open(FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON, "w") as f_out:
-    #    f_out.write(weather_provider.to_json())
+    #    f_out.write(provider.to_json())


 # ------------------------------------------------
@@ -179,14 +177,14 @@ def test_update_data(mock_get, weather_provider, sample_brightsky_1_json, cache_


 @pytest.mark.skip(reason="For development only")
-def test_brightsky_development_forecast_data(weather_provider):
+def test_brightsky_development_forecast_data(provider):
     """Fetch data from real BrightSky server."""
     # Preset, as this is usually done by update_data()
-    weather_provider.start_datetime = to_datetime("2024-10-26 00:00:00")
-    weather_provider.latitude = 50.0
-    weather_provider.longitude = 10.0
+    provider.start_datetime = to_datetime("2024-10-26 00:00:00")
+    provider.latitude = 50.0
+    provider.longitude = 10.0

-    brightsky_data = weather_provider._request_forecast()
+    brightsky_data = provider._request_forecast()

     with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "w") as f_out:
         json.dump(brightsky_data, f_out, indent=4)
@@ -21,11 +21,11 @@ FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA = DIR_TESTDATA.joinpath("weatherforecas


 @pytest.fixture
-def weather_provider(config_eos):
+def provider(config_eos):
     """Fixture to create a WeatherProvider instance."""
     settings = {
         "weather": {
-            "weather_provider": "ClearOutside",
+            "provider": "ClearOutside",
         },
         "prediction": {
             "latitude": 50.0,
@@ -64,28 +64,28 @@ def cache_store():
 # ------------------------------------------------


-def test_singleton_instance(weather_provider):
+def test_singleton_instance(provider):
     """Test that WeatherForecast behaves as a singleton."""
     another_instance = WeatherClearOutside()
-    assert weather_provider is another_instance
+    assert provider is another_instance


-def test_invalid_provider(weather_provider, config_eos):
-    """Test requesting an unsupported weather_provider."""
+def test_invalid_provider(provider, config_eos):
+    """Test requesting an unsupported provider."""
     settings = {
         "weather": {
-            "weather_provider": "<invalid>",
+            "provider": "<invalid>",
         }
     }
     config_eos.merge_settings_from_dict(settings)
-    assert not weather_provider.enabled()
+    assert not provider.enabled()


-def test_invalid_coordinates(weather_provider, config_eos):
+def test_invalid_coordinates(provider, config_eos):
     """Test invalid coordinates raise ValueError."""
     settings = {
         "weather": {
-            "weather_provider": "ClearOutside",
+            "provider": "ClearOutside",
         },
         "prediction": {
             "latitude": 1000.0,
@@ -103,15 +103,13 @@ def test_invalid_coordinates(weather_provider, config_eos):
 # ------------------------------------------------


-def test_irridiance_estimate_from_cloud_cover(weather_provider):
+def test_irridiance_estimate_from_cloud_cover(provider):
     """Test cloud cover to irradiance estimation."""
     cloud_cover_data = pd.Series(
         data=[20, 50, 80], index=pd.date_range("2023-10-22", periods=3, freq="h")
     )

-    ghi, dni, dhi = weather_provider.estimate_irradiance_from_cloud_cover(
-        50.0, 10.0, cloud_cover_data
-    )
+    ghi, dni, dhi = provider.estimate_irradiance_from_cloud_cover(50.0, 10.0, cloud_cover_data)

     assert ghi == [0, 0, 0]
     assert dhi == [0, 0, 0]
@@ -124,7 +122,7 @@ def test_irridiance_estimate_from_cloud_cover(weather_provider):


 @patch("requests.get")
-def test_request_forecast(mock_get, weather_provider, sample_clearout_1_html, config_eos):
+def test_request_forecast(mock_get, provider, sample_clearout_1_html, config_eos):
     """Test fetching forecast from ClearOutside."""
     # Mock response object
     mock_response = Mock()
@@ -136,14 +134,14 @@ def test_request_forecast(mock_get, weather_provider, sample_clearout_1_html, co
     config_eos.update()

     # Test function
-    response = weather_provider._request_forecast()
+    response = provider._request_forecast()

     assert response.status_code == 200
     assert response.content == sample_clearout_1_html


 @patch("requests.get")
-def test_update_data(mock_get, weather_provider, sample_clearout_1_html, sample_clearout_1_data):
+def test_update_data(mock_get, provider, sample_clearout_1_html, sample_clearout_1_data):
     # Mock response object
     mock_response = Mock()
     mock_response.status_code = 200
@@ -157,17 +155,17 @@ def test_update_data(mock_get, weather_provider, sample_clearout_1_html, sample_
     # Call the method
     ems_eos = get_ems()
     ems_eos.set_start_datetime(expected_start)
-    weather_provider.update_data()
+    provider.update_data()

     # Check for correct prediction time window
-    assert weather_provider.config.prediction.prediction_hours == 48
-    assert weather_provider.config.prediction.prediction_historic_hours == 48
-    assert compare_datetimes(weather_provider.start_datetime, expected_start).equal
-    assert compare_datetimes(weather_provider.end_datetime, expected_end).equal
-    assert compare_datetimes(weather_provider.keep_datetime, expected_keep).equal
+    assert provider.config.prediction.hours == 48
+    assert provider.config.prediction.historic_hours == 48
+    assert compare_datetimes(provider.start_datetime, expected_start).equal
+    assert compare_datetimes(provider.end_datetime, expected_end).equal
+    assert compare_datetimes(provider.keep_datetime, expected_keep).equal

     # Verify the data
-    assert len(weather_provider) == 165  # 6 days, 24 hours per day - 7th day 21 hours
+    assert len(provider) == 165  # 6 days, 24 hours per day - 7th day 21 hours

     # Check that specific values match the expected output
     # for i, record in enumerate(weather_data.records):
@@ -179,7 +177,7 @@ def test_update_data(mock_get, weather_provider, sample_clearout_1_html, sample_

 @pytest.mark.skip(reason="Test fixture to be improved")
 @patch("requests.get")
-def test_cache_forecast(mock_get, weather_provider, sample_clearout_1_html, cache_store):
+def test_cache_forecast(mock_get, provider, sample_clearout_1_html, cache_store):
     """Test that ClearOutside forecast data is cached with TTL.

     This can not be tested with mock_get. Mock objects are not pickable and therefor can not be
@@ -193,12 +191,12 @@ def test_cache_forecast(mock_get, weather_provider, sample_clearout_1_html, cach

     cache_store.clear(clear_all=True)

-    weather_provider.update_data()
+    provider.update_data()
     mock_get.assert_called_once()
-    forecast_data_first = weather_provider.to_json()
+    forecast_data_first = provider.to_json()

-    weather_provider.update_data()
-    forecast_data_second = weather_provider.to_json()
+    provider.update_data()
+    forecast_data_second = provider.to_json()
     # Verify that cache returns the same object without calling the method again
     assert forecast_data_first == forecast_data_second
     # A mock object is not pickable and therefor can not be chached to file
@@ -212,7 +210,7 @@ def test_cache_forecast(mock_get, weather_provider, sample_clearout_1_html, cach

 @pytest.mark.skip(reason="For development only")
 @patch("requests.get")
-def test_development_forecast_data(mock_get, weather_provider, sample_clearout_1_html):
+def test_development_forecast_data(mock_get, provider, sample_clearout_1_html):
     # Mock response object
     mock_response = Mock()
     mock_response.status_code = 200
@@ -220,14 +218,14 @@ def test_development_forecast_data(mock_get, weather_provider, sample_clearout_1
     mock_get.return_value = mock_response

     # Fill the instance
-    weather_provider.update_data(force_enable=True)
+    provider.update_data(force_enable=True)

     with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "w", encoding="utf8") as f_out:
-        f_out.write(weather_provider.to_json())
+        f_out.write(provider.to_json())


 @pytest.mark.skip(reason="For development only")
-def test_clearoutsides_development_scraper(weather_provider, sample_clearout_1_html):
+def test_clearoutsides_development_scraper(provider, sample_clearout_1_html):
     """Test scraping from ClearOutside."""
     soup = BeautifulSoup(sample_clearout_1_html, "html.parser")

@@ -13,14 +13,14 @@ FILE_TESTDATA_WEATHERIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.json"


 @pytest.fixture
-def weather_provider(sample_import_1_json, config_eos):
+def provider(sample_import_1_json, config_eos):
     """Fixture to create a WeatherProvider instance."""
     settings = {
         "weather": {
-            "weather_provider": "WeatherImport",
+            "provider": "WeatherImport",
             "provider_settings": {
-                "weatherimport_file_path": str(FILE_TESTDATA_WEATHERIMPORT_1_JSON),
-                "weatherimport_json": json.dumps(sample_import_1_json),
+                "import_file_path": str(FILE_TESTDATA_WEATHERIMPORT_1_JSON),
+                "import_json": json.dumps(sample_import_1_json),
             },
         }
     }
@@ -43,24 +43,24 @@ def sample_import_1_json():
 # ------------------------------------------------


-def test_singleton_instance(weather_provider):
+def test_singleton_instance(provider):
     """Test that WeatherForecast behaves as a singleton."""
     another_instance = WeatherImport()
-    assert weather_provider is another_instance
+    assert provider is another_instance


-def test_invalid_provider(weather_provider, config_eos, monkeypatch):
-    """Test requesting an unsupported weather_provider."""
+def test_invalid_provider(provider, config_eos, monkeypatch):
+    """Test requesting an unsupported provider."""
     settings = {
         "weather": {
-            "weather_provider": "<invalid>",
+            "provider": "<invalid>",
             "provider_settings": {
-                "weatherimport_file_path": str(FILE_TESTDATA_WEATHERIMPORT_1_JSON),
+                "import_file_path": str(FILE_TESTDATA_WEATHERIMPORT_1_JSON),
             },
         }
     }
     config_eos.merge_settings_from_dict(settings)
-    assert weather_provider.enabled() == False
+    assert provider.enabled() == False


 # ------------------------------------------------
@@ -81,33 +81,33 @@ def test_invalid_provider(weather_provider, config_eos, monkeypatch):
         ("2024-10-27 00:00:00", False),  # DST change in Germany (25 hours/ day)
     ],
 )
-def test_import(weather_provider, sample_import_1_json, start_datetime, from_file, config_eos):
+def test_import(provider, sample_import_1_json, start_datetime, from_file, config_eos):
     """Test fetching forecast from Import."""
     ems_eos = get_ems()
     ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
     if from_file:
-        config_eos.weather.provider_settings.weatherimport_json = None
-        assert config_eos.weather.provider_settings.weatherimport_json is None
+        config_eos.weather.provider_settings.import_json = None
+        assert config_eos.weather.provider_settings.import_json is None
     else:
-        config_eos.weather.provider_settings.weatherimport_file_path = None
-        assert config_eos.weather.provider_settings.weatherimport_file_path is None
-    weather_provider.clear()
+        config_eos.weather.provider_settings.import_file_path = None
+        assert config_eos.weather.provider_settings.import_file_path is None
+    provider.clear()

     # Call the method
-    weather_provider.update_data()
+    provider.update_data()

     # Assert: Verify the result is as expected
-    assert weather_provider.start_datetime is not None
-    assert weather_provider.total_hours is not None
-    assert compare_datetimes(weather_provider.start_datetime, ems_eos.start_datetime).equal
+    assert provider.start_datetime is not None
+    assert provider.total_hours is not None
+    assert compare_datetimes(provider.start_datetime, ems_eos.start_datetime).equal
     values = sample_import_1_json["weather_temp_air"]
-    value_datetime_mapping = weather_provider.import_datetimes(ems_eos.start_datetime, len(values))
+    value_datetime_mapping = provider.import_datetimes(ems_eos.start_datetime, len(values))
     for i, mapping in enumerate(value_datetime_mapping):
-        assert i < len(weather_provider.records)
+        assert i < len(provider.records)
         expected_datetime, expected_value_index = mapping
         expected_value = values[expected_value_index]
-        result_datetime = weather_provider.records[i].date_time
-        result_value = weather_provider.records[i]["weather_temp_air"]
+        result_datetime = provider.records[i].date_time
+        result_value = provider.records[i]["weather_temp_air"]

         # print(f"{i}: Expected: {expected_datetime}:{expected_value}")
         # print(f"{i}: Result: {result_datetime}:{result_value}")
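Across the pvforecast and weather test files above, the fixture is now simply `provider`, and `enabled()` is expected to be false whenever the configured provider name does not match the instance. A minimal, self-contained sketch of that selection pattern, assuming a hypothetical `ProviderSketch` class that is only an illustration and not EOS's actual provider base class:

```python
class ProviderSketch:
    """Illustrative stand-in for a prediction provider with a fixed name."""

    provider_name = "WeatherImport"

    def __init__(self, configured_provider: str | None) -> None:
        self.configured_provider = configured_provider

    def enabled(self) -> bool:
        # Enabled only when the configuration selects exactly this provider.
        return self.configured_provider == self.provider_name


assert ProviderSketch("WeatherImport").enabled()
assert not ProviderSketch("<invalid>").enabled()
```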
tests/testdata/optimize_input_1.json (vendored, 2 changes)
@@ -35,7 +35,7 @@
     "inverter": {
         "device_id": "inverter1",
         "max_power_wh": 10000,
-        "battery": "battery1"
+        "battery_id": "battery1"
     },
     "eauto": {
         "device_id": "ev1",
tests/testdata/optimize_input_2.json (vendored, 2 changes)
@@ -173,7 +173,7 @@
     "inverter": {
         "device_id": "inverter1",
         "max_power_wh": 10000,
-        "battery": "battery1"
+        "battery_id": "battery1"
     },
     "dishwasher": {
         "device_id": "dishwasher1",