Mirror of https://github.com/Akkudoktor-EOS/EOS.git (synced 2025-06-27 16:36:53 +00:00)
Fix config and prediction revamp. (#259)
Extend single_test_optimization.py to use real-world data from the new prediction classes:
- .venv/bin/python single_test_optimization.py --real-world --verbose
It can also be run with profiling ("--profile").

Add single_test_prediction.py to fetch predictions from remote prediction providers:
- .venv/bin/python single_test_prediction.py --verbose --provider-id PVForecastAkkudoktor | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id LoadAkkudoktor | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id ElecPriceAkkudoktor | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id BrightSky | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id ClearOutside | more
It can also be run with profiling ("--profile").

single_test_optimization.py is an example of how to retrieve prediction data for optimization and use it to set up the optimization parameters.

Changes:
- load: Only one load provider at a time (instead of five before).

Bug fixes:
- prediction: Only use providers that are enabled to retrieve or set data.
- prediction: Fix pre-pendulum format strings.
- dataabc: Prevent an error when resampling data with no datasets.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
Parent: 810cc17c0b
Commit: 31bd2de18b
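For orientation before the diff, here is a condensed sketch of the pattern the commit message describes: configure the enabled providers, update the prediction data, and read a key back as an array. It is distilled from the new code in single_test_optimization.py shown below; the specific settings values are illustrative only, not recommendations.

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.prediction import get_prediction

config_eos = get_config()
prediction_eos = get_prediction()
ems_eos = get_ems()

# Enable exactly one provider per prediction type (only one load provider at a time now).
# Values here are illustrative; see single_test_optimization.py below for the full set.
config_eos.merge_settings_from_dict(
    {
        "prediction_hours": 48,
        "latitude": 52.52,
        "longitude": 13.405,
        "weather_provider": "ClearOutside",
        "elecprice_provider": "ElecPriceAkkudoktor",
        "load_provider": "LoadAkkudoktor",
        "loadakkudoktor_year_energy": 5000,  # yearly energy consumption in kWh
    }
)

# Fetch fresh data from the enabled providers and read one key back as an array.
ems_eos.set_start_datetime()
prediction_eos.update_data()
load_mean = prediction_eos.key_to_array(
    key="load_mean",
    start_datetime=prediction_eos.start_datetime,
    end_datetime=prediction_eos.end_datetime,
)
print(load_mean)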
@ -605,8 +605,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Weather Provider",
|
||||
"description": "Weather provider id of provider to be used.",
|
||||
"default": "ClearOutside"
|
||||
"description": "Weather provider id of provider to be used."
|
||||
},
|
||||
"pvforecastimport_file_path": {
|
||||
"anyOf": [
|
||||
@ -1865,7 +1864,7 @@
|
||||
"title": "Pvforecast5 Strings Per Inverter",
|
||||
"description": "Number of the strings of the inverter of this plane."
|
||||
},
|
||||
"load0_import_file_path": {
|
||||
"load_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -1878,10 +1877,10 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Import File Path",
|
||||
"title": "Load Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load0_import_json": {
|
||||
"load_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -1890,119 +1889,7 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load1_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load1_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load2_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load2_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load3_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load3_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load4_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load4_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Import Json",
|
||||
"title": "Load Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"loadakkudoktor_year_energy": {
|
||||
@ -2017,7 +1904,7 @@
|
||||
"title": "Loadakkudoktor Year Energy",
|
||||
"description": "Yearly energy consumption (kWh)."
|
||||
},
|
||||
"load0_provider": {
|
||||
"load_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -2026,10 +1913,10 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Provider",
|
||||
"title": "Load Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load0_name": {
|
||||
"load_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -2038,103 +1925,7 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load1_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load1_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load2_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load2_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load3_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load3_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load4_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load4_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Name",
|
||||
"title": "Load Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"elecpriceimport_file_path": {
|
||||
@ -2175,8 +1966,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Elecprice Provider",
|
||||
"description": "Electicity price provider id of provider to be used.",
|
||||
"default": "ElecPriceAkkudoktor"
|
||||
"description": "Electicity price provider id of provider to be used."
|
||||
},
|
||||
"prediction_hours": {
|
||||
"anyOf": [
|
||||
@ -2630,22 +2420,6 @@
|
||||
"description": "Compute a list of the maximum power rating of the inverter per active planes.",
|
||||
"readOnly": true
|
||||
},
|
||||
"load_count": {
|
||||
"type": "integer",
|
||||
"title": "Load Count",
|
||||
"description": "Maximum number of loads.",
|
||||
"readOnly": true
|
||||
},
|
||||
"load_providers": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array",
|
||||
"uniqueItems": true,
|
||||
"title": "Load Providers",
|
||||
"description": "Load providers.",
|
||||
"readOnly": true
|
||||
},
|
||||
"timezone": {
|
||||
"anyOf": [
|
||||
{
|
||||
@ -2703,8 +2477,6 @@
|
||||
"pvforecast_planes_tilt",
|
||||
"pvforecast_planes_userhorizon",
|
||||
"pvforecast_planes_inverter_paco",
|
||||
"load_count",
|
||||
"load_providers",
|
||||
"timezone",
|
||||
"data_output_path",
|
||||
"data_cache_path",
|
||||
@ -3310,8 +3082,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Weather Provider",
|
||||
"description": "Weather provider id of provider to be used.",
|
||||
"default": "ClearOutside"
|
||||
"description": "Weather provider id of provider to be used."
|
||||
},
|
||||
"pvforecastimport_file_path": {
|
||||
"anyOf": [
|
||||
@ -4570,7 +4341,7 @@
|
||||
"title": "Pvforecast5 Strings Per Inverter",
|
||||
"description": "Number of the strings of the inverter of this plane."
|
||||
},
|
||||
"load0_import_file_path": {
|
||||
"load_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -4583,10 +4354,10 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Import File Path",
|
||||
"title": "Load Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load0_import_json": {
|
||||
"load_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -4595,119 +4366,7 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load1_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load1_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load2_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load2_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load3_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load3_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"load4_import_file_path": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "path"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Import File Path",
|
||||
"description": "Path to the file to import load data from."
|
||||
},
|
||||
"load4_import_json": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Import Json",
|
||||
"title": "Load Import Json",
|
||||
"description": "JSON string, dictionary of load forecast value lists."
|
||||
},
|
||||
"loadakkudoktor_year_energy": {
|
||||
@ -4722,7 +4381,7 @@
|
||||
"title": "Loadakkudoktor Year Energy",
|
||||
"description": "Yearly energy consumption (kWh)."
|
||||
},
|
||||
"load0_provider": {
|
||||
"load_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -4731,10 +4390,10 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Provider",
|
||||
"title": "Load Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load0_name": {
|
||||
"load_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
@ -4743,103 +4402,7 @@
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load0 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load1_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load1_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load1 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load2_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load2_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load2 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load3_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load3_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load3 Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"load4_provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Provider",
|
||||
"description": "Load provider id of provider to be used."
|
||||
},
|
||||
"load4_name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Load4 Name",
|
||||
"title": "Load Name",
|
||||
"description": "Name of the load source."
|
||||
},
|
||||
"elecpriceimport_file_path": {
|
||||
@ -4880,8 +4443,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Elecprice Provider",
|
||||
"description": "Electicity price provider id of provider to be used.",
|
||||
"default": "ElecPriceAkkudoktor"
|
||||
"description": "Electicity price provider id of provider to be used."
|
||||
},
|
||||
"prediction_hours": {
|
||||
"anyOf": [
|
||||
|
@ -9,10 +9,137 @@ import time
|
||||
import numpy as np
|
||||
|
||||
from akkudoktoreos.config.config import get_config
|
||||
from akkudoktoreos.core.ems import get_ems
|
||||
from akkudoktoreos.optimization.genetic import (
|
||||
OptimizationParameters,
|
||||
optimization_problem,
|
||||
)
|
||||
from akkudoktoreos.prediction.prediction import get_prediction
|
||||
|
||||
|
||||
def prepare_optimization_real_parameters() -> OptimizationParameters:
|
||||
"""Prepare and return optimization parameters with real world data.
|
||||
|
||||
Returns:
|
||||
OptimizationParameters: Configured optimization parameters
|
||||
"""
|
||||
# Make a config
|
||||
settings = {
|
||||
# -- General --
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
# -- Predictions --
|
||||
# PV Forecast
|
||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
||||
"pvforecast0_peakpower": 5.0,
|
||||
"pvforecast0_surface_azimuth": -10,
|
||||
"pvforecast0_surface_tilt": 7,
|
||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||
"pvforecast0_inverter_paco": 10000,
|
||||
"pvforecast1_peakpower": 4.8,
|
||||
"pvforecast1_surface_azimuth": -90,
|
||||
"pvforecast1_surface_tilt": 7,
|
||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||
"pvforecast1_inverter_paco": 10000,
|
||||
"pvforecast2_peakpower": 1.4,
|
||||
"pvforecast2_surface_azimuth": -40,
|
||||
"pvforecast2_surface_tilt": 60,
|
||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
||||
"pvforecast2_inverter_paco": 2000,
|
||||
"pvforecast3_peakpower": 1.6,
|
||||
"pvforecast3_surface_azimuth": 5,
|
||||
"pvforecast3_surface_tilt": 45,
|
||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
||||
"pvforecast3_inverter_paco": 1400,
|
||||
"pvforecast4_peakpower": None,
|
||||
# Weather Forecast
|
||||
"weather_provider": "ClearOutside",
|
||||
# Electricity Price Forecast
|
||||
"elecprice_provider": "ElecPriceAkkudoktor",
|
||||
# Load Forecast
|
||||
"load_provider": "LoadAkkudoktor",
|
||||
"loadakkudoktor_year_energy": 5000, # Energy consumption per year in kWh
|
||||
# -- Simulations --
|
||||
}
|
||||
config_eos = get_config()
|
||||
prediction_eos = get_prediction()
|
||||
ems_eos = get_ems()
|
||||
|
||||
# Update/ set configuration
|
||||
config_eos.merge_settings_from_dict(settings)
|
||||
|
||||
# Get current prediction data for optimization run
|
||||
ems_eos.set_start_datetime()
|
||||
print(
|
||||
f"Real data prediction from {prediction_eos.start_datetime} to {prediction_eos.end_datetime}"
|
||||
)
|
||||
prediction_eos.update_data()
|
||||
|
||||
# PV Forecast (in W)
|
||||
pv_forecast = prediction_eos.key_to_array(
|
||||
key="pvforecast_ac_power",
|
||||
start_datetime=prediction_eos.start_datetime,
|
||||
end_datetime=prediction_eos.end_datetime,
|
||||
)
|
||||
print(f"pv_forecast: {pv_forecast}")
|
||||
|
||||
# Temperature Forecast (in degree C)
|
||||
temperature_forecast = prediction_eos.key_to_array(
|
||||
key="weather_temp_air",
|
||||
start_datetime=prediction_eos.start_datetime,
|
||||
end_datetime=prediction_eos.end_datetime,
|
||||
)
|
||||
print(f"temperature_forecast: {temperature_forecast}")
|
||||
|
||||
# Electricity Price (in Euro per Wh)
|
||||
electricity_market_price_euros_per_kwh = prediction_eos.key_to_array(
|
||||
key="elecprice_marketprice",
|
||||
start_datetime=prediction_eos.start_datetime,
|
||||
end_datetime=prediction_eos.end_datetime,
|
||||
)
|
||||
strompreis_euro_pro_wh = electricity_market_price_euros_per_kwh * 0.001
|
||||
print(f"strompreis_euro_pro_wh: {strompreis_euro_pro_wh}")
|
||||
|
||||
# Overall System Load (in W)
|
||||
gesamtlast = prediction_eos.key_to_array(
|
||||
key="load_mean",
|
||||
start_datetime=prediction_eos.start_datetime,
|
||||
end_datetime=prediction_eos.end_datetime,
|
||||
)
|
||||
print(f"gesamtlast: {gesamtlast}")
|
||||
|
||||
# Start Solution (binary)
|
||||
start_solution = None
|
||||
print(f"start_solution: {start_solution}")
|
||||
|
||||
# Define parameters for the optimization problem
|
||||
return OptimizationParameters(
|
||||
**{
|
||||
"ems": {
|
||||
"preis_euro_pro_wh_akku": 0e-05,
|
||||
"einspeiseverguetung_euro_pro_wh": 7e-05,
|
||||
"gesamtlast": gesamtlast,
|
||||
"pv_prognose_wh": pv_forecast,
|
||||
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
||||
},
|
||||
"pv_akku": {
|
||||
"kapazitaet_wh": 26400,
|
||||
"start_soc_prozent": 15,
|
||||
"min_soc_prozent": 15,
|
||||
},
|
||||
"eauto": {
|
||||
"min_soc_prozent": 50,
|
||||
"kapazitaet_wh": 60000,
|
||||
"lade_effizienz": 0.95,
|
||||
"max_ladeleistung_w": 11040,
|
||||
"start_soc_prozent": 5,
|
||||
},
|
||||
"temperature_forecast": temperature_forecast,
|
||||
"start_solution": start_solution,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def prepare_optimization_parameters() -> OptimizationParameters:
|
||||
@ -166,7 +293,7 @@ def prepare_optimization_parameters() -> OptimizationParameters:
|
||||
)
|
||||
|
||||
|
||||
def run_optimization(start_hour: int = 0, verbose: bool = False) -> dict:
|
||||
def run_optimization(real_world: bool = False, start_hour: int = 0, verbose: bool = False) -> dict:
|
||||
"""Run the optimization problem.
|
||||
|
||||
Args:
|
||||
@ -176,14 +303,21 @@ def run_optimization(start_hour: int = 0, verbose: bool = False) -> dict:
|
||||
Returns:
|
||||
dict: Optimization result as a dictionary
|
||||
"""
|
||||
# Prepare parameters
|
||||
if real_world:
|
||||
parameters = prepare_optimization_real_parameters()
|
||||
else:
|
||||
parameters = prepare_optimization_parameters()
|
||||
|
||||
if verbose:
|
||||
print("\nOptimization Parameters:")
|
||||
print(parameters.model_dump_json(indent=4))
|
||||
|
||||
# Initialize the optimization problem using the default configuration
|
||||
config_eos = get_config()
|
||||
config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
|
||||
opt_class = optimization_problem(verbose=verbose, fixed_seed=42)
|
||||
|
||||
# Prepare parameters
|
||||
parameters = prepare_optimization_parameters()
|
||||
|
||||
# Perform the optimisation based on the provided parameters and start hour
|
||||
result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour)
|
||||
|
||||
@ -197,6 +331,9 @@ def main():
|
||||
parser.add_argument(
|
||||
"--verbose", action="store_true", help="Enable verbose output during optimization"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--real-world", action="store_true", help="Use real world data for predictions"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--start-hour", type=int, default=0, help="Starting hour for optimization (default: 0)"
|
||||
)
|
||||
@ -208,7 +345,10 @@ def main():
|
||||
profiler = cProfile.Profile()
|
||||
try:
|
||||
result = profiler.runcall(
|
||||
run_optimization, start_hour=args.start_hour, verbose=args.verbose
|
||||
run_optimization,
|
||||
real_world=args.real_world,
|
||||
start_hour=args.start_hour,
|
||||
verbose=args.verbose,
|
||||
)
|
||||
# Print profiling statistics
|
||||
stats = pstats.Stats(profiler)
|
||||
@ -224,7 +364,9 @@ def main():
|
||||
# Run without profiling
|
||||
try:
|
||||
start_time = time.time()
|
||||
result = run_optimization(start_hour=args.start_hour, verbose=args.verbose)
|
||||
result = run_optimization(
|
||||
real_world=args.real_world, start_hour=args.start_hour, verbose=args.verbose
|
||||
)
|
||||
end_time = time.time()
|
||||
elapsed_time = end_time - start_time
|
||||
print(f"\nElapsed time: {elapsed_time:.4f} seconds.")
|
||||
|
single_test_prediction.py (new file, 170 lines)
@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import cProfile
|
||||
import pstats
|
||||
import sys
|
||||
import time
|
||||
|
||||
from akkudoktoreos.config.config import get_config
|
||||
from akkudoktoreos.prediction.prediction import get_prediction
|
||||
|
||||
config_eos = get_config()
|
||||
prediction_eos = get_prediction()
|
||||
|
||||
|
||||
def config_pvforecast() -> dict:
|
||||
"""Configure settings for PV forecast."""
|
||||
settings = {
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
||||
"pvforecast0_peakpower": 5.0,
|
||||
"pvforecast0_surface_azimuth": -10,
|
||||
"pvforecast0_surface_tilt": 7,
|
||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||
"pvforecast0_inverter_paco": 10000,
|
||||
"pvforecast1_peakpower": 4.8,
|
||||
"pvforecast1_surface_azimuth": -90,
|
||||
"pvforecast1_surface_tilt": 7,
|
||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||
"pvforecast1_inverter_paco": 10000,
|
||||
"pvforecast2_peakpower": 1.4,
|
||||
"pvforecast2_surface_azimuth": -40,
|
||||
"pvforecast2_surface_tilt": 60,
|
||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
||||
"pvforecast2_inverter_paco": 2000,
|
||||
"pvforecast3_peakpower": 1.6,
|
||||
"pvforecast3_surface_azimuth": 5,
|
||||
"pvforecast3_surface_tilt": 45,
|
||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
||||
"pvforecast3_inverter_paco": 1400,
|
||||
"pvforecast4_peakpower": None,
|
||||
}
|
||||
return settings
|
||||
|
||||
|
||||
def config_weather() -> dict:
|
||||
"""Configure settings for weather forecast."""
|
||||
settings = {
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
}
|
||||
return settings
|
||||
|
||||
|
||||
def config_elecprice() -> dict:
|
||||
"""Configure settings for electricity price forecast."""
|
||||
settings = {
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
}
|
||||
return settings
|
||||
|
||||
|
||||
def config_load() -> dict:
|
||||
"""Configure settings for load forecast."""
|
||||
settings = {
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
}
|
||||
return settings
|
||||
|
||||
|
||||
def run_prediction(provider_id: str, verbose: bool = False) -> str:
|
||||
"""Run the prediction.
|
||||
|
||||
Args:
|
||||
provider_id (str): ID of prediction provider.
|
||||
verbose (bool, optional): Whether to print verbose output. Defaults to False.
|
||||
|
||||
Returns:
|
||||
dict: Prediction result as a dictionary
|
||||
"""
|
||||
# Initialize the oprediction
|
||||
config_eos = get_config()
|
||||
prediction_eos = get_prediction()
|
||||
if verbose:
|
||||
print(f"\nProvider ID: {provider_id}")
|
||||
if provider_id in ("PVForecastAkkudoktor",):
|
||||
settings = config_pvforecast()
|
||||
settings["pvforecast_provider"] = provider_id
|
||||
elif provider_id in ("BrightSky", "ClearOutside"):
|
||||
settings = config_weather()
|
||||
settings["weather_provider"] = provider_id
|
||||
elif provider_id in ("ElecPriceAkkudoktor",):
|
||||
settings = config_elecprice()
|
||||
settings["elecprice_provider"] = provider_id
|
||||
elif provider_id in ("LoadAkkudoktor",):
|
||||
settings = config_elecprice()
|
||||
settings["loadakkudoktor_year_energy"] = 1000
|
||||
settings["load_provider"] = provider_id
|
||||
else:
|
||||
raise ValueError(f"Unknown provider '{provider_id}'.")
|
||||
config_eos.merge_settings_from_dict(settings)
|
||||
|
||||
prediction_eos.update_data()
|
||||
|
||||
# Return result of prediction
|
||||
provider = prediction_eos.provider_by_id(provider_id)
|
||||
if verbose:
|
||||
for key in provider.record_keys:
|
||||
print(f"\n{key}\n----------")
|
||||
print(f"Array: {provider.key_to_array(key)}")
|
||||
return provider.model_dump_json(indent=4)
|
||||
|
||||
|
||||
def main():
|
||||
"""Main function to run the optimization script with optional profiling."""
|
||||
parser = argparse.ArgumentParser(description="Run Energy Optimization Simulation")
|
||||
parser.add_argument("--profile", action="store_true", help="Enable performance profiling")
|
||||
parser.add_argument(
|
||||
"--verbose", action="store_true", help="Enable verbose output during optimization"
|
||||
)
|
||||
parser.add_argument("--provider-id", type=str, default=0, help="Provider ID of prediction")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.profile:
|
||||
# Run with profiling
|
||||
profiler = cProfile.Profile()
|
||||
try:
|
||||
result = profiler.runcall(
|
||||
run_prediction, provider_id=args.provider_id, verbose=args.verbose
|
||||
)
|
||||
# Print profiling statistics
|
||||
stats = pstats.Stats(profiler)
|
||||
stats.strip_dirs().sort_stats("cumulative").print_stats(200)
|
||||
# Print result
|
||||
print("\nPrediction Result:")
|
||||
print(result)
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error during prediction: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
# Run without profiling
|
||||
try:
|
||||
start_time = time.time()
|
||||
result = run_prediction(provider_id=args.provider_id, verbose=args.verbose)
|
||||
end_time = time.time()
|
||||
elapsed_time = end_time - start_time
|
||||
print(f"\nElapsed time: {elapsed_time:.4f} seconds.")
|
||||
print("\nPrediction Result:")
|
||||
print(result)
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error during prediction: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -382,7 +382,7 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
json_str = super().to_json()
# Write to file
f_out.write(json_str)
# Also remeber as actual settings
# Also remember as actual settings
ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
except ValidationError as exc:
raise ValueError(f"Could not update '{self.config_file_path}': {exc}")
@ -758,9 +758,9 @@ class DataSequence(DataBase, MutableSequence):
|
||||
raise ValueError(f"Unsupported fill method for non-numeric data: {fill_method}")
|
||||
|
||||
# Convert the resampled series to a NumPy array
|
||||
if start_datetime is not None:
|
||||
if start_datetime is not None and len(resampled) > 0:
|
||||
resampled = resampled.truncate(before=start_datetime)
|
||||
if end_datetime is not None:
|
||||
if end_datetime is not None and len(resampled) > 0:
|
||||
resampled = resampled.truncate(after=end_datetime.subtract(seconds=1))
|
||||
array = resampled.values
|
||||
return array
|
||||
@ -1120,6 +1120,15 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
)
|
||||
return value
|
||||
|
||||
@property
|
||||
def enabled_providers(self) -> List[Any]:
|
||||
"""List of providers that are currently enabled."""
|
||||
enab = []
|
||||
for provider in self.providers:
|
||||
if provider.enabled():
|
||||
enab.append(provider)
|
||||
return enab
|
||||
|
||||
def __getitem__(self, key: str) -> pd.Series:
|
||||
"""Retrieve a Pandas Series for a specified key from the data in each DataProvider.
|
||||
|
||||
@ -1135,7 +1144,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
KeyError: If no provider contains data for the specified key.
|
||||
"""
|
||||
series = None
|
||||
for provider in self.providers:
|
||||
for provider in self.enabled_providers:
|
||||
try:
|
||||
series = provider.key_to_series(key)
|
||||
break
|
||||
@ -1164,7 +1173,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
if not isinstance(value, pd.Series):
|
||||
raise ValueError("Value must be an instance of pd.Series.")
|
||||
|
||||
for provider in self.providers:
|
||||
for provider in self.enabled_providers:
|
||||
try:
|
||||
provider.key_from_series(key, value)
|
||||
break
|
||||
@ -1182,7 +1191,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
Raises:
|
||||
KeyError: If the key is not found in any provider.
|
||||
"""
|
||||
for provider in self.providers:
|
||||
for provider in self.enabled_providers:
|
||||
try:
|
||||
provider.key_delete_by_datetime(key)
|
||||
break
|
||||
@ -1197,7 +1206,9 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
Returns:
|
||||
Iterator[str]: An iterator over the unique keys from all providers.
|
||||
"""
|
||||
return iter(set(chain.from_iterable(provider.record_keys for provider in self.providers)))
|
||||
return iter(
|
||||
set(chain.from_iterable(provider.record_keys for provider in self.enabled_providers))
|
||||
)
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""Return the number of keys in the container.
|
||||
@ -1205,7 +1216,9 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
Returns:
|
||||
int: The total number of keys in this container.
|
||||
"""
|
||||
return len(list(chain.from_iterable(provider.record_keys for provider in self.providers)))
|
||||
return len(
|
||||
list(chain.from_iterable(provider.record_keys for provider in self.enabled_providers))
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Provide a string representation of the DataContainer instance.
|
||||
@ -1226,7 +1239,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
force_enable (bool, optional): If True, forces the update even if a provider is disabled.
|
||||
force_update (bool, optional): If True, forces the providers to update the data even if still cached.
|
||||
"""
|
||||
for provider in self.providers:
|
||||
for provider in self.enabled_providers:
|
||||
provider.update_data(force_enable=force_enable, force_update=force_update)
|
||||
|
||||
def key_to_array(
|
||||
@ -1262,7 +1275,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
Cache the result in memory until the next `update_data` call.
|
||||
"""
|
||||
array = None
|
||||
for provider in self.providers:
|
||||
for provider in self.enabled_providers:
|
||||
try:
|
||||
array = provider.key_to_array(
|
||||
key,
|
||||
@ -1283,7 +1296,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
||||
def provider_by_id(self, provider_id: str) -> DataProvider:
|
||||
"""Retrieves a data provider by its unique identifier.
|
||||
|
||||
This method searches through the list of available providers and
|
||||
This method searches through the list of all available providers and
|
||||
returns the first provider whose `provider_id` matches the given
|
||||
`provider_id`. If no matching provider is found, the method returns `None`.
|
||||
|
||||
|
@ -9,21 +9,9 @@
"elecprice_provider": null,
"elecpriceimport_file_path": null,
"latitude": null,
"load0_import_file_path": null,
"load0_name": null,
"load0_provider": null,
"load1_import_file_path": null,
"load1_name": null,
"load1_provider": null,
"load2_import_file_path": null,
"load2_name": null,
"load2_provider": null,
"load3_import_file_path": null,
"load3_name": null,
"load3_provider": null,
"load4_import_file_path": null,
"load4_name": null,
"load4_provider": null,
"load_import_file_path": null,
"load_name": null,
"load_provider": null,
"loadakkudoktor_year_energy": null,
"longitude": null,
"optimization_ev_available_charge_rates_percent": [],
@ -7,5 +7,5 @@ from akkudoktoreos.config.configabc import SettingsBaseModel

class ElecPriceCommonSettings(SettingsBaseModel):
elecprice_provider: Optional[str] = Field(
"ElecPriceAkkudoktor", description="Electicity price provider id of provider to be used."
default=None, description="Electicity price provider id of provider to be used."
)
@ -66,7 +66,7 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
@classmethod
def provider_id(cls) -> str:
"""Return the unique identifier for the Akkudoktor provider."""
return "Akkudoktor"
return "ElecPriceAkkudoktor"

@classmethod
def _validate_data(cls, json_str: Union[bytes, Any]) -> AkkudoktorElecPrice:
@ -98,8 +98,8 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
ValueError: If the API response does not include expected `electricity price` data.
"""
source = "https://api.akkudoktor.net"
date = to_datetime(self.start_datetime, as_string="%Y-%m-%d")
last_date = to_datetime(self.end_datetime, as_string="%Y-%m-%d")
date = to_datetime(self.start_datetime, as_string="Y-M-D")
last_date = to_datetime(self.end_datetime, as_string="Y-M-D")
response = requests.get(
f"{source}/prices?date={date}&last_date={last_date}&tz={self.config.timezone}"
)
@ -146,6 +146,10 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
elecprice_marketprice=akkudoktor_data.values[i].marketpriceEurocentPerKWh,
)
self.append(record)
if len(self) == 0:
# Got no valid forecast data
return

# Assure price starts at start_time
if compare_datetimes(self[0].date_time, self.start_datetime).gt:
record = ElecPriceDataRecord(
@ -20,7 +20,7 @@ logger = get_logger(__name__)


class ElecPriceImportCommonSettings(SettingsBaseModel):
"""Common settings for elecprice data import from file."""
"""Common settings for elecprice data import from file or JSON String."""

elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import elecprice data from."
@ -1,8 +1,8 @@
"""Load forecast module for load predictions."""

from typing import Optional, Set
from typing import Optional

from pydantic import Field, computed_field
from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
@ -12,50 +12,7 @@ logger = get_logger(__name__)

class LoadCommonSettings(SettingsBaseModel):
# Load 0
load0_provider: Optional[str] = Field(
load_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load0_name: Optional[str] = Field(default=None, description="Name of the load source.")

# Load 1
load1_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load1_name: Optional[str] = Field(default=None, description="Name of the load source.")

# Load 2
load2_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load2_name: Optional[str] = Field(default=None, description="Name of the load source.")

# Load 3
load3_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load3_name: Optional[str] = Field(default=None, description="Name of the load source.")

# Load 4
load4_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load4_name: Optional[str] = Field(default=None, description="Name of the load source.")

# Computed fields
@computed_field # type: ignore[prop-decorator]
@property
def load_count(self) -> int:
"""Maximum number of loads."""
return 5

@computed_field # type: ignore[prop-decorator]
@property
def load_providers(self) -> Set[str]:
"""Load providers."""
providers = []
for i in range(self.load_count):
load_provider_attr = f"load{i}_provider"
value = getattr(self, load_provider_attr)
if value:
providers.append(value)
return set(providers)
load_name: Optional[str] = Field(default=None, description="Name of the load source.")
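A minimal before/after sketch of the settings consolidation this hunk introduces (key names are taken from the diff and the updated test fixture; values are illustrative):

# Before: up to five numbered load slots, e.g. slot 0
old_settings = {"load0_provider": "LoadAkkudoktor", "load0_name": "Akkudoktor Profile"}

# After: a single load provider at a time
new_settings = {"load_provider": "LoadAkkudoktor", "load_name": "Akkudoktor Profile"}
config_eos.merge_settings_from_dict(new_settings)  # config_eos obtained via get_config(), as elsewhere in this commit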
@ -7,7 +7,7 @@ Notes:
|
||||
from abc import abstractmethod
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import Field, computed_field
|
||||
from pydantic import Field
|
||||
|
||||
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
|
||||
from akkudoktoreos.utils.logutil import get_logger
|
||||
@ -18,41 +18,8 @@ logger = get_logger(__name__)
|
||||
class LoadDataRecord(PredictionRecord):
|
||||
"""Represents a load data record containing various load attributes at a specific datetime."""
|
||||
|
||||
load0_mean: Optional[float] = Field(default=None, description="Load 0 mean value (W)")
|
||||
load0_std: Optional[float] = Field(default=None, description="Load 0 standard deviation (W)")
|
||||
load1_mean: Optional[float] = Field(default=None, description="Load 1 mean value (W)")
|
||||
load1_std: Optional[float] = Field(default=None, description="Load 1 standard deviation (W)")
|
||||
load2_mean: Optional[float] = Field(default=None, description="Load 2 mean value (W)")
|
||||
load2_std: Optional[float] = Field(default=None, description="Load 2 standard deviation (W)")
|
||||
load3_mean: Optional[float] = Field(default=None, description="Load 3 mean value (W)")
|
||||
load3_std: Optional[float] = Field(default=None, description="Load 3 standard deviation (W)")
|
||||
load4_mean: Optional[float] = Field(default=None, description="Load 4 mean value (W)")
|
||||
load4_std: Optional[float] = Field(default=None, description="Load 4 standard deviation (W)")
|
||||
|
||||
# Computed fields
|
||||
@computed_field # type: ignore[prop-decorator]
|
||||
@property
|
||||
def load_total_mean(self) -> float:
|
||||
"""Total load mean value (W)."""
|
||||
total_mean = 0.0
|
||||
for i in range(5):
|
||||
load_mean_attr = f"load{i}_mean"
|
||||
value = getattr(self, load_mean_attr)
|
||||
if value:
|
||||
total_mean += value
|
||||
return total_mean
|
||||
|
||||
@computed_field # type: ignore[prop-decorator]
|
||||
@property
|
||||
def load_total_std(self) -> float:
|
||||
"""Total load standard deviation (W)."""
|
||||
total_std = 0.0
|
||||
for i in range(5):
|
||||
load_std_attr = f"load{i}_std"
|
||||
value = getattr(self, load_std_attr)
|
||||
if value:
|
||||
total_std += value
|
||||
return total_std
|
||||
load_mean: Optional[float] = Field(default=None, description="Load mean value (W)")
|
||||
load_std: Optional[float] = Field(default=None, description="Load standard deviation (W)")
|
||||
|
||||
|
||||
class LoadProvider(PredictionProvider):
|
||||
@ -86,17 +53,4 @@ class LoadProvider(PredictionProvider):
|
||||
return "LoadProvider"
|
||||
|
||||
def enabled(self) -> bool:
|
||||
logger.debug(
|
||||
f"LoadProvider ID {self.provider_id()} vs. config {self.config.load_providers}"
|
||||
)
|
||||
return self.provider_id() == self.config.load_providers
|
||||
|
||||
def loads(self) -> List[str]:
|
||||
"""Returns a list of key prefixes of the loads managed by this provider."""
|
||||
loads_prefix = []
|
||||
for i in range(self.config.load_count):
|
||||
load_provider_attr = f"load{i}_provider"
|
||||
value = getattr(self.config, load_provider_attr)
|
||||
if value == self.provider_id():
|
||||
loads_prefix.append(f"load{i}")
|
||||
return loads_prefix
|
||||
return self.provider_id() == self.config.load_provider
|
||||
|
@ -39,8 +39,8 @@ class LoadAkkudoktor(LoadProvider):
profile_data = np.array(
list(zip(file_data["yearly_profiles"], file_data["yearly_profiles_std"]))
)
data_year_energy = profile_data * self.config.loadakkudoktor_year_energy
# pprint(self.data_year_energy)
# Calculate values in W by relative profile data and yearly consumption given in kWh
data_year_energy = profile_data * self.config.loadakkudoktor_year_energy * 1000
except FileNotFoundError:
error_msg = f"Error: File {load_file} not found."
logger.error(error_msg)
@ -54,16 +54,13 @@ class LoadAkkudoktor(LoadProvider):
def _update_data(self, force_update: Optional[bool] = False) -> None:
"""Adds the load means and standard deviations."""
data_year_energy = self.load_data()
for load in self.loads():
attr_load_mean = f"{load}_mean"
attr_load_std = f"{load}_std"
date = self.start_datetime
for i in range(self.config.prediction_hours):
# Extract mean and standard deviation for the given day and hour
# Day indexing starts at 0, -1 because of that
hourly_stats = data_year_energy[date.day_of_year - 1, :, date.hour]
self.update_value(date, attr_load_mean, hourly_stats[0])
self.update_value(date, attr_load_std, hourly_stats[1])
date += to_duration("1 hour")
date = self.start_datetime
for i in range(self.config.prediction_hours):
# Extract mean and standard deviation for the given day and hour
# Day indexing starts at 0, -1 because of that
hourly_stats = data_year_energy[date.day_of_year - 1, :, date.hour]
self.update_value(date, "load_mean", hourly_stats[0])
self.update_value(date, "load_std", hourly_stats[1])
date += to_duration("1 hour")
# We are working on fresh data (no cache), report update time
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
@ -20,48 +20,17 @@ logger = get_logger(__name__)
|
||||
|
||||
|
||||
class LoadImportCommonSettings(SettingsBaseModel):
|
||||
"""Common settings for load data import from file."""
|
||||
"""Common settings for load data import from file or JSON string."""
|
||||
|
||||
load0_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
load_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None, description="Path to the file to import load data from."
|
||||
)
|
||||
load0_import_json: Optional[str] = Field(
|
||||
default=None, description="JSON string, dictionary of load forecast value lists."
|
||||
)
|
||||
load1_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None, description="Path to the file to import load data from."
|
||||
)
|
||||
load1_import_json: Optional[str] = Field(
|
||||
default=None, description="JSON string, dictionary of load forecast value lists."
|
||||
)
|
||||
load2_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None, description="Path to the file to import load data from."
|
||||
)
|
||||
load2_import_json: Optional[str] = Field(
|
||||
default=None, description="JSON string, dictionary of load forecast value lists."
|
||||
)
|
||||
load3_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None, description="Path to the file to import load data from."
|
||||
)
|
||||
load3_import_json: Optional[str] = Field(
|
||||
default=None, description="JSON string, dictionary of load forecast value lists."
|
||||
)
|
||||
load4_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None, description="Path to the file to import load data from."
|
||||
)
|
||||
load4_import_json: Optional[str] = Field(
|
||||
load_import_json: Optional[str] = Field(
|
||||
default=None, description="JSON string, dictionary of load forecast value lists."
|
||||
)
|
||||
|
||||
# Validators
|
||||
@field_validator(
|
||||
"load0_import_file_path",
|
||||
"load1_import_file_path",
|
||||
"load2_import_file_path",
|
||||
"load3_import_file_path",
|
||||
"load4_import_file_path",
|
||||
mode="after",
|
||||
)
|
||||
@field_validator("load_import_file_path", mode="after")
|
||||
@classmethod
|
||||
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||
if value is None:
|
||||
@ -89,12 +58,7 @@ class LoadImport(LoadProvider, PredictionImportProvider):
|
||||
return "LoadImport"
|
||||
|
||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||
for load in self.loads():
|
||||
attr_file_path = f"{load}_import_file_path"
|
||||
attr_json = f"{load}_import_json"
|
||||
import_file_path = getattr(self.config, attr_file_path)
|
||||
if import_file_path is not None:
|
||||
self.import_from_file(import_file_path, key_prefix=load)
|
||||
import_json = getattr(self.config, attr_json)
|
||||
if import_json is not None:
|
||||
self.import_from_json(import_json, key_prefix=load)
|
||||
if self.config.load_import_file_path is not None:
|
||||
self.import_from_file(self.config.load_import_file_path, key_prefix="load")
|
||||
if self.config.load_import_json is not None:
|
||||
self.import_from_json(self.config.load_import_json, key_prefix="load")
|
||||
|
@ -20,7 +20,7 @@ logger = get_logger(__name__)


class PVForecastImportCommonSettings(SettingsBaseModel):
"""Common settings for pvforecast data import from file."""
"""Common settings for pvforecast data import from file or JSON string."""

pvforecastimport_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import pvforecast data from."
@ -9,5 +9,5 @@ from akkudoktoreos.config.configabc import SettingsBaseModel

class WeatherCommonSettings(SettingsBaseModel):
weather_provider: Optional[str] = Field(
default="ClearOutside", description="Weather provider id of provider to be used."
default=None, description="Weather provider id of provider to be used."
)
@ -96,8 +96,8 @@ class WeatherBrightSky(WeatherProvider):
ValueError: If the API response does not include expected `weather` data.
"""
source = "https://api.brightsky.dev"
date = to_datetime(self.start_datetime, as_string="%Y-%m-%d")
last_date = to_datetime(self.end_datetime, as_string="%Y-%m-%d")
date = to_datetime(self.start_datetime, as_string="Y-M-D")
last_date = to_datetime(self.end_datetime, as_string="Y-M-D")
response = requests.get(
f"{source}/weather?lat={self.config.latitude}&lon={self.config.longitude}&date={date}&last_date={last_date}&tz={self.config.timezone}"
)
@ -20,7 +20,7 @@ logger = get_logger(__name__)


class WeatherImportCommonSettings(SettingsBaseModel):
"""Common settings for weather data import from file."""
"""Common settings for weather data import from file or JSON string."""

weatherimport_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import weather data from."
@ -25,7 +25,7 @@ ems_eos = get_ems()
@pytest.fixture
def elecprice_provider(monkeypatch):
"""Fixture to create a ElecPriceProvider instance."""
monkeypatch.setenv("elecprice_provider", "Akkudoktor")
monkeypatch.setenv("elecprice_provider", "ElecPriceAkkudoktor")
return ElecPriceAkkudoktor()
@ -19,8 +19,8 @@ ems_eos = get_ems()
def load_provider(monkeypatch):
"""Fixture to create a LoadAkkudoktor instance."""
settings = {
"load0_provider": "LoadAkkudoktor",
"load0_name": "Akkudoktor Profile",
"load_provider": "LoadAkkudoktor",
"load_name": "Akkudoktor Profile",
"loadakkudoktor_year_energy": "1000",
}
config_eos.merge_settings_from_dict(settings)
@ -2,7 +2,6 @@ import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from akkudoktoreos.config.config import get_config
|
||||
from akkudoktoreos.core.ems import get_ems
|
||||
from akkudoktoreos.prediction.elecpriceakkudoktor import ElecPriceAkkudoktor
|
||||
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImport
|
||||
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktor
|
||||
@ -17,7 +16,6 @@ from akkudoktoreos.prediction.pvforecastimport import PVForecastImport
|
||||
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
|
||||
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
|
||||
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
||||
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -28,28 +26,10 @@ def sample_settings(reset_config):
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
||||
"pvforecast0_peakpower": 5.0,
|
||||
"pvforecast0_surface_azimuth": -10,
|
||||
"pvforecast0_surface_tilt": 7,
|
||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||
"pvforecast0_inverter_paco": 10000,
|
||||
"pvforecast1_peakpower": 4.8,
|
||||
"pvforecast1_surface_azimuth": -90,
|
||||
"pvforecast1_surface_tilt": 7,
|
||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||
"pvforecast1_inverter_paco": 10000,
|
||||
"pvforecast2_peakpower": 1.4,
|
||||
"pvforecast2_surface_azimuth": -40,
|
||||
"pvforecast2_surface_tilt": 60,
|
||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
||||
"pvforecast2_inverter_paco": 2000,
|
||||
"pvforecast3_peakpower": 1.6,
|
||||
"pvforecast3_surface_azimuth": 5,
|
||||
"pvforecast3_surface_tilt": 45,
|
||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
||||
"pvforecast3_inverter_paco": 1400,
|
||||
"pvforecast4_peakpower": None,
|
||||
"weather_provider": None,
|
||||
"pvforecast_provider": None,
|
||||
"load_provider": None,
|
||||
"elecprice_provider": None,
|
||||
}
|
||||
|
||||
# Merge settings to config
|
||||
@ -176,23 +156,6 @@ def test_provider_sequence(prediction):
|
||||
assert isinstance(prediction.providers[8], WeatherImport)
|
||||
|
||||
|
||||
def test_update_calls_providers(sample_settings, prediction):
|
||||
"""Test that the update method calls the update method for each provider in sequence."""
|
||||
# Mark the `update_datetime` method for each provider
|
||||
old_datetime = to_datetime("1970-01-01 00:00:00")
|
||||
for provider in prediction.providers:
|
||||
provider.update_datetime = old_datetime
|
||||
|
||||
ems_eos = get_ems()
|
||||
ems_eos.set_start_datetime(to_datetime())
|
||||
prediction.update_data()
|
||||
|
||||
# Verify each provider's `update` method was called
|
||||
for provider in prediction.providers:
|
||||
if provider.enabled():
|
||||
assert compare_datetimes(provider.update_datetime, old_datetime).gt
|
||||
|
||||
|
||||
def test_provider_by_id(prediction, forecast_providers):
|
||||
"""Test that provider_by_id method returns the correct provider."""
|
||||
for provider in forecast_providers:
|
||||
|
tests/testdata/EOS.config.json (vendored, 18 lines changed)
@ -9,21 +9,9 @@
"elecprice_provider": null,
"elecpriceimport_file_path": null,
"latitude": null,
"load0_import_file_path": null,
"load0_name": null,
"load0_provider": null,
"load1_import_file_path": null,
"load1_name": null,
"load1_provider": null,
"load2_import_file_path": null,
"load2_name": null,
"load2_provider": null,
"load3_import_file_path": null,
"load3_name": null,
"load3_provider": null,
"load4_import_file_path": null,
"load4_name": null,
"load4_provider": null,
"load_import_file_path": null,
"load_name": null,
"load_provider": null,
"loadakkudoktor_year_energy": null,
"longitude": null,
"optimization_ev_available_charge_rates_percent": [],