Fix config and prediction revamp. (#259)

Extend single_test_optimization.py to be able to use real world data from new prediction classes.
- .venv/bin/python single_test_optimization.py --real_world --verbose
Can also be run with profiling "--profile".

Add single_test_prediction.py to fetch predictions from remote prediction providers
- .venv/bin/python single_test_prediction.py --verbose --provider-id PVForecastAkkudoktor | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id LoadAkkudoktor | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id ElecPriceAkkudoktor | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id BrightSky | more
- .venv/bin/python single_test_prediction.py --verbose --provider-id ClearOutside | more
Can also be run with profiling "--profile".

single_test_optimization.py is an example of how to retrieve prediction data for optimization and
use it to set up the optimization parameters.

Changes:
- load: Only one load provider at a time (vs. 5 before)

Bug fixes:
- prediction: only use providers that are enabled to retrieve or set data.
- prediction: fix pre-pendulum format strings
- dataabc: Prevent error when resampling data with no datasets.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
Bobby Noelte
2024-12-16 20:26:08 +01:00
committed by GitHub
parent 810cc17c0b
commit 31bd2de18b
21 changed files with 415 additions and 713 deletions

View File

@@ -382,7 +382,7 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
json_str = super().to_json()
# Write to file
f_out.write(json_str)
# Also remeber as actual settings
# Also remember as actual settings
ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
except ValidationError as exc:
raise ValueError(f"Could not update '{self.config_file_path}': {exc}")

View File

@@ -758,9 +758,9 @@ class DataSequence(DataBase, MutableSequence):
raise ValueError(f"Unsupported fill method for non-numeric data: {fill_method}")
# Convert the resampled series to a NumPy array
if start_datetime is not None:
if start_datetime is not None and len(resampled) > 0:
resampled = resampled.truncate(before=start_datetime)
if end_datetime is not None:
if end_datetime is not None and len(resampled) > 0:
resampled = resampled.truncate(after=end_datetime.subtract(seconds=1))
array = resampled.values
return array
@@ -1120,6 +1120,15 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
)
return value
@property
def enabled_providers(self) -> List[Any]:
"""List of providers that are currently enabled."""
enab = []
for provider in self.providers:
if provider.enabled():
enab.append(provider)
return enab
def __getitem__(self, key: str) -> pd.Series:
"""Retrieve a Pandas Series for a specified key from the data in each DataProvider.
@@ -1135,7 +1144,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
KeyError: If no provider contains data for the specified key.
"""
series = None
for provider in self.providers:
for provider in self.enabled_providers:
try:
series = provider.key_to_series(key)
break
@@ -1164,7 +1173,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
if not isinstance(value, pd.Series):
raise ValueError("Value must be an instance of pd.Series.")
for provider in self.providers:
for provider in self.enabled_providers:
try:
provider.key_from_series(key, value)
break
@@ -1182,7 +1191,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
Raises:
KeyError: If the key is not found in any provider.
"""
for provider in self.providers:
for provider in self.enabled_providers:
try:
provider.key_delete_by_datetime(key)
break
@@ -1197,7 +1206,9 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
Returns:
Iterator[str]: An iterator over the unique keys from all providers.
"""
return iter(set(chain.from_iterable(provider.record_keys for provider in self.providers)))
return iter(
set(chain.from_iterable(provider.record_keys for provider in self.enabled_providers))
)
def __len__(self) -> int:
"""Return the number of keys in the container.
@@ -1205,7 +1216,9 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
Returns:
int: The total number of keys in this container.
"""
return len(list(chain.from_iterable(provider.record_keys for provider in self.providers)))
return len(
list(chain.from_iterable(provider.record_keys for provider in self.enabled_providers))
)
def __repr__(self) -> str:
"""Provide a string representation of the DataContainer instance.
@@ -1226,7 +1239,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
force_enable (bool, optional): If True, forces the update even if a provider is disabled.
force_update (bool, optional): If True, forces the providers to update the data even if still cached.
"""
for provider in self.providers:
for provider in self.enabled_providers:
provider.update_data(force_enable=force_enable, force_update=force_update)
def key_to_array(
@@ -1262,7 +1275,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
Cache the result in memory until the next `update_data` call.
"""
array = None
for provider in self.providers:
for provider in self.enabled_providers:
try:
array = provider.key_to_array(
key,
@@ -1283,7 +1296,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
def provider_by_id(self, provider_id: str) -> DataProvider:
"""Retrieves a data provider by its unique identifier.
This method searches through the list of available providers and
This method searches through the list of all available providers and
returns the first provider whose `provider_id` matches the given
`provider_id`. If no matching provider is found, the method returns `None`.

View File

@@ -9,21 +9,9 @@
"elecprice_provider": null,
"elecpriceimport_file_path": null,
"latitude": null,
"load0_import_file_path": null,
"load0_name": null,
"load0_provider": null,
"load1_import_file_path": null,
"load1_name": null,
"load1_provider": null,
"load2_import_file_path": null,
"load2_name": null,
"load2_provider": null,
"load3_import_file_path": null,
"load3_name": null,
"load3_provider": null,
"load4_import_file_path": null,
"load4_name": null,
"load4_provider": null,
"load_import_file_path": null,
"load_name": null,
"load_provider": null,
"loadakkudoktor_year_energy": null,
"longitude": null,
"optimization_ev_available_charge_rates_percent": [],

View File

@@ -7,5 +7,5 @@ from akkudoktoreos.config.configabc import SettingsBaseModel
class ElecPriceCommonSettings(SettingsBaseModel):
elecprice_provider: Optional[str] = Field(
"ElecPriceAkkudoktor", description="Electicity price provider id of provider to be used."
default=None, description="Electicity price provider id of provider to be used."
)

View File

@@ -66,7 +66,7 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
@classmethod
def provider_id(cls) -> str:
"""Return the unique identifier for the Akkudoktor provider."""
return "Akkudoktor"
return "ElecPriceAkkudoktor"
@classmethod
def _validate_data(cls, json_str: Union[bytes, Any]) -> AkkudoktorElecPrice:
@@ -98,8 +98,8 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
ValueError: If the API response does not include expected `electricity price` data.
"""
source = "https://api.akkudoktor.net"
date = to_datetime(self.start_datetime, as_string="%Y-%m-%d")
last_date = to_datetime(self.end_datetime, as_string="%Y-%m-%d")
date = to_datetime(self.start_datetime, as_string="Y-M-D")
last_date = to_datetime(self.end_datetime, as_string="Y-M-D")
response = requests.get(
f"{source}/prices?date={date}&last_date={last_date}&tz={self.config.timezone}"
)
@@ -146,6 +146,10 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
elecprice_marketprice=akkudoktor_data.values[i].marketpriceEurocentPerKWh,
)
self.append(record)
if len(self) == 0:
# Got no valid forecast data
return
# Assure price starts at start_time
if compare_datetimes(self[0].date_time, self.start_datetime).gt:
record = ElecPriceDataRecord(

View File

@@ -20,7 +20,7 @@ logger = get_logger(__name__)
class ElecPriceImportCommonSettings(SettingsBaseModel):
"""Common settings for elecprice data import from file."""
"""Common settings for elecprice data import from file or JSON String."""
elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import elecprice data from."

View File

@@ -1,8 +1,8 @@
"""Load forecast module for load predictions."""
from typing import Optional, Set
from typing import Optional
from pydantic import Field, computed_field
from pydantic import Field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
@@ -12,50 +12,7 @@ logger = get_logger(__name__)
class LoadCommonSettings(SettingsBaseModel):
# Load 0
load0_provider: Optional[str] = Field(
load_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load0_name: Optional[str] = Field(default=None, description="Name of the load source.")
# Load 1
load1_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load1_name: Optional[str] = Field(default=None, description="Name of the load source.")
# Load 2
load2_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load2_name: Optional[str] = Field(default=None, description="Name of the load source.")
# Load 3
load3_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load3_name: Optional[str] = Field(default=None, description="Name of the load source.")
# Load 4
load4_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)
load4_name: Optional[str] = Field(default=None, description="Name of the load source.")
# Computed fields
@computed_field # type: ignore[prop-decorator]
@property
def load_count(self) -> int:
"""Maximum number of loads."""
return 5
@computed_field # type: ignore[prop-decorator]
@property
def load_providers(self) -> Set[str]:
"""Load providers."""
providers = []
for i in range(self.load_count):
load_provider_attr = f"load{i}_provider"
value = getattr(self, load_provider_attr)
if value:
providers.append(value)
return set(providers)
load_name: Optional[str] = Field(default=None, description="Name of the load source.")

View File

@@ -7,7 +7,7 @@ Notes:
from abc import abstractmethod
from typing import List, Optional
from pydantic import Field, computed_field
from pydantic import Field
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger
@@ -18,41 +18,8 @@ logger = get_logger(__name__)
class LoadDataRecord(PredictionRecord):
"""Represents a load data record containing various load attributes at a specific datetime."""
load0_mean: Optional[float] = Field(default=None, description="Load 0 mean value (W)")
load0_std: Optional[float] = Field(default=None, description="Load 0 standard deviation (W)")
load1_mean: Optional[float] = Field(default=None, description="Load 1 mean value (W)")
load1_std: Optional[float] = Field(default=None, description="Load 1 standard deviation (W)")
load2_mean: Optional[float] = Field(default=None, description="Load 2 mean value (W)")
load2_std: Optional[float] = Field(default=None, description="Load 2 standard deviation (W)")
load3_mean: Optional[float] = Field(default=None, description="Load 3 mean value (W)")
load3_std: Optional[float] = Field(default=None, description="Load 3 standard deviation (W)")
load4_mean: Optional[float] = Field(default=None, description="Load 4 mean value (W)")
load4_std: Optional[float] = Field(default=None, description="Load 4 standard deviation (W)")
# Computed fields
@computed_field # type: ignore[prop-decorator]
@property
def load_total_mean(self) -> float:
"""Total load mean value (W)."""
total_mean = 0.0
for i in range(5):
load_mean_attr = f"load{i}_mean"
value = getattr(self, load_mean_attr)
if value:
total_mean += value
return total_mean
@computed_field # type: ignore[prop-decorator]
@property
def load_total_std(self) -> float:
"""Total load standard deviation (W)."""
total_std = 0.0
for i in range(5):
load_std_attr = f"load{i}_std"
value = getattr(self, load_std_attr)
if value:
total_std += value
return total_std
load_mean: Optional[float] = Field(default=None, description="Load mean value (W)")
load_std: Optional[float] = Field(default=None, description="Load standard deviation (W)")
class LoadProvider(PredictionProvider):
@@ -86,17 +53,4 @@ class LoadProvider(PredictionProvider):
return "LoadProvider"
def enabled(self) -> bool:
logger.debug(
f"LoadProvider ID {self.provider_id()} vs. config {self.config.load_providers}"
)
return self.provider_id() == self.config.load_providers
def loads(self) -> List[str]:
"""Returns a list of key prefixes of the loads managed by this provider."""
loads_prefix = []
for i in range(self.config.load_count):
load_provider_attr = f"load{i}_provider"
value = getattr(self.config, load_provider_attr)
if value == self.provider_id():
loads_prefix.append(f"load{i}")
return loads_prefix
return self.provider_id() == self.config.load_provider

View File

@@ -39,8 +39,8 @@ class LoadAkkudoktor(LoadProvider):
profile_data = np.array(
list(zip(file_data["yearly_profiles"], file_data["yearly_profiles_std"]))
)
data_year_energy = profile_data * self.config.loadakkudoktor_year_energy
# pprint(self.data_year_energy)
# Calculate values in W by relative profile data and yearly consumption given in kWh
data_year_energy = profile_data * self.config.loadakkudoktor_year_energy * 1000
except FileNotFoundError:
error_msg = f"Error: File {load_file} not found."
logger.error(error_msg)
@@ -54,16 +54,13 @@ class LoadAkkudoktor(LoadProvider):
def _update_data(self, force_update: Optional[bool] = False) -> None:
"""Adds the load means and standard deviations."""
data_year_energy = self.load_data()
for load in self.loads():
attr_load_mean = f"{load}_mean"
attr_load_std = f"{load}_std"
date = self.start_datetime
for i in range(self.config.prediction_hours):
# Extract mean and standard deviation for the given day and hour
# Day indexing starts at 0, -1 because of that
hourly_stats = data_year_energy[date.day_of_year - 1, :, date.hour]
self.update_value(date, attr_load_mean, hourly_stats[0])
self.update_value(date, attr_load_std, hourly_stats[1])
date += to_duration("1 hour")
date = self.start_datetime
for i in range(self.config.prediction_hours):
# Extract mean and standard deviation for the given day and hour
# Day indexing starts at 0, -1 because of that
hourly_stats = data_year_energy[date.day_of_year - 1, :, date.hour]
self.update_value(date, "load_mean", hourly_stats[0])
self.update_value(date, "load_std", hourly_stats[1])
date += to_duration("1 hour")
# We are working on fresh data (no cache), report update time
self.update_datetime = to_datetime(in_timezone=self.config.timezone)

View File

@@ -20,48 +20,17 @@ logger = get_logger(__name__)
class LoadImportCommonSettings(SettingsBaseModel):
"""Common settings for load data import from file."""
"""Common settings for load data import from file or JSON string."""
load0_import_file_path: Optional[Union[str, Path]] = Field(
load_import_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import load data from."
)
load0_import_json: Optional[str] = Field(
default=None, description="JSON string, dictionary of load forecast value lists."
)
load1_import_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import load data from."
)
load1_import_json: Optional[str] = Field(
default=None, description="JSON string, dictionary of load forecast value lists."
)
load2_import_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import load data from."
)
load2_import_json: Optional[str] = Field(
default=None, description="JSON string, dictionary of load forecast value lists."
)
load3_import_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import load data from."
)
load3_import_json: Optional[str] = Field(
default=None, description="JSON string, dictionary of load forecast value lists."
)
load4_import_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import load data from."
)
load4_import_json: Optional[str] = Field(
load_import_json: Optional[str] = Field(
default=None, description="JSON string, dictionary of load forecast value lists."
)
# Validators
@field_validator(
"load0_import_file_path",
"load1_import_file_path",
"load2_import_file_path",
"load3_import_file_path",
"load4_import_file_path",
mode="after",
)
@field_validator("load_import_file_path", mode="after")
@classmethod
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
if value is None:
@@ -89,12 +58,7 @@ class LoadImport(LoadProvider, PredictionImportProvider):
return "LoadImport"
def _update_data(self, force_update: Optional[bool] = False) -> None:
for load in self.loads():
attr_file_path = f"{load}_import_file_path"
attr_json = f"{load}_import_json"
import_file_path = getattr(self.config, attr_file_path)
if import_file_path is not None:
self.import_from_file(import_file_path, key_prefix=load)
import_json = getattr(self.config, attr_json)
if import_json is not None:
self.import_from_json(import_json, key_prefix=load)
if self.config.load_import_file_path is not None:
self.import_from_file(self.config.load_import_file_path, key_prefix="load")
if self.config.load_import_json is not None:
self.import_from_json(self.config.load_import_json, key_prefix="load")

View File

@@ -20,7 +20,7 @@ logger = get_logger(__name__)
class PVForecastImportCommonSettings(SettingsBaseModel):
"""Common settings for pvforecast data import from file."""
"""Common settings for pvforecast data import from file or JSON string."""
pvforecastimport_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import pvforecast data from."

View File

@@ -9,5 +9,5 @@ from akkudoktoreos.config.configabc import SettingsBaseModel
class WeatherCommonSettings(SettingsBaseModel):
weather_provider: Optional[str] = Field(
default="ClearOutside", description="Weather provider id of provider to be used."
default=None, description="Weather provider id of provider to be used."
)

View File

@@ -96,8 +96,8 @@ class WeatherBrightSky(WeatherProvider):
ValueError: If the API response does not include expected `weather` data.
"""
source = "https://api.brightsky.dev"
date = to_datetime(self.start_datetime, as_string="%Y-%m-%d")
last_date = to_datetime(self.end_datetime, as_string="%Y-%m-%d")
date = to_datetime(self.start_datetime, as_string="Y-M-D")
last_date = to_datetime(self.end_datetime, as_string="Y-M-D")
response = requests.get(
f"{source}/weather?lat={self.config.latitude}&lon={self.config.longitude}&date={date}&last_date={last_date}&tz={self.config.timezone}"
)

View File

@@ -20,7 +20,7 @@ logger = get_logger(__name__)
class WeatherImportCommonSettings(SettingsBaseModel):
"""Common settings for weather data import from file."""
"""Common settings for weather data import from file or JSON string."""
weatherimport_file_path: Optional[Union[str, Path]] = Field(
default=None, description="Path to the file to import weather data from."