Add Documentation 2 (#334)

Add documentation that covers:

- configuration
- prediction

Add Python scripts that support automatic documentation generation for
configuration data defined with pydantic.
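
The generator scripts themselves are not shown in the hunks below, so the following is a
hypothetical sketch only: it walks a Pydantic v2 model's model_fields and emits a Markdown
table. The helper name model_to_markdown is made up; only SettingsEOS comes from the diff.

    from pydantic import BaseModel

    def model_to_markdown(model_cls: type[BaseModel]) -> str:
        """Render the field metadata of a pydantic model as a Markdown table (sketch)."""
        rows = ["| Name | Type | Default | Description |", "|---|---|---|---|"]
        for name, field in model_cls.model_fields.items():
            rows.append(
                f"| {name} | {field.annotation} | {field.default!r} | {field.description or ''} |"
            )
        return "\n".join(rows)

    # Usage against the settings model from the diff below (assumption):
    #   from akkudoktoreos.config.config import SettingsEOS
    #   print(model_to_markdown(SettingsEOS))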

Adapt the EOS configuration to provide additional methods for the REST API and
for automatic documentation generation.

Adapt the REST API to allow loading and saving the EOS configuration file.
Sort the REST API endpoints when generating the OpenAPI markdown for the docs.
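
As a rough illustration only (the base URL and port are assumptions; the endpoint paths come
from the server diff further down), loading, reloading and saving the configuration file via
the REST API could look like this:

    import requests

    BASE = "http://localhost:8503"  # assumed EOS server address

    # Settings as defined by the EOS configuration file
    file_settings = requests.get(f"{BASE}/v1/config/file").json()

    # Re-read the configuration file into the running server
    requests.post(f"{BASE}/v1/config/update")

    # Save the current configuration back to the configuration file
    requests.put(f"{BASE}/v1/config/file")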

Move logutil to core/logging so that logging can be configured through the standard configuration.
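
A minimal sketch of the new import path, matching the get_logger signature shown further
down (the "DEBUG" level is just an example value):

    from akkudoktoreos.core.logging import get_logger

    logger = get_logger(__name__, logging_level="DEBUG")
    logger.info("logging is now configured via core/logging")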

Make Akkudoktor predictions always start extraction of prediction data at the start of the day.
Previously, extraction started at the current hour. This supports code that assumes
prediction data starts at the start of the day.
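
A short sketch of the start-of-day comparison the Akkudoktor providers now apply, using the
compare_datetimes/to_datetime helpers that appear in the diff (the concrete timestamp is
illustrative):

    from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime

    start_datetime = to_datetime()              # e.g. "now", somewhere mid-day
    day_start = start_datetime.start_of("day")  # 00:00:00 of the same day

    dt = to_datetime("2025-01-05 07:00:00")
    if compare_datetimes(dt, day_start).lt:
        pass  # only data older than the start of the day is treated as too old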

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
Bobby Noelte
2025-01-05 14:41:07 +01:00
committed by GitHub
parent 03ec729e50
commit d4e31d556a
52 changed files with 4517 additions and 462 deletions

View File

@@ -20,6 +20,8 @@ from pydantic import Field, ValidationError, computed_field
# settings
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.logsettings import LoggingCommonSettings
from akkudoktoreos.devices.devices import DevicesCommonSettings
from akkudoktoreos.measurement.measurement import MeasurementCommonSettings
from akkudoktoreos.optimization.optimization import OptimizationCommonSettings
@@ -34,7 +36,6 @@ from akkudoktoreos.prediction.pvforecastimport import PVForecastImportCommonSett
from akkudoktoreos.prediction.weather import WeatherCommonSettings
from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
from akkudoktoreos.server.server import ServerCommonSettings
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.utils.utils import UtilsCommonSettings
logger = get_logger(__name__)
@@ -90,6 +91,7 @@ class ConfigCommonSettings(SettingsBaseModel):
class SettingsEOS(
ConfigCommonSettings,
LoggingCommonSettings,
DevicesCommonSettings,
MeasurementCommonSettings,
OptimizationCommonSettings,
@@ -188,7 +190,13 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
@property
def config_default_file_path(self) -> Path:
"""Compute the default config file path."""
return Path(__file__).parent.parent.joinpath("data/default.config.json")
return self.package_root_path.joinpath("data/default.config.json")
@computed_field # type: ignore[prop-decorator]
@property
def package_root_path(self) -> Path:
"""Compute the package root path."""
return Path(__file__).parent.parent.resolve()
# Computed fields
@computed_field # type: ignore[prop-decorator]
@@ -200,6 +208,15 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
key_list.extend(list(self.__pydantic_decorators__.computed_fields.keys()))
return key_list
# Computed fields
@computed_field # type: ignore[prop-decorator]
@property
def config_keys_read_only(self) -> List[str]:
"""Returns the keys of all read only fields in the configuration."""
key_list = []
key_list.extend(list(self.__pydantic_decorators__.computed_fields.keys()))
return key_list
def __init__(self) -> None:
"""Initializes the singleton ConfigEOS instance.
@@ -239,7 +256,7 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
settings (SettingsEOS): The settings to apply globally.
force (Optional[bool]): If True, overwrites the existing settings completely.
If False, the new settings are merged to the existing ones with priority for
the new ones.
the new ones. Defaults to False.
Raises:
ValueError: If settings are already set and `force` is not True or
@@ -349,14 +366,23 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
return cfile, True
return config_dirs[0].joinpath(self.CONFIG_FILE_NAME), False
def from_config_file(self) -> None:
"""Loads the configuration file settings for EOS.
def settings_from_config_file(self) -> tuple[SettingsEOS, Path]:
"""Load settings from the configuration file.
If the config file does not exist, it will be created.
Returns:
tuple of settings and path
settings (SettingsEOS): The settings defined by the EOS configuration file.
path (pathlib.Path): The path of the configuration file.
Raises:
ValueError: If the configuration file is invalid or incomplete.
"""
config_file, exists = self._get_config_file_path()
config_dir = config_file.parent
# Create config directory and copy default config if file does not exist
if not exists:
config_dir.mkdir(parents=True, exist_ok=True)
try:
@@ -366,18 +392,39 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
config_file = self.config_default_file_path
config_dir = config_file.parent
# Load and validate the configuration file
with config_file.open("r", encoding=self.ENCODING) as f_in:
try:
json_str = f_in.read()
ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
settings = SettingsEOS.model_validate_json(json_str)
except ValidationError as exc:
raise ValueError(f"Configuration '{config_file}' is incomplete or not valid: {exc}")
return settings, config_file
def from_config_file(self) -> tuple[SettingsEOS, Path]:
"""Load the configuration file settings for EOS.
Returns:
tuple of settings and path
settings (SettingsEOS): The settings defined by the EOS configuration file.
path (pathlib.Path): The path of the configuration file.
Raises:
ValueError: If the configuration file is invalid or incomplete.
"""
# Load settings from config file
ConfigEOS._file_settings, config_file = self.settings_from_config_file()
# Update configuration in memory
self.update()
# Everthing worked, remember the values
self._config_folder_path = config_dir
# Everything worked, remember the values
self._config_folder_path = config_file.parent
self._config_file_path = config_file
return ConfigEOS._file_settings, config_file
def to_config_file(self) -> None:
"""Saves the current configuration to the configuration file.

View File

@@ -16,7 +16,7 @@ from typing import Any, ClassVar, Dict, Optional, Type
from pendulum import DateTime
from pydantic import computed_field
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)

View File

@@ -31,13 +31,13 @@ from pydantic import (
)
from akkudoktoreos.core.coreabc import ConfigMixin, SingletonMixin, StartMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import (
PydanticBaseModel,
PydanticDateTimeData,
PydanticDateTimeDataFrame,
)
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)
@@ -583,20 +583,48 @@ class DataSequence(DataBase, MutableSequence):
# Sort the list by datetime after adding/updating
self.sort_by_datetime()
def update_value(self, date: DateTime, key: str, value: Any) -> None:
"""Updates a specific value in the data record for a given date.
@overload
def update_value(self, date: DateTime, key: str, value: Any) -> None: ...
If a record for the date exists, updates the specified attribute with the new value.
Otherwise, appends a new record with the given value and maintains chronological order.
@overload
def update_value(self, date: DateTime, values: Dict[str, Any]) -> None: ...
def update_value(self, date: DateTime, *args: Any, **kwargs: Any) -> None:
"""Updates specific values in the data record for a given date.
If a record for the date exists, updates the specified attributes with the new values.
Otherwise, appends a new record with the given values and maintains chronological order.
Args:
date (datetime): The date for which the weather value is to be added or updated.
key (str): The attribute name to be updated.
value: The new value to set for the specified attribute.
date (datetime): The date for which the values are to be added or updated.
key (str), value (Any): Single key-value pair to update
OR
values (Dict[str, Any]): Dictionary of key-value pairs to update
OR
**kwargs: Key-value pairs as keyword arguments
Examples:
>>> update_value(date, 'temperature', 25.5)
>>> update_value(date, {'temperature': 25.5, 'humidity': 80})
>>> update_value(date, temperature=25.5, humidity=80)
"""
self._validate_key_writable(key)
# Process input arguments into a dictionary
values: Dict[str, Any] = {}
if len(args) == 2: # Single key-value pair
values[args[0]] = args[1]
elif len(args) == 1 and isinstance(args[0], dict): # Dictionary input
values.update(args[0])
elif len(args) > 0: # Invalid number of arguments
raise ValueError("Expected either 2 arguments (key, value) or 1 dictionary argument")
values.update(kwargs) # Add any keyword arguments
# Validate all keys are writable
for key in values:
self._validate_key_writable(key)
# Ensure datetime objects are normalized
date = to_datetime(date, to_maxtime=False)
# Check if a record with the given date already exists
for record in self.records:
if not isinstance(record.date_time, DateTime):
@@ -604,12 +632,13 @@ class DataSequence(DataBase, MutableSequence):
f"Record date '{record.date_time}' is not a datetime, but a `{type(record.date_time).__name__}`."
)
if compare_datetimes(record.date_time, date).equal:
# Update the DataRecord with the new value for the specified key
setattr(record, key, value)
# Update the DataRecord with all new values
for key, value in values.items():
setattr(record, key, value)
break
else:
# Create a new record and append to the list
record = self.record_class()(date_time=date, **{key: value})
record = self.record_class()(date_time=date, **values)
self.records.append(record)
# Sort the list by datetime after adding/updating
self.sort_by_datetime()
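
For illustration, the three equivalent calling conventions from the docstring above; seq is
assumed to be any DataSequence-derived instance (e.g. a prediction provider) with writable
load_mean/load_std keys:

    from akkudoktoreos.utils.datetimeutil import to_datetime

    date = to_datetime()
    seq.update_value(date, "load_mean", 25.5)                     # single key/value
    seq.update_value(date, {"load_mean": 25.5, "load_std": 1.2})  # dictionary
    seq.update_value(date, load_mean=25.5, load_std=1.2)          # keyword arguments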
@@ -841,7 +870,7 @@ class DataSequence(DataBase, MutableSequence):
if start_index == 0:
# No value before start
# Add dummy value
dates.insert(0, dates[0] - interval)
dates.insert(0, start_datetime - interval)
values.insert(0, values[0])
elif start_index > 1:
# Truncate all values before latest value before start_datetime

View File

@@ -7,12 +7,12 @@ from pydantic import ConfigDict, Field, computed_field, field_validator, model_v
from typing_extensions import Self
from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin, SingletonMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.devices.battery import Battery
from akkudoktoreos.devices.generic import HomeAppliance
from akkudoktoreos.devices.inverter import Inverter
from akkudoktoreos.utils.datetimeutil import to_datetime
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.utils.utils import NumpyEncoder
logger = get_logger(__name__)

View File

@@ -0,0 +1,20 @@
"""Abstract and base classes for logging."""
import logging
def logging_str_to_level(level_str: str) -> int:
"""Convert log level string to logging level."""
if level_str == "DEBUG":
level = logging.DEBUG
elif level_str == "INFO":
level = logging.INFO
elif level_str == "WARNING":
level = logging.WARNING
elif level_str == "CRITICAL":
level = logging.CRITICAL
elif level_str == "ERROR":
level = logging.ERROR
else:
raise ValueError(f"Unknown loggin level: {level_str}")
return level
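
A minimal usage sketch of the helper (the level string is an example):

    import logging

    from akkudoktoreos.core.logabc import logging_str_to_level

    logging.getLogger().setLevel(logging_str_to_level("WARNING"))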

View File

@@ -15,19 +15,21 @@ Notes:
- The logger supports rotating log files to prevent excessive log file size.
"""
import logging
import logging as pylogging
import os
from logging.handlers import RotatingFileHandler
from typing import Optional
from akkudoktoreos.core.logabc import logging_str_to_level
def get_logger(
name: str,
log_file: Optional[str] = None,
logging_level: Optional[str] = "INFO",
logging_level: Optional[str] = None,
max_bytes: int = 5000000,
backup_count: int = 5,
) -> logging.Logger:
) -> pylogging.Logger:
"""Creates and configures a logger with a given name.
The logger supports logging to both the console and an optional log file. File logging is
@@ -48,31 +50,22 @@ def get_logger(
logger.info("Application started")
"""
# Create a logger with the specified name
logger = logging.getLogger(name)
logger = pylogging.getLogger(name)
logger.propagate = True
if (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
logging_level = env_level
if logging_level == "DEBUG":
level = logging.DEBUG
elif logging_level == "INFO":
level = logging.INFO
elif logging_level == "WARNING":
level = logging.WARNING
elif logging_level == "ERROR":
level = logging.ERROR
else:
level = logging.DEBUG
logger.setLevel(level)
if logging_level is not None:
level = logging_str_to_level(logging_level)
logger.setLevel(level)
# The log message format
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
formatter = pylogging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# Prevent loggers from being added multiple times
# There may already be a logger from pytest
if not logger.handlers:
# Create a console handler with a standard output stream
console_handler = logging.StreamHandler()
console_handler.setLevel(level)
console_handler = pylogging.StreamHandler()
if logging_level is not None:
console_handler.setLevel(level)
console_handler.setFormatter(formatter)
# Add the console handler to the logger
@@ -88,7 +81,8 @@ def get_logger(
# Create a rotating file handler
file_handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
file_handler.setLevel(level)
if logging_level is not None:
file_handler.setLevel(level)
file_handler.setFormatter(formatter)
# Add the file handler to the logger

View File

@@ -0,0 +1,45 @@
"""Settings for logging.
Kept in an extra module to avoid cyclic dependencies on package import.
"""
import logging
import os
from typing import Optional
from pydantic import Field, computed_field, field_validator
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.logabc import logging_str_to_level
class LoggingCommonSettings(SettingsBaseModel):
"""Common settings for logging."""
logging_level_default: Optional[str] = Field(
default=None, description="EOS default logging level."
)
# Validators
@field_validator("logging_level_default", mode="after")
@classmethod
def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
if isinstance(value, str) and value.upper() == "NONE":
value = None
if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
# Take default logging level from special environment variable
value = env_level
if value is None:
return None
level = logging_str_to_level(value)
logging.getLogger().setLevel(level)
return value
# Computed fields
@computed_field # type: ignore[prop-decorator]
@property
def logging_level_root(self) -> str:
"""Root logger logging level."""
level = logging.getLogger().getEffectiveLevel()
level_name = logging.getLevelName(level)
return level_name
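
Since LoggingCommonSettings is merged into SettingsEOS (see the config diff above), the
default level can be set through the normal settings path; a hedged sketch:

    from akkudoktoreos.config.config import SettingsEOS, get_config

    config_eos = get_config()
    config_eos.merge_settings(SettingsEOS(logging_level_default="DEBUG"))
    print(config_eos.logging_level_root)  # effective level of the root logger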

View File

@@ -275,6 +275,7 @@ class PydanticDateTimeDataFrame(PydanticBaseModel):
)
@field_validator("tz")
@classmethod
def validate_timezone(cls, v: Optional[str]) -> Optional[str]:
"""Validate that the timezone is valid."""
if v is not None:

View File

@@ -14,6 +14,7 @@
"load_name": null,
"load_provider": null,
"loadakkudoktor_year_energy": null,
"logging_level": "INFO",
"longitude": 13.4,
"optimization_ev_available_charge_rates_percent": null,
"optimization_hours": 48,

View File

@@ -3,8 +3,8 @@ from typing import Any, Optional
import numpy as np
from pydantic import BaseModel, Field, field_validator
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.devices.devicesabc import DeviceBase
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.utils.utils import NumpyEncoder
logger = get_logger(__name__)

View File

@@ -6,13 +6,13 @@ from pydantic import Field, computed_field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.devices.battery import Battery
from akkudoktoreos.devices.devicesabc import DevicesBase
from akkudoktoreos.devices.generic import HomeAppliance
from akkudoktoreos.devices.inverter import Inverter
from akkudoktoreos.prediction.interpolator import SelfConsumptionPropabilityInterpolator
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -10,9 +10,9 @@ from akkudoktoreos.core.coreabc import (
EnergyManagementSystemMixin,
PredictionMixin,
)
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -3,8 +3,8 @@ from typing import Optional
import numpy as np
from pydantic import BaseModel, Field
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.devices.devicesabc import DeviceBase
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -3,9 +3,9 @@ from typing import Optional
from pydantic import BaseModel, Field
from scipy.interpolate import RegularGridInterpolator
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.devices.battery import Battery
from akkudoktoreos.devices.devicesabc import DeviceBase
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -16,8 +16,8 @@ from pydantic import Field, computed_field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.core.dataabc import DataImportMixin, DataRecord, DataSequence
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -3,7 +3,7 @@ from typing import List, Optional
from pydantic import Field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)

View File

@@ -3,8 +3,8 @@
from pydantic import ConfigDict
from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -9,8 +9,8 @@ from typing import List, Optional
from pydantic import Field, computed_field
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -13,11 +13,11 @@ import requests
from numpydantic import NDArray, Shape
from pydantic import Field, ValidationError
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.prediction.elecpriceabc import ElecPriceDataRecord, ElecPriceProvider
from akkudoktoreos.utils.cacheutil import CacheFileStore, cache_in_file
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)
@@ -218,17 +218,14 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
akkudoktor_value.marketpriceEurocentPerKWh / (100 * 1000) + charges_kwh / 1000
)
if compare_datetimes(dt, self.start_datetime).lt:
# forecast data is too old
# We provide prediction starting at the start of the day, to be compatible with the old system.
if compare_datetimes(dt, self.start_datetime.start_of("day")).lt:
# forecast data is too old - older than start_datetime with time set to 00:00:00
self.elecprice_8days[dt.hour, dt.day_of_week] = price_wh
continue
self.elecprice_8days[dt.hour, 7] = price_wh
record = ElecPriceDataRecord(
date_time=dt,
elecprice_marketprice_wh=price_wh,
)
self.append(record)
self.update_value(dt, "elecprice_marketprice_wh", price_wh)
# Update 8day cache
elecprice_cache_file.seek(0)

View File

@@ -12,9 +12,9 @@ from typing import Optional, Union
from pydantic import Field, field_validator
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -5,12 +5,14 @@ from typing import Optional
from pydantic import Field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)
class LoadCommonSettings(SettingsBaseModel):
"""Common settings for loaod forecast providers."""
load_provider: Optional[str] = Field(
default=None, description="Load provider id of provider to be used."
)

View File

@@ -9,8 +9,8 @@ from typing import List, Optional
from pydantic import Field
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -1,15 +1,14 @@
"""Retrieves load forecast data from Akkudoktor load profiles."""
from pathlib import Path
from typing import Optional
import numpy as np
from pydantic import Field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.loadabc import LoadProvider
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)
@@ -84,7 +83,7 @@ class LoadAkkudoktor(LoadProvider):
def load_data(self) -> np.ndarray:
"""Loads data from the Akkudoktor load file."""
load_file = Path(__file__).parent.parent.joinpath("data/load_profiles.npz")
load_file = self.config.package_root_path.joinpath("data/load_profiles.npz")
data_year_energy = None
try:
file_data = np.load(load_file)
@@ -107,23 +106,25 @@ class LoadAkkudoktor(LoadProvider):
"""Adds the load means and standard deviations."""
data_year_energy = self.load_data()
weekday_adjust, weekend_adjust = self._calculate_adjustment(data_year_energy)
date = self.start_datetime
for i in range(self.config.prediction_hours):
# We provide prediction starting at the start of the day, to be compatible with the old system.
# End date for prediction is prediction hours from now.
date = self.start_datetime.start_of("day")
end_date = self.start_datetime.add(hours=self.config.prediction_hours)
while compare_datetimes(date, end_date).lt:
# Extract mean (index 0) and standard deviation (index 1) for the given day and hour
# Day indexing starts at 0, hence the -1 on day_of_year
hourly_stats = data_year_energy[date.day_of_year - 1, :, date.hour]
self.update_value(date, "load_mean", hourly_stats[0])
self.update_value(date, "load_std", hourly_stats[1])
values = {
"load_mean": hourly_stats[0],
"load_std": hourly_stats[1],
}
if date.day_of_week < 5:
# Monday to Friday (0..4)
self.update_value(
date, "load_mean_adjusted", hourly_stats[0] + weekday_adjust[date.hour]
)
values["load_mean_adjusted"] = hourly_stats[0] + weekday_adjust[date.hour]
else:
# Saturday, Sunday (5, 6)
self.update_value(
date, "load_mean_adjusted", hourly_stats[0] + weekend_adjust[date.hour]
)
values["load_mean_adjusted"] = hourly_stats[0] + weekend_adjust[date.hour]
self.update_value(date, values)
date += to_duration("1 hour")
# We are working on fresh data (no cache), report update time
self.update_datetime = to_datetime(in_timezone=self.config.timezone)

View File

@@ -12,9 +12,9 @@ from typing import Optional, Union
from pydantic import Field, field_validator
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.loadabc import LoadProvider
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -22,8 +22,8 @@ from akkudoktoreos.core.dataabc import (
DataRecord,
DataSequence,
)
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -5,7 +5,7 @@ from typing import Any, ClassVar, List, Optional
from pydantic import Field, computed_field
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)
@@ -43,7 +43,7 @@ class PVForecastCommonSettings(SettingsBaseModel):
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
)
pvforecast0_loss: Optional[float] = Field(
default=None, description="Sum of PV system losses in percent"
default=14.0, description="Sum of PV system losses in percent"
)
pvforecast0_trackingtype: Optional[int] = Field(
default=None,
@@ -98,7 +98,9 @@ class PVForecastCommonSettings(SettingsBaseModel):
default="free",
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
)
pvforecast1_loss: Optional[float] = Field(0, description="Sum of PV system losses in percent")
pvforecast1_loss: Optional[float] = Field(
default=14.0, description="Sum of PV system losses in percent"
)
pvforecast1_trackingtype: Optional[int] = Field(
default=None,
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
@@ -152,7 +154,9 @@ class PVForecastCommonSettings(SettingsBaseModel):
default="free",
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
)
pvforecast2_loss: Optional[float] = Field(0, description="Sum of PV system losses in percent")
pvforecast2_loss: Optional[float] = Field(
default=14.0, description="Sum of PV system losses in percent"
)
pvforecast2_trackingtype: Optional[int] = Field(
default=None,
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
@@ -206,7 +210,9 @@ class PVForecastCommonSettings(SettingsBaseModel):
default="free",
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
)
pvforecast3_loss: Optional[float] = Field(0, description="Sum of PV system losses in percent")
pvforecast3_loss: Optional[float] = Field(
default=14.0, description="Sum of PV system losses in percent"
)
pvforecast3_trackingtype: Optional[int] = Field(
default=None,
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
@@ -260,7 +266,9 @@ class PVForecastCommonSettings(SettingsBaseModel):
default="free",
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
)
pvforecast4_loss: Optional[float] = Field(0, description="Sum of PV system losses in percent")
pvforecast4_loss: Optional[float] = Field(
default=14.0, description="Sum of PV system losses in percent"
)
pvforecast4_trackingtype: Optional[int] = Field(
default=None,
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
@@ -314,7 +322,9 @@ class PVForecastCommonSettings(SettingsBaseModel):
default="free",
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
)
pvforecast5_loss: Optional[float] = Field(0, description="Sum of PV system losses in percent")
pvforecast5_loss: Optional[float] = Field(
default=14.0, description="Sum of PV system losses in percent"
)
pvforecast5_trackingtype: Optional[int] = Field(
default=None,
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",

View File

@@ -9,8 +9,8 @@ from typing import List, Optional
from pydantic import Field
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -68,6 +68,7 @@ from typing import Any, List, Optional, Union
import requests
from pydantic import Field, ValidationError, computed_field
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.prediction.pvforecastabc import (
PVForecastDataRecord,
@@ -75,7 +76,6 @@ from akkudoktoreos.prediction.pvforecastabc import (
)
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)
@@ -283,27 +283,21 @@ class PVForecastAkkudoktor(PVForecastProvider):
original_datetime = akkudoktor_data.values[0][i].datetime
dt = to_datetime(original_datetime, in_timezone=self.config.timezone)
# iso_datetime = parser.parse(original_datetime).isoformat() # Convert to ISO format
# print()
# Optional: subtract 2 hours to test the time adjustment
# adjusted_datetime = parser.parse(original_datetime) - timedelta(hours=2)
# print(f"Adjusted timestamp: {adjusted_datetime.isoformat()}")
if compare_datetimes(dt, self.start_datetime).lt:
# We provide prediction starting at the start of the day, to be compatible with the old system.
if compare_datetimes(dt, self.start_datetime.start_of("day")).lt:
# forecast data is too old
continue
sum_dc_power = sum(values[i].dcPower for values in akkudoktor_data.values)
sum_ac_power = sum(values[i].power for values in akkudoktor_data.values)
record = PVForecastAkkudoktorDataRecord(
date_time=dt, # Use the adjusted timestamp
pvforecast_dc_power=sum_dc_power,
pvforecast_ac_power=sum_ac_power,
pvforecastakkudoktor_wind_speed_10m=akkudoktor_data.values[0][i].windspeed_10m,
pvforecastakkudoktor_temp_air=akkudoktor_data.values[0][i].temperature,
)
self.append(record)
data = {
"pvforecast_dc_power": sum_dc_power,
"pvforecast_ac_power": sum_ac_power,
"pvforecastakkudoktor_wind_speed_10m": akkudoktor_data.values[0][i].windspeed_10m,
"pvforecastakkudoktor_temp_air": akkudoktor_data.values[0][i].temperature,
}
self.update_value(dt, data)
if len(self) < self.config.prediction_hours:
raise ValueError(

View File

@@ -12,9 +12,9 @@ from typing import Optional, Union
from pydantic import Field, field_validator
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.prediction.pvforecastabc import PVForecastProvider
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -14,8 +14,8 @@ import pandas as pd
import pvlib
from pydantic import Field
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -13,10 +13,10 @@ import pandas as pd
import pvlib
import requests
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.weatherabc import WeatherDataRecord, WeatherProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -19,10 +19,10 @@ import pandas as pd
import requests
from bs4 import BeautifulSoup
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.weatherabc import WeatherDataRecord, WeatherProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration, to_timezone
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -12,9 +12,9 @@ from typing import Optional, Union
from pydantic import Field, field_validator
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.prediction.weatherabc import WeatherProvider
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -7,7 +7,6 @@ from pathlib import Path
from typing import Annotated, Any, AsyncGenerator, Dict, List, Optional, Union
import httpx
import pandas as pd
import uvicorn
from fastapi import FastAPI, Query, Request
from fastapi.exceptions import HTTPException
@@ -15,6 +14,7 @@ from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse, Resp
from akkudoktoreos.config.config import ConfigEOS, SettingsEOS, get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.core.pydantic import (
PydanticBaseModel,
PydanticDateTimeData,
@@ -29,7 +29,6 @@ from akkudoktoreos.optimization.genetic import (
)
from akkudoktoreos.prediction.prediction import get_prediction
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)
config_eos = get_config()
@@ -182,33 +181,112 @@ class PdfResponse(FileResponse):
media_type = "application/pdf"
@app.put("/v1/config/value")
def fastapi_config_value_put(
key: Annotated[str, Query(description="configuration key")],
value: Annotated[Any, Query(description="configuration value")],
) -> ConfigEOS:
"""Set the configuration option in the settings.
Args:
key (str): configuration key
value (Any): configuration value
Returns:
configuration (ConfigEOS): The current configuration after the write.
"""
if key not in config_eos.config_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
if key in config_eos.config_keys_read_only:
raise HTTPException(status_code=404, detail=f"Key '{key}' is read only.")
try:
setattr(config_eos, key, value)
except Exception as e:
raise HTTPException(status_code=400, detail=f"Error on update of configuration: {e}")
return config_eos
@app.post("/v1/config/update")
def fastapi_config_update_post() -> ConfigEOS:
"""Update the configuration from the EOS configuration file.
Returns:
configuration (ConfigEOS): The current configuration after update.
"""
try:
_, config_file_path = config_eos.from_config_file()
except Exception:
raise HTTPException(
status_code=404,
detail=f"Cannot update configuration from file '{config_eos.config_file_path}'.",
)
return config_eos
@app.get("/v1/config/file")
def fastapi_config_file_get() -> SettingsEOS:
"""Get the settings as defined by the EOS configuration file.
Returns:
settings (SettingsEOS): The settings defined by the EOS configuration file.
"""
try:
settings, config_file_path = config_eos.settings_from_config_file()
except Exception:
raise HTTPException(
status_code=404,
detail=f"Cannot read configuration from file '{config_eos.config_file_path}'.",
)
return settings
@app.put("/v1/config/file")
def fastapi_config_file_put() -> ConfigEOS:
"""Save the current configuration to the EOS configuration file.
Returns:
configuration (ConfigEOS): The current configuration that was saved.
"""
try:
config_eos.to_config_file()
except Exception:
raise HTTPException(
status_code=404,
detail=f"Cannot save configuration to file '{config_eos.config_file_path}'.",
)
return config_eos
@app.get("/v1/config")
def fastapi_config_get() -> ConfigEOS:
"""Get the current configuration."""
"""Get the current configuration.
Returns:
configuration (ConfigEOS): The current configuration.
"""
return config_eos
@app.put("/v1/config")
def fastapi_config_put(
settings: SettingsEOS,
save: Optional[bool] = None,
settings: Annotated[SettingsEOS, Query(description="settings")],
) -> ConfigEOS:
"""Merge settings into current configuration.
"""Write the provided settings into the current settings.
The existing settings are completely overwritten. Note that for any setting
value that is None, the configuration will fall back to values from other sources such as
environment variables, the EOS configuration file, or default values.
Args:
settings (SettingsEOS): The settings to merge into the current configuration.
save (Optional[bool]): Save the resulting configuration to the configuration file.
Defaults to False.
settings (SettingsEOS): The settings to write into the current settings.
Returns:
configuration (ConfigEOS): The current configuration after the write.
"""
config_eos.merge_settings(settings)
if save:
try:
config_eos.to_config_file()
except:
raise HTTPException(
status_code=404,
detail=f"Cannot save configuration to file '{config_eos.config_file_path}'.",
)
try:
config_eos.merge_settings(settings, force=True)
except Exception as e:
raise HTTPException(status_code=400, detail=f"Error on update of configuration: {e}")
return config_eos
@@ -226,10 +304,10 @@ def fastapi_measurement_load_mr_series_by_name_get(
key = measurement_eos.name_to_key(name=name, topic="measurement_load")
if key is None:
raise HTTPException(
status_code=404, detail=f"Measurement load with name '{name}' not available."
status_code=404, detail=f"Measurement load with name '{name}' is not available."
)
if key not in measurement_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
pdseries = measurement_eos.key_to_series(key=key)
return PydanticDateTimeSeries.from_series(pdseries)
@@ -244,10 +322,10 @@ def fastapi_measurement_load_mr_value_by_name_put(
key = measurement_eos.name_to_key(name=name, topic="measurement_load")
if key is None:
raise HTTPException(
status_code=404, detail=f"Measurement load with name '{name}' not available."
status_code=404, detail=f"Measurement load with name '{name}' is not available."
)
if key not in measurement_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
measurement_eos.update_value(datetime, key, value)
pdseries = measurement_eos.key_to_series(key=key)
return PydanticDateTimeSeries.from_series(pdseries)
@@ -261,10 +339,10 @@ def fastapi_measurement_load_mr_series_by_name_put(
key = measurement_eos.name_to_key(name=name, topic="measurement_load")
if key is None:
raise HTTPException(
status_code=404, detail=f"Measurement load with name '{name}' not available."
status_code=404, detail=f"Measurement load with name '{name}' is not available."
)
if key not in measurement_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
pdseries = series.to_series() # make pandas series from PydanticDateTimeSeries
measurement_eos.key_from_series(key=key, series=pdseries)
pdseries = measurement_eos.key_to_series(key=key)
@@ -277,7 +355,7 @@ def fastapi_measurement_series_get(
) -> PydanticDateTimeSeries:
"""Get the measurements of given key as series."""
if key not in measurement_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
pdseries = measurement_eos.key_to_series(key=key)
return PydanticDateTimeSeries.from_series(pdseries)
@@ -290,7 +368,7 @@ def fastapi_measurement_value_put(
) -> PydanticDateTimeSeries:
"""Merge the measurement of given key and value into EOS measurements at given datetime."""
if key not in measurement_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
measurement_eos.update_value(datetime, key, value)
pdseries = measurement_eos.key_to_series(key=key)
return PydanticDateTimeSeries.from_series(pdseries)
@@ -302,7 +380,7 @@ def fastapi_measurement_series_put(
) -> PydanticDateTimeSeries:
"""Merge measurement given as series into given key."""
if key not in measurement_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
pdseries = series.to_series() # make pandas series from PydanticDateTimeSeries
measurement_eos.key_from_series(key=key, series=pdseries)
pdseries = measurement_eos.key_to_series(key=key)
@@ -351,7 +429,7 @@ def fastapi_prediction_series_get(
Defaults to end datetime of latest prediction.
"""
if key not in prediction_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
if start_datetime is None:
start_datetime = prediction_eos.start_datetime
else:
@@ -394,7 +472,7 @@ def fastapi_prediction_list_get(
Defaults to 1 hour.
"""
if key not in prediction_eos.record_keys:
raise HTTPException(status_code=404, detail=f"Key '{key}' not available.")
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
if start_datetime is None:
start_datetime = prediction_eos.start_datetime
else:
@@ -429,7 +507,7 @@ def fastapi_prediction_update(force_update: bool = False, force_enable: bool = F
try:
prediction_eos.update_data(force_update=force_update, force_enable=force_enable)
except Exception as e:
raise HTTPException(status_code=400, detail=f"Error while trying to update provider: {e}")
raise HTTPException(status_code=400, detail=f"Error on update of provider: {e}")
return Response()
@@ -453,7 +531,7 @@ def fastapi_prediction_update_provider(
try:
provider.update_data(force_update=force_update, force_enable=force_enable)
except Exception as e:
raise HTTPException(status_code=400, detail=f"Error while trying to update provider: {e}")
raise HTTPException(status_code=400, detail=f"Error on update of provider: {e}")
return Response()
@@ -461,6 +539,13 @@ def fastapi_prediction_update_provider(
def fastapi_strompreis() -> list[float]:
"""Deprecated: Electricity Market Price Prediction per Wh (€/Wh).
Electricity prices start at 00:00:00 today and are provided for 48 hours.
If no prices are available the missing ones at the start of the series are
filled with the first available price.
Note:
Electricity price charges are added.
Note:
Set ElecPriceAkkudoktor as elecprice_provider, then update data with
'/v1/prediction/update'
@@ -479,11 +564,21 @@ def fastapi_strompreis() -> list[float]:
# Get the current date and the end date based on prediction hours
# Fetch prices for the specified date range
return prediction_eos.key_to_array(
key="elecprice_marketprice_wh",
start_datetime=prediction_eos.start_datetime,
end_datetime=prediction_eos.end_datetime,
).tolist()
start_datetime = to_datetime().start_of("day")
end_datetime = start_datetime.add(days=2)
try:
elecprice = prediction_eos.key_to_array(
key="elecprice_marketprice_wh",
start_datetime=start_datetime,
end_datetime=end_datetime,
).tolist()
except Exception as e:
raise HTTPException(
status_code=404,
detail=f"Can not get the electricity price forecast: {e}. Did you configure the electricity price forecast provider?",
)
return elecprice
class GesamtlastRequest(PydanticBaseModel):
@@ -498,6 +593,10 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
Endpoint to handle total load prediction adjusted by latest measured data.
Total load prediction starts at 00:00:00 today and is provided for 48 hours.
If no prediction values are available the missing ones at the start of the series are
filled with the first available prediction value.
Note:
Use '/v1/prediction/list?key=load_mean_adjusted' instead.
Load energy meter readings to be added to EOS measurement by:
@@ -534,11 +633,21 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
# Create load forecast
prediction_eos.update_data(force_update=True)
prediction_list = prediction_eos.key_to_array(
key="load_mean_adjusted",
start_datetime=prediction_eos.start_datetime,
end_datetime=prediction_eos.end_datetime,
).tolist()
# Get the forecast starting at the start of the day
start_datetime = to_datetime().start_of("day")
end_datetime = start_datetime.add(days=2)
try:
prediction_list = prediction_eos.key_to_array(
key="load_mean_adjusted",
start_datetime=start_datetime,
end_datetime=end_datetime,
).tolist()
except Exception as e:
raise HTTPException(
status_code=404,
detail=f"Can not get the total load forecast: {e}. Did you configure the load forecast provider?",
)
return prediction_list
@@ -548,6 +657,10 @@ def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
Endpoint to handle total load prediction.
Total load prediction starts at 00:00:00 today and is provided for 48 hours.
If no prediction values are available the missing ones at the start of the series are
filled with the first available prediction value.
Note:
Set LoadAkkudoktor as load_provider, then update data with
'/v1/prediction/update'
@@ -564,11 +677,21 @@ def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
# Create load forecast
prediction_eos.update_data(force_update=True)
prediction_list = prediction_eos.key_to_array(
key="load_mean",
start_datetime=prediction_eos.start_datetime,
end_datetime=prediction_eos.end_datetime,
).tolist()
# Get the forecast starting at the start of the day
start_datetime = to_datetime().start_of("day")
end_datetime = start_datetime.add(days=2)
try:
prediction_list = prediction_eos.key_to_array(
key="load_mean",
start_datetime=start_datetime,
end_datetime=end_datetime,
).tolist()
except Exception as e:
raise HTTPException(
status_code=404,
detail=f"Can not get the total load forecast: {e}. Did you configure the load forecast provider?",
)
return prediction_list
@@ -583,6 +706,10 @@ def fastapi_pvforecast() -> ForecastResponse:
Endpoint to handle PV forecast prediction.
PVForecast starts at 00:00:00 today and is provided for 48 hours.
If no forecast values are available the missing ones at the start of the series are
filled with the first available forecast value.
Note:
Set PVForecastAkkudoktor as pvforecast_provider, then update data with
'/v1/prediction/update'
@@ -590,41 +717,38 @@ def fastapi_pvforecast() -> ForecastResponse:
'/v1/prediction/list?key=pvforecast_ac_power' and
'/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.
"""
###############
# PV Forecast
###############
prediction_key = "pvforecast_ac_power"
pvforecast_ac_power = prediction_eos.get(prediction_key)
if pvforecast_ac_power is None:
raise HTTPException(status_code=404, detail=f"Prediction not available: {prediction_key}")
settings = SettingsEOS(
elecprice_provider="PVForecastAkkudoktor",
)
config_eos.merge_settings(settings=settings)
# On empty Series.loc TypeError: Cannot compare tz-naive and tz-aware datetime-like objects
if len(pvforecast_ac_power) == 0:
pvforecast_ac_power = pd.Series()
else:
# Fetch prices for the specified date range
pvforecast_ac_power = pvforecast_ac_power.loc[
prediction_eos.start_datetime : prediction_eos.end_datetime
]
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
prediction_key = "pvforecastakkudoktor_temp_air"
pvforecastakkudoktor_temp_air = prediction_eos.get(prediction_key)
if pvforecastakkudoktor_temp_air is None:
raise HTTPException(status_code=404, detail=f"Prediction not available: {prediction_key}")
# Create PV forecast
prediction_eos.update_data(force_update=True)
# On empty Series.loc TypeError: Cannot compare tz-naive and tz-aware datetime-like objects
if len(pvforecastakkudoktor_temp_air) == 0:
pvforecastakkudoktor_temp_air = pd.Series()
else:
# Fetch prices for the specified date range
pvforecastakkudoktor_temp_air = pvforecastakkudoktor_temp_air.loc[
prediction_eos.start_datetime : prediction_eos.end_datetime
]
# Get the forecast starting at the start of the day
start_datetime = to_datetime().start_of("day")
end_datetime = start_datetime.add(days=2)
try:
ac_power = prediction_eos.key_to_array(
key="pvforecast_ac_power",
start_datetime=start_datetime,
end_datetime=end_datetime,
).tolist()
temp_air = prediction_eos.key_to_array(
key="pvforecastakkudoktor_temp_air",
start_datetime=start_datetime,
end_datetime=end_datetime,
).tolist()
except Exception as e:
raise HTTPException(
status_code=404,
detail=f"Can not get the PV forecast: {e}. Did you configure the PV forecast provider?",
)
# Return both forecasts as a JSON response
return ForecastResponse(
temperature=pvforecastakkudoktor_temp_air.tolist(), pvpower=pvforecast_ac_power.tolist()
)
return ForecastResponse(temperature=temp_air, pvpower=ac_power)
@app.post("/optimize")
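
For completeness, a sketch of the key_to_array pattern the rewritten endpoints share (the
prediction key and two-day window mirror the code above; error handling omitted):

    from akkudoktoreos.prediction.prediction import get_prediction
    from akkudoktoreos.utils.datetimeutil import to_datetime

    prediction_eos = get_prediction()
    start_datetime = to_datetime().start_of("day")
    values = prediction_eos.key_to_array(
        key="pvforecast_ac_power",
        start_datetime=start_datetime,
        end_datetime=start_datetime.add(days=2),
    ).tolist()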

View File

@@ -2,7 +2,7 @@ import uvicorn
from fasthtml.common import H1, FastHTML, Table, Td, Th, Thead, Titled, Tr
from akkudoktoreos.config.config import get_config
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)

View File

@@ -5,7 +5,7 @@ from typing import Optional
from pydantic import Field, IPvAnyAddress, field_validator
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)

View File

@@ -48,8 +48,8 @@ from pendulum import DateTime, Duration
from pydantic import BaseModel, ConfigDict, Field
from akkudoktoreos.core.coreabc import ConfigMixin
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__name__)

View File

@@ -31,7 +31,7 @@ from pendulum import Date, DateTime, Duration
from pendulum.tz.timezone import Timezone
from timezonefinder import TimezoneFinder
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)

View File

@@ -4,7 +4,7 @@ from typing import Any
import numpy as np
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.core.logging import get_logger
logger = get_logger(__name__)