Mirror of https://github.com/Akkudoktor-EOS/EOS.git, synced 2025-08-25 15:01:14 +00:00
Rename settings variables (remove prefixes)
@@ -179,7 +179,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
To initialize and access configuration attributes (only one instance is created):
```python
config_eos = ConfigEOS() # Always returns the same instance
print(config_eos.prediction.prediction_hours) # Access a setting from the loaded configuration
print(config_eos.prediction.hours) # Access a setting from the loaded configuration
```

"""

@@ -328,7 +328,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):

Example:
>>> config = get_config()
>>> new_data = {"prediction": {"prediction_hours": 24}, "server": {"server_eos_port": 8000}}
>>> new_data = {"prediction": {"hours": 24}, "server": {"port": 8000}}
>>> config.merge_settings_from_dict(new_data)
"""
self._setup(**merge_models(self, data))
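Taken together, the two hunks above change both the attribute path and the merge payload: settings are now grouped per section without prefixes. A minimal usage sketch (the import path is an assumption; the diff only shows the class and method names):

```python
# Hedged sketch: the module path below is assumed, not shown in this diff.
from akkudoktoreos.config.config import ConfigEOS

config_eos = ConfigEOS()  # always returns the same singleton instance

# Nested, prefix-free settings as used by merge_settings_from_dict().
new_data = {
    "prediction": {"hours": 24},  # was: prediction_hours
    "server": {"port": 8000},     # was: server_eos_port
}
config_eos.merge_settings_from_dict(new_data)

print(config_eos.prediction.hours)  # -> 24
```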
@@ -198,9 +198,9 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
self.ev = ev
self.home_appliance = home_appliance
self.inverter = inverter
self.ac_charge_hours = np.full(self.config.prediction.prediction_hours, 0.0)
self.dc_charge_hours = np.full(self.config.prediction.prediction_hours, 1.0)
self.ev_charge_hours = np.full(self.config.prediction.prediction_hours, 0.0)
self.ac_charge_hours = np.full(self.config.prediction.hours, 0.0)
self.dc_charge_hours = np.full(self.config.prediction.hours, 1.0)
self.ev_charge_hours = np.full(self.config.prediction.hours, 0.0)

def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
if self.battery is not None:

@@ -251,7 +251,7 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
error_msg = "Start datetime unknown."
logger.error(error_msg)
raise ValueError(error_msg)
if self.config.prediction.prediction_hours is None:
if self.config.prediction.hours is None:
error_msg = "Prediction hours unknown."
logger.error(error_msg)
raise ValueError(error_msg)
@@ -4,7 +4,6 @@ Kept in an extra module to avoid cyclic dependencies on package import.
"""

import logging
import os
from typing import Optional

from pydantic import Field, computed_field, field_validator

@@ -16,21 +15,18 @@ from akkudoktoreos.core.logabc import logging_str_to_level
class LoggingCommonSettings(SettingsBaseModel):
"""Logging Configuration."""

logging_level_default: Optional[str] = Field(
level: Optional[str] = Field(
default=None,
description="EOS default logging level.",
examples=["INFO", "DEBUG", "WARNING", "ERROR", "CRITICAL"],
)

# Validators
@field_validator("logging_level_default", mode="after")
@field_validator("level", mode="after")
@classmethod
def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
if isinstance(value, str) and value.upper() == "NONE":
value = None
if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
# Take default logging level from special environment variable
value = env_level
if value is None:
return None
level = logging_str_to_level(value)

@@ -40,7 +36,7 @@ class LoggingCommonSettings(SettingsBaseModel):
# Computed fields
@computed_field # type: ignore[prop-decorator]
@property
def logging_level_root(self) -> str:
def root_level(self) -> str:
"""Root logger logging level."""
level = logging.getLogger().getEffectiveLevel()
level_name = logging.getLevelName(level)
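The renamed `level` field keeps its fallback behavior: an explicit value wins, and "NONE" or None falls back to the EOS_LOGGING_LEVEL environment variable. A standalone sketch of that resolution order, using only the standard library (logging.getLevelName stands in for the project's logging_str_to_level):

```python
import logging
import os
from typing import Optional

def resolve_logging_level(configured: Optional[str]) -> Optional[int]:
    """Mirror the validator above: explicit value -> EOS_LOGGING_LEVEL -> None."""
    if isinstance(configured, str) and configured.upper() == "NONE":
        configured = None
    if configured is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
        configured = env_level
    if configured is None:
        return None
    return logging.getLevelName(configured.upper())  # "DEBUG" -> 10, etc.

print(resolve_logging_level("DEBUG"))  # 10
```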
@@ -51,16 +51,16 @@ class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
@computed_field # type: ignore[prop-decorator]
@property
def end_datetime(self) -> Optional[DateTime]:
"""Compute the end datetime based on the `start_datetime` and `prediction_hours`.
"""Compute the end datetime based on the `start_datetime` and `hours`.

Adjusts the calculated end time if DST transitions occur within the prediction window.

Returns:
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
"""
if self.ems.start_datetime and self.config.prediction.prediction_hours:
if self.ems.start_datetime and self.config.prediction.hours:
end_datetime = self.ems.start_datetime + to_duration(
f"{self.config.prediction.prediction_hours} hours"
f"{self.config.prediction.hours} hours"
)
dst_change = end_datetime.offset_hours - self.ems.start_datetime.offset_hours
logger.debug(
@@ -18,9 +18,9 @@ class Heatpump:
COP_COEFFICIENT = 0.1
"""COP increase per degree"""

def __init__(self, max_heat_output: int, prediction_hours: int):
def __init__(self, max_heat_output: int, hours: int):
self.max_heat_output = max_heat_output
self.prediction_hours = prediction_hours
self.hours = hours
self.log = logging.getLogger(__name__)

def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:

@@ -117,9 +117,9 @@ class Heatpump:
"""Simulate power data for 24 hours based on provided temperatures."""
power_data: List[float] = []

if len(temperatures) != self.prediction_hours:
if len(temperatures) != self.hours:
raise ValueError(
f"The temperature array must contain exactly {self.prediction_hours} entries, "
f"The temperature array must contain exactly {self.hours} entries, "
"one for each hour of the day."
)
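Callers of Heatpump now size the temperature input from `hours` instead of `prediction_hours`. A small sketch of the calling convention; the module path and the simulation method name are assumptions, only the constructor signature and the length check appear in this diff:

```python
from akkudoktoreos.devices.heatpump import Heatpump  # assumed module path

hp = Heatpump(max_heat_output=5000, hours=24)  # 5 kW device, 24-hour horizon

# The simulation expects exactly `hours` temperature values, one per hour,
# and raises ValueError otherwise (see the check above).
temperatures = [5.0] * hp.hours
power_data = hp.simulate_24h(temperatures)  # assumed method name
```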
@@ -14,7 +14,7 @@ class InverterParameters(DeviceParameters):

device_id: str = Field(description="ID of inverter", examples=["inverter1"])
max_power_wh: float = Field(gt=0, examples=[10000])
battery: Optional[str] = Field(
battery_id: Optional[str] = Field(
default=None, description="ID of battery", examples=[None, "battery1"]
)

@@ -29,7 +29,7 @@ class Inverter(DeviceBase):

def _setup(self) -> None:
assert self.parameters is not None
if self.parameters.battery is None:
if self.parameters.battery_id is None:
# For the moment raise exception
# TODO: Make battery configurable by config
error_msg = "Battery for PV inverter is mandatory."

@@ -42,7 +42,7 @@ class Inverter(DeviceBase):

def _post_setup(self) -> None:
assert self.parameters is not None
self.battery = self.devices.get_device_by_id(self.parameters.battery)
self.battery = self.devices.get_device_by_id(self.parameters.battery_id)

def process_energy(
self, generation: float, consumption: float, hour: int
@@ -25,19 +25,19 @@ logger = get_logger(__name__)
|
||||
class MeasurementCommonSettings(SettingsBaseModel):
|
||||
"""Measurement Configuration."""
|
||||
|
||||
measurement_load0_name: Optional[str] = Field(
|
||||
load0_name: Optional[str] = Field(
|
||||
default=None, description="Name of the load0 source", examples=["Household", "Heat Pump"]
|
||||
)
|
||||
measurement_load1_name: Optional[str] = Field(
|
||||
load1_name: Optional[str] = Field(
|
||||
default=None, description="Name of the load1 source", examples=[None]
|
||||
)
|
||||
measurement_load2_name: Optional[str] = Field(
|
||||
load2_name: Optional[str] = Field(
|
||||
default=None, description="Name of the load2 source", examples=[None]
|
||||
)
|
||||
measurement_load3_name: Optional[str] = Field(
|
||||
load3_name: Optional[str] = Field(
|
||||
default=None, description="Name of the load3 source", examples=[None]
|
||||
)
|
||||
measurement_load4_name: Optional[str] = Field(
|
||||
load4_name: Optional[str] = Field(
|
||||
default=None, description="Name of the load4 source", examples=[None]
|
||||
)
|
||||
|
||||
@@ -50,42 +50,42 @@ class MeasurementDataRecord(DataRecord):
|
||||
"""
|
||||
|
||||
# Single loads, to be aggregated to total load
|
||||
measurement_load0_mr: Optional[float] = Field(
|
||||
load0_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Load0 meter reading [kWh]", examples=[40421]
|
||||
)
|
||||
measurement_load1_mr: Optional[float] = Field(
|
||||
load1_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Load1 meter reading [kWh]", examples=[None]
|
||||
)
|
||||
measurement_load2_mr: Optional[float] = Field(
|
||||
load2_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Load2 meter reading [kWh]", examples=[None]
|
||||
)
|
||||
measurement_load3_mr: Optional[float] = Field(
|
||||
load3_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Load3 meter reading [kWh]", examples=[None]
|
||||
)
|
||||
measurement_load4_mr: Optional[float] = Field(
|
||||
load4_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Load4 meter reading [kWh]", examples=[None]
|
||||
)
|
||||
|
||||
measurement_max_loads: ClassVar[int] = 5 # Maximum number of loads that can be set
|
||||
max_loads: ClassVar[int] = 5 # Maximum number of loads that can be set
|
||||
|
||||
measurement_grid_export_mr: Optional[float] = Field(
|
||||
grid_export_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Export to grid meter reading [kWh]", examples=[1000]
|
||||
)
|
||||
|
||||
measurement_grid_import_mr: Optional[float] = Field(
|
||||
grid_import_mr: Optional[float] = Field(
|
||||
default=None, ge=0, description="Import from grid meter reading [kWh]", examples=[1000]
|
||||
)
|
||||
|
||||
# Computed fields
|
||||
@computed_field # type: ignore[prop-decorator]
|
||||
@property
|
||||
def measurement_loads(self) -> List[str]:
|
||||
def loads(self) -> List[str]:
|
||||
"""Compute a list of active loads."""
|
||||
active_loads = []
|
||||
|
||||
# Loop through measurement_loadx
|
||||
for i in range(self.measurement_max_loads):
|
||||
load_attr = f"measurement_load{i}_mr"
|
||||
# Loop through loadx
|
||||
for i in range(self.max_loads):
|
||||
load_attr = f"load{i}_mr"
|
||||
|
||||
# Check if either attribute is set and add to active loads
|
||||
if getattr(self, load_attr, None):
|
||||
@@ -105,7 +105,7 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
||||
)
|
||||
|
||||
topics: ClassVar[List[str]] = [
|
||||
"measurement_load",
|
||||
"load",
|
||||
]
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
@@ -147,14 +147,16 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
||||
"""Provides measurement key for given name and topic."""
|
||||
topic = topic.lower()
|
||||
|
||||
print(self.topics)
|
||||
if topic not in self.topics:
|
||||
return None
|
||||
|
||||
topic_keys = [
|
||||
key for key in self.config.measurement.model_fields.keys() if key.startswith(topic)
|
||||
]
|
||||
print(topic_keys)
|
||||
key = None
|
||||
if topic == "measurement_load":
|
||||
if topic == "load":
|
||||
for config_key in topic_keys:
|
||||
if (
|
||||
config_key.endswith("_name")
|
||||
@@ -255,9 +257,9 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
||||
end_datetime = self[-1].date_time
|
||||
size = self._interval_count(start_datetime, end_datetime, interval)
|
||||
load_total_array = np.zeros(size)
|
||||
# Loop through measurement_load<x>_mr
|
||||
for i in range(self.record_class().measurement_max_loads):
|
||||
key = f"measurement_load{i}_mr"
|
||||
# Loop through load<x>_mr
|
||||
for i in range(self.record_class().max_loads):
|
||||
key = f"load{i}_mr"
|
||||
# Calculate load per interval
|
||||
load_array = self._energy_from_meter_readings(
|
||||
key=key, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
|
||||
|
@@ -110,12 +110,8 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
):
|
||||
"""Initialize the optimization problem with the required parameters."""
|
||||
self.opti_param: dict[str, Any] = {}
|
||||
self.fixed_eauto_hours = (
|
||||
self.config.prediction.prediction_hours - self.config.optimization.optimization_hours
|
||||
)
|
||||
self.possible_charge_values = (
|
||||
self.config.optimization.optimization_ev_available_charge_rates_percent
|
||||
)
|
||||
self.fixed_eauto_hours = self.config.prediction.hours - self.config.optimization.hours
|
||||
self.possible_charge_values = self.config.optimization.ev_available_charge_rates_percent
|
||||
self.verbose = verbose
|
||||
self.fix_seed = fixed_seed
|
||||
self.optimize_ev = True
|
||||
@@ -182,27 +178,25 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
total_states = 3 * len_ac
|
||||
|
||||
# 1. Mutating the charge_discharge part
|
||||
charge_discharge_part = individual[: self.config.prediction.prediction_hours]
|
||||
charge_discharge_part = individual[: self.config.prediction.hours]
|
||||
(charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)
|
||||
|
||||
# Instead of a fixed clamping to 0..8 or 0..6 dynamically:
|
||||
charge_discharge_mutated = np.clip(charge_discharge_mutated, 0, total_states - 1)
|
||||
individual[: self.config.prediction.prediction_hours] = charge_discharge_mutated
|
||||
individual[: self.config.prediction.hours] = charge_discharge_mutated
|
||||
|
||||
# 2. Mutating the EV charge part, if active
|
||||
if self.optimize_ev:
|
||||
ev_charge_part = individual[
|
||||
self.config.prediction.prediction_hours : self.config.prediction.prediction_hours
|
||||
* 2
|
||||
self.config.prediction.hours : self.config.prediction.hours * 2
|
||||
]
|
||||
(ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)
|
||||
ev_charge_part_mutated[
|
||||
self.config.prediction.prediction_hours - self.fixed_eauto_hours :
|
||||
] = [0] * self.fixed_eauto_hours
|
||||
individual[
|
||||
self.config.prediction.prediction_hours : self.config.prediction.prediction_hours
|
||||
* 2
|
||||
] = ev_charge_part_mutated
|
||||
ev_charge_part_mutated[self.config.prediction.hours - self.fixed_eauto_hours :] = [
|
||||
0
|
||||
] * self.fixed_eauto_hours
|
||||
individual[self.config.prediction.hours : self.config.prediction.hours * 2] = (
|
||||
ev_charge_part_mutated
|
||||
)
|
||||
|
||||
# 3. Mutating the appliance start time, if applicable
|
||||
if self.opti_param["home_appliance"] > 0:
|
||||
@@ -216,15 +210,13 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
def create_individual(self) -> list[int]:
|
||||
# Start with discharge states for the individual
|
||||
individual_components = [
|
||||
self.toolbox.attr_discharge_state()
|
||||
for _ in range(self.config.prediction.prediction_hours)
|
||||
self.toolbox.attr_discharge_state() for _ in range(self.config.prediction.hours)
|
||||
]
|
||||
|
||||
# Add EV charge index values if optimize_ev is True
|
||||
if self.optimize_ev:
|
||||
individual_components += [
|
||||
self.toolbox.attr_ev_charge_index()
|
||||
for _ in range(self.config.prediction.prediction_hours)
|
||||
self.toolbox.attr_ev_charge_index() for _ in range(self.config.prediction.hours)
|
||||
]
|
||||
|
||||
# Add the start time of the household appliance if it's being optimized
|
||||
@@ -257,7 +249,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
individual.extend(eautocharge_hours_index.tolist())
elif self.optimize_ev:
# If optimize_ev is active but no EV data is available, append zeros
individual.extend([0] * self.config.prediction.prediction_hours)
individual.extend([0] * self.config.prediction.hours)

# Add dishwasher start time if applicable
if self.opti_param.get("home_appliance", 0) > 0 and washingstart_int is not None:

@@ -279,17 +271,12 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
3. Dishwasher start time (integer if applicable).
"""
# Discharge hours as a NumPy array of ints
discharge_hours_bin = np.array(
individual[: self.config.prediction.prediction_hours], dtype=int
)
discharge_hours_bin = np.array(individual[: self.config.prediction.hours], dtype=int)

# EV charge hours as a NumPy array of ints (if optimize_ev is True)
eautocharge_hours_index = (
np.array(
individual[
self.config.prediction.prediction_hours : self.config.prediction.prediction_hours
* 2
],
individual[self.config.prediction.hours : self.config.prediction.hours * 2],
dtype=int,
)
if self.optimize_ev
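For orientation, the genome layout these slices decode is unchanged; only the attribute that fixes its length is renamed. A minimal sketch of the slicing with a local `hours` standing in for self.config.prediction.hours (synthetic data, hypothetical values):

```python
import numpy as np

hours = 48         # stands in for self.config.prediction.hours
optimize_ev = True

# Hypothetical flat genome: [discharge states | EV charge indices | appliance start hour]
rng = np.random.default_rng(0)
individual = (
    rng.integers(0, 3, hours).tolist()
    + rng.integers(0, 7, hours).tolist()
    + [13]
)

discharge_hours_bin = np.array(individual[:hours], dtype=int)
eautocharge_hours_index = (
    np.array(individual[hours : hours * 2], dtype=int) if optimize_ev else None
)
washingstart_int = individual[-1]  # only meaningful when a home appliance is optimized
print(discharge_hours_bin.size, eautocharge_hours_index.size, washingstart_int)
```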
@@ -401,7 +388,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
)
|
||||
self.ems.set_ev_charge_hours(eautocharge_hours_float)
|
||||
else:
|
||||
self.ems.set_ev_charge_hours(np.full(self.config.prediction.prediction_hours, 0))
|
||||
self.ems.set_ev_charge_hours(np.full(self.config.prediction.hours, 0))
|
||||
|
||||
return self.ems.simulate(self.ems.start_datetime.hour)
|
||||
|
||||
@@ -463,7 +450,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
# min_length = min(battery_soc_per_hour.size, discharge_hours_bin.size)
|
||||
# battery_soc_per_hour_tail = battery_soc_per_hour[-min_length:]
|
||||
# discharge_hours_bin_tail = discharge_hours_bin[-min_length:]
|
||||
# len_ac = len(self.config.optimization.optimization_ev_available_charge_rates_percent)
|
||||
# len_ac = len(self.config.optimization.ev_available_charge_rates_percent)
|
||||
|
||||
# # # Find hours where battery SoC is 0
|
||||
# # zero_soc_mask = battery_soc_per_hour_tail == 0
|
||||
@@ -512,7 +499,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
if parameters.eauto and self.ems.ev
|
||||
else 0
|
||||
)
|
||||
* self.config.optimization.optimization_penalty,
|
||||
* self.config.optimization.penalty,
|
||||
)
|
||||
|
||||
return (gesamtbilanz,)
|
||||
@@ -580,7 +567,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
start_hour = self.ems.start_datetime.hour
|
||||
|
||||
einspeiseverguetung_euro_pro_wh = np.full(
|
||||
self.config.prediction.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
|
||||
self.config.prediction.hours, parameters.ems.einspeiseverguetung_euro_pro_wh
|
||||
)
|
||||
|
||||
# TODO: Refactor device setup phase out
|
||||
@@ -591,7 +578,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
if parameters.pv_akku:
|
||||
akku = Battery(parameters.pv_akku)
|
||||
self.devices.add_device(akku)
|
||||
akku.set_charge_per_hour(np.full(self.config.prediction.prediction_hours, 1))
|
||||
akku.set_charge_per_hour(np.full(self.config.prediction.hours, 1))
|
||||
|
||||
eauto: Optional[Battery] = None
|
||||
if parameters.eauto:
|
||||
@@ -599,7 +586,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
||||
parameters.eauto,
|
||||
)
|
||||
self.devices.add_device(eauto)
|
||||
eauto.set_charge_per_hour(np.full(self.config.prediction.prediction_hours, 1))
|
||||
eauto.set_charge_per_hour(np.full(self.config.prediction.hours, 1))
|
||||
self.optimize_ev = (
|
||||
parameters.eauto.min_soc_percentage - parameters.eauto.initial_soc_percentage >= 0
|
||||
)
|
||||
|
@@ -12,18 +12,16 @@ class OptimizationCommonSettings(SettingsBaseModel):
|
||||
"""General Optimization Configuration.
|
||||
|
||||
Attributes:
|
||||
optimization_hours (int): Number of hours for optimizations.
|
||||
hours (int): Number of hours for optimizations.
|
||||
"""
|
||||
|
||||
optimization_hours: Optional[int] = Field(
|
||||
hours: Optional[int] = Field(
|
||||
default=48, ge=0, description="Number of hours into the future for optimizations."
|
||||
)
|
||||
|
||||
optimization_penalty: Optional[int] = Field(
|
||||
default=10, description="Penalty factor used in optimization."
|
||||
)
|
||||
penalty: Optional[int] = Field(default=10, description="Penalty factor used in optimization.")
|
||||
|
||||
optimization_ev_available_charge_rates_percent: Optional[List[float]] = Field(
|
||||
ev_available_charge_rates_percent: Optional[List[float]] = Field(
|
||||
default=[
|
||||
0.0,
|
||||
6.0 / 16.0,
|
||||
|
@@ -9,12 +9,12 @@ from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettin
|
||||
class ElecPriceCommonSettings(SettingsBaseModel):
|
||||
"""Electricity Price Prediction Configuration."""
|
||||
|
||||
elecprice_provider: Optional[str] = Field(
|
||||
provider: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Electricity price provider id of provider to be used.",
|
||||
examples=["ElecPriceAkkudoktor"],
|
||||
)
|
||||
elecprice_charges_kwh: Optional[float] = Field(
|
||||
charges_kwh: Optional[float] = Field(
|
||||
default=None, ge=0, description="Electricity price charges (€/kWh).", examples=[0.21]
|
||||
)
|
||||
|
||||
|
@@ -49,15 +49,15 @@ class ElecPriceProvider(PredictionProvider):
|
||||
electricity price_provider (str): Prediction provider for electricity price.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||
calculated based on `start_datetime` and `prediction_hours`.
|
||||
calculated based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||
based on `start_datetime` and `prediction_historic_hours`.
|
||||
based on `start_datetime` and `historic_hours`.
|
||||
"""
|
||||
|
||||
# overload
|
||||
@@ -71,4 +71,4 @@ class ElecPriceProvider(PredictionProvider):
|
||||
return "ElecPriceProvider"
|
||||
|
||||
def enabled(self) -> bool:
|
||||
return self.provider_id() == self.config.elecprice.elecprice_provider
|
||||
return self.provider_id() == self.config.elecprice.provider
|
||||
|
@@ -54,11 +54,11 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
||||
of hours into the future and retains historical data.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
||||
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
||||
hours (int, optional): Number of hours in the future for the forecast.
|
||||
historic_hours (int, optional): Number of past hours for retaining data.
|
||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
||||
|
||||
Methods:
|
||||
provider_id(): Returns a unique identifier for the provider.
|
||||
@@ -125,18 +125,16 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
capped_data = data.clip(min=lower_bound, max=upper_bound)
return capped_data

def _predict_ets(
self, history: np.ndarray, seasonal_periods: int, prediction_hours: int
) -> np.ndarray:
def _predict_ets(self, history: np.ndarray, seasonal_periods: int, hours: int) -> np.ndarray:
clean_history = self._cap_outliers(history)
model = ExponentialSmoothing(
clean_history, seasonal="add", seasonal_periods=seasonal_periods
).fit()
return model.forecast(prediction_hours)
return model.forecast(hours)

def _predict_median(self, history: np.ndarray, prediction_hours: int) -> np.ndarray:
def _predict_median(self, history: np.ndarray, hours: int) -> np.ndarray:
clean_history = self._cap_outliers(history)
return np.full(prediction_hours, np.median(clean_history))
return np.full(hours, np.median(clean_history))

def _update_data(
self, force_update: Optional[bool] = False
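The forecasting call itself is untouched by the rename; only the keyword changes from prediction_hours to hours. A self-contained sketch of the same ExponentialSmoothing pattern on synthetic hourly prices (synthetic data, not project code):

```python
import numpy as np
from statsmodels.tsa.holtwinters import ExponentialSmoothing

rng = np.random.default_rng(0)
t = np.arange(24 * 14)  # two weeks of hourly history
history = 0.30 + 0.05 * np.sin(2 * np.pi * t / 24) + rng.normal(0, 0.01, t.size)

model = ExponentialSmoothing(history, seasonal="add", seasonal_periods=24).fit()
hours = 48  # forecast horizon, matching the renamed parameter
prediction = model.forecast(hours)
print(prediction[:5])
```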
@@ -155,8 +153,8 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
||||
# Assumption that all lists are the same length and are ordered chronologically
|
||||
# in ascending order and have the same timestamps.
|
||||
|
||||
# Get elecprice_charges_kwh in wh
|
||||
charges_wh = (self.config.elecprice.elecprice_charges_kwh or 0) / 1000
|
||||
# Get charges_kwh in wh
|
||||
charges_wh = (self.config.elecprice.charges_kwh or 0) / 1000
|
||||
|
||||
highest_orig_datetime = None # newest datetime from the api after that we want to update.
|
||||
series_data = pd.Series(dtype=float) # Initialize an empty series
|
||||
@@ -183,27 +181,23 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
||||
assert highest_orig_datetime # mypy fix
|
||||
|
||||
# some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
|
||||
needed_prediction_hours = int(
|
||||
self.config.prediction.prediction_hours
|
||||
needed_hours = int(
|
||||
self.config.prediction.hours
|
||||
- ((highest_orig_datetime - self.start_datetime).total_seconds() // 3600)
|
||||
)
|
||||
|
||||
if needed_prediction_hours <= 0:
|
||||
if needed_hours <= 0:
|
||||
logger.warning(
|
||||
f"No prediction needed. needed_prediction_hours={needed_prediction_hours}, prediction_hours={self.config.prediction.prediction_hours},highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
|
||||
) # this might keep data longer than self.start_datetime + self.config.prediction.prediction_hours in the records
|
||||
f"No prediction needed. needed_hours={needed_hours}, hours={self.config.prediction.hours},highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
|
||||
) # this might keep data longer than self.start_datetime + self.config.prediction.hours in the records
|
||||
return
|
||||
|
||||
if amount_datasets > 800: # we do the full ets with seasons of 1 week
|
||||
prediction = self._predict_ets(
|
||||
history, seasonal_periods=168, prediction_hours=needed_prediction_hours
|
||||
)
|
||||
prediction = self._predict_ets(history, seasonal_periods=168, hours=needed_hours)
|
||||
elif amount_datasets > 168: # not enough data to do seasons of 1 week, but enough for 1 day
|
||||
prediction = self._predict_ets(
|
||||
history, seasonal_periods=24, prediction_hours=needed_prediction_hours
|
||||
)
|
||||
prediction = self._predict_ets(history, seasonal_periods=24, hours=needed_hours)
|
||||
elif amount_datasets > 0: # not enough data for ets, do median
|
||||
prediction = self._predict_median(history, prediction_hours=needed_prediction_hours)
|
||||
prediction = self._predict_median(history, hours=needed_hours)
|
||||
else:
|
||||
logger.error("No data available for prediction")
|
||||
raise ValueError("No data available")
|
||||
|
@@ -22,24 +22,22 @@ logger = get_logger(__name__)
|
||||
class ElecPriceImportCommonSettings(SettingsBaseModel):
|
||||
"""Common settings for elecprice data import from file or JSON String."""
|
||||
|
||||
elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
|
||||
import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None,
|
||||
description="Path to the file to import elecprice data from.",
|
||||
examples=[None, "/path/to/prices.json"],
|
||||
)
|
||||
|
||||
elecpriceimport_json: Optional[str] = Field(
|
||||
import_json: Optional[str] = Field(
|
||||
default=None,
|
||||
description="JSON string, dictionary of electricity price forecast value lists.",
|
||||
examples=['{"elecprice_marketprice_wh": [0.0003384, 0.0003318, 0.0003284]}'],
|
||||
)
|
||||
|
||||
# Validators
|
||||
@field_validator("elecpriceimport_file_path", mode="after")
|
||||
@field_validator("import_file_path", mode="after")
|
||||
@classmethod
|
||||
def validate_elecpriceimport_file_path(
|
||||
cls, value: Optional[Union[str, Path]]
|
||||
) -> Optional[Path]:
|
||||
def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, str):
|
||||
@@ -65,12 +63,12 @@ class ElecPriceImport(ElecPriceProvider, PredictionImportProvider):
|
||||
return "ElecPriceImport"
|
||||
|
||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||
if self.config.elecprice.provider_settings.elecpriceimport_file_path is not None:
|
||||
if self.config.elecprice.provider_settings.import_file_path is not None:
|
||||
self.import_from_file(
|
||||
self.config.elecprice.provider_settings.elecpriceimport_file_path,
|
||||
self.config.elecprice.provider_settings.import_file_path,
|
||||
key_prefix="elecprice",
|
||||
)
|
||||
if self.config.elecprice.provider_settings.elecpriceimport_json is not None:
|
||||
if self.config.elecprice.provider_settings.import_json is not None:
|
||||
self.import_from_json(
|
||||
self.config.elecprice.provider_settings.elecpriceimport_json, key_prefix="elecprice"
|
||||
self.config.elecprice.provider_settings.import_json, key_prefix="elecprice"
|
||||
)
|
||||
|
@@ -15,7 +15,7 @@ logger = get_logger(__name__)
|
||||
class LoadCommonSettings(SettingsBaseModel):
|
||||
"""Load Prediction Configuration."""
|
||||
|
||||
load_provider: Optional[str] = Field(
|
||||
provider: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Load provider id of provider to be used.",
|
||||
examples=["LoadAkkudoktor"],
|
||||
|
@@ -33,18 +33,18 @@ class LoadProvider(PredictionProvider):
|
||||
LoadProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||
|
||||
Configuration variables:
|
||||
load_provider (str): Prediction provider for load.
|
||||
provider (str): Prediction provider for load.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||
calculated based on `start_datetime` and `prediction_hours`.
|
||||
calculated based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||
based on `start_datetime` and `prediction_historic_hours`.
|
||||
based on `start_datetime` and `historic_hours`.
|
||||
"""
|
||||
|
||||
# overload
|
||||
@@ -58,4 +58,4 @@ class LoadProvider(PredictionProvider):
|
||||
return "LoadProvider"
|
||||
|
||||
def enabled(self) -> bool:
|
||||
return self.provider_id() == self.config.load.load_provider
|
||||
return self.provider_id() == self.config.load.provider
|
||||
|
@@ -111,7 +111,7 @@ class LoadAkkudoktor(LoadProvider):
|
||||
# We provide prediction starting at start of day, to be compatible to old system.
|
||||
# End date for prediction is prediction hours from now.
|
||||
date = self.start_datetime.start_of("day")
|
||||
end_date = self.start_datetime.add(hours=self.config.prediction.prediction_hours)
|
||||
end_date = self.start_datetime.add(hours=self.config.prediction.hours)
|
||||
while compare_datetimes(date, end_date).lt:
|
||||
# Extract mean (index 0) and standard deviation (index 1) for the given day and hour
|
||||
# Day indexing starts at 0, -1 because of that
|
||||
|
@@ -22,19 +22,19 @@ logger = get_logger(__name__)
|
||||
class LoadImportCommonSettings(SettingsBaseModel):
|
||||
"""Common settings for load data import from file or JSON string."""
|
||||
|
||||
load_import_file_path: Optional[Union[str, Path]] = Field(
|
||||
import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None,
|
||||
description="Path to the file to import load data from.",
|
||||
examples=[None, "/path/to/yearly_load.json"],
|
||||
)
|
||||
load_import_json: Optional[str] = Field(
|
||||
import_json: Optional[str] = Field(
|
||||
default=None,
|
||||
description="JSON string, dictionary of load forecast value lists.",
|
||||
examples=['{"load0_mean": [676.71, 876.19, 527.13]}'],
|
||||
)
|
||||
|
||||
# Validators
|
||||
@field_validator("load_import_file_path", mode="after")
|
||||
@field_validator("import_file_path", mode="after")
|
||||
@classmethod
|
||||
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||
if value is None:
|
||||
@@ -62,11 +62,7 @@ class LoadImport(LoadProvider, PredictionImportProvider):
|
||||
return "LoadImport"
|
||||
|
||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||
if self.config.load.provider_settings.load_import_file_path is not None:
|
||||
self.import_from_file(
|
||||
self.config.provider_settings.load_import_file_path, key_prefix="load"
|
||||
)
|
||||
if self.config.load.provider_settings.load_import_json is not None:
|
||||
self.import_from_json(
|
||||
self.config.load.provider_settings.load_import_json, key_prefix="load"
|
||||
)
|
||||
if self.config.load.provider_settings.import_file_path is not None:
|
||||
self.import_from_file(self.config.provider_settings.import_file_path, key_prefix="load")
|
||||
if self.config.load.provider_settings.import_json is not None:
|
||||
self.import_from_json(self.config.load.provider_settings.import_json, key_prefix="load")
|
||||
|
@@ -53,9 +53,9 @@ class PredictionCommonSettings(SettingsBaseModel):
|
||||
determines the time zone based on latitude and longitude.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (Optional[int]): Number of hours into the future for predictions.
|
||||
hours (Optional[int]): Number of hours into the future for predictions.
|
||||
Must be non-negative.
|
||||
prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.
|
||||
historic_hours (Optional[int]): Number of hours into the past for historical data.
|
||||
Must be non-negative.
|
||||
latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
|
||||
longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
|
||||
@@ -65,16 +65,16 @@ class PredictionCommonSettings(SettingsBaseModel):
and longitude.

Validators:
validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.
validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.
validate_hours (int): Ensures `hours` is a non-negative integer.
validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.
validate_latitude (float): Ensures `latitude` is within the range -90 to 90.
validate_longitude (float): Ensures `longitude` is within the range -180 to 180.
"""

prediction_hours: Optional[int] = Field(
hours: Optional[int] = Field(
default=48, ge=0, description="Number of hours into the future for predictions"
)
prediction_historic_hours: Optional[int] = Field(
historic_hours: Optional[int] = Field(
default=48,
ge=0,
description="Number of hours into the past for historical predictions data",
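Only the field names change; the ge=0 constraints and defaults stay. A small sketch of the constraint behavior with a plain pydantic model mirroring the two renamed fields (a stand-in, not the project's SettingsBaseModel):

```python
from typing import Optional
from pydantic import BaseModel, Field, ValidationError

class PredictionSettingsSketch(BaseModel):
    """Stand-in for PredictionCommonSettings with the renamed fields."""
    hours: Optional[int] = Field(default=48, ge=0)
    historic_hours: Optional[int] = Field(default=48, ge=0)

print(PredictionSettingsSketch().hours)          # 48 (default)
print(PredictionSettingsSketch(hours=24).hours)  # 24

try:
    PredictionSettingsSketch(hours=-1)           # violates ge=0
except ValidationError as exc:
    print(exc.errors()[0]["type"])               # greater_than_equal
```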
@@ -114,16 +114,16 @@ class PredictionStartEndKeepMixin(PredictionBase):
@computed_field # type: ignore[prop-decorator]
@property
def end_datetime(self) -> Optional[DateTime]:
"""Compute the end datetime based on the `start_datetime` and `prediction_hours`.
"""Compute the end datetime based on the `start_datetime` and `hours`.

Adjusts the calculated end time if DST transitions occur within the prediction window.

Returns:
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
"""
if self.start_datetime and self.config.prediction.prediction_hours:
if self.start_datetime and self.config.prediction.hours:
end_datetime = self.start_datetime + to_duration(
f"{self.config.prediction.prediction_hours} hours"
f"{self.config.prediction.hours} hours"
)
dst_change = end_datetime.offset_hours - self.start_datetime.offset_hours
logger.debug(f"Pre: {self.start_datetime}..{end_datetime}: DST change: {dst_change}")
@@ -147,10 +147,10 @@ class PredictionStartEndKeepMixin(PredictionBase):
|
||||
return None
|
||||
historic_hours = self.historic_hours_min()
|
||||
if (
|
||||
self.config.prediction.prediction_historic_hours
|
||||
and self.config.prediction.prediction_historic_hours > historic_hours
|
||||
self.config.prediction.historic_hours
|
||||
and self.config.prediction.historic_hours > historic_hours
|
||||
):
|
||||
historic_hours = int(self.config.prediction.prediction_historic_hours)
|
||||
historic_hours = int(self.config.prediction.historic_hours)
|
||||
return self.start_datetime - to_duration(f"{historic_hours} hours")
|
||||
|
||||
@computed_field # type: ignore[prop-decorator]
|
||||
|
@@ -19,7 +19,7 @@ class PVForecastCommonSettings(SettingsBaseModel):
|
||||
# Inverter Parameters
|
||||
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/inverter.html
|
||||
|
||||
pvforecast_provider: Optional[str] = Field(
|
||||
provider: Optional[str] = Field(
|
||||
default=None,
|
||||
description="PVForecast provider id of provider to be used.",
|
||||
examples=["PVForecastAkkudoktor"],
|
||||
|
@@ -28,18 +28,18 @@ class PVForecastProvider(PredictionProvider):
PVForecastProvider is a thread-safe singleton, ensuring only one instance of this class is created.

Configuration variables:
pvforecast_provider (str): Prediction provider for pvforecast.
provider (str): Prediction provider for pvforecast.

Attributes:
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
hours (int, optional): The number of hours into the future for which predictions are generated.
historic_hours (int, optional): The number of past hours for which historical data is retained.
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
start_datetime (datetime, optional): The starting datetime for predictions (inclusive), defaults to the current datetime if unspecified.
end_datetime (datetime, computed): The datetime representing the end of the prediction range (exclusive),
calculated based on `start_datetime` and `prediction_hours`.
calculated based on `start_datetime` and `hours`.
keep_datetime (datetime, computed): The earliest datetime for retaining historical data (inclusive), calculated
based on `start_datetime` and `prediction_historic_hours`.
based on `start_datetime` and `historic_hours`.
"""

# overload
@@ -54,6 +54,6 @@ class PVForecastProvider(PredictionProvider):
|
||||
|
||||
def enabled(self) -> bool:
|
||||
logger.debug(
|
||||
f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast.pvforecast_provider}"
|
||||
f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast.provider}"
|
||||
)
|
||||
return self.provider_id() == self.config.pvforecast.pvforecast_provider
|
||||
return self.provider_id() == self.config.pvforecast.provider
|
||||
|
@@ -14,21 +14,25 @@ Classes:
|
||||
Example:
|
||||
# Set up the configuration with necessary fields for URL generation
|
||||
settings_data = {
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
"pvforecast_provider": "Akkudoktor",
|
||||
"pvforecast0_peakpower": 5.0,
|
||||
"pvforecast0_surface_azimuth": -10,
|
||||
"pvforecast0_surface_tilt": 7,
|
||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||
"pvforecast0_inverter_paco": 10000,
|
||||
"pvforecast1_peakpower": 4.8,
|
||||
"pvforecast1_surface_azimuth": -90,
|
||||
"pvforecast1_surface_tilt": 7,
|
||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||
"pvforecast1_inverter_paco": 10000,
|
||||
"prediction": {
|
||||
"hours": 48,
|
||||
"historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
},
|
||||
"pvforecast": {
|
||||
"provider": "PVForecastAkkudoktor",
|
||||
"pvforecast0_peakpower": 5.0,
|
||||
"pvforecast0_surface_azimuth": -10,
|
||||
"pvforecast0_surface_tilt": 7,
|
||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||
"pvforecast0_inverter_paco": 10000,
|
||||
"pvforecast1_peakpower": 4.8,
|
||||
"pvforecast1_surface_azimuth": -90,
|
||||
"pvforecast1_surface_tilt": 7,
|
||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||
"pvforecast1_inverter_paco": 10000,
|
||||
}
|
||||
}
|
||||
|
||||
# Create the config instance from the provided data
|
||||
@@ -47,12 +51,12 @@ Example:
|
||||
print(forecast.report_ac_power_and_measurement())
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int): Number of hours into the future to forecast. Default is 48.
|
||||
prediction_historic_hours (int): Number of past hours to retain for analysis. Default is 24.
|
||||
hours (int): Number of hours into the future to forecast. Default is 48.
|
||||
historic_hours (int): Number of past hours to retain for analysis. Default is 24.
|
||||
latitude (float): Latitude for the forecast location.
|
||||
longitude (float): Longitude for the forecast location.
|
||||
start_datetime (datetime): Start time for the forecast, defaulting to current datetime.
|
||||
end_datetime (datetime): Computed end datetime based on `start_datetime` and `prediction_hours`.
|
||||
end_datetime (datetime): Computed end datetime based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime): Computed threshold datetime for retaining historical data.
|
||||
|
||||
Methods:
|
||||
@@ -159,13 +163,13 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
||||
of hours into the future and retains historical data.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
||||
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
||||
hours (int, optional): Number of hours in the future for the forecast.
|
||||
historic_hours (int, optional): Number of past hours for retaining data.
|
||||
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
||||
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
||||
|
||||
Methods:
|
||||
provider_id(): Returns a unique identifier for the provider.
|
||||
@@ -286,10 +290,10 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
||||
|
||||
# Assumption that all lists are the same length and are ordered chronologically
|
||||
# in ascending order and have the same timestamps.
|
||||
if len(akkudoktor_data.values[0]) < self.config.prediction.prediction_hours:
|
||||
if len(akkudoktor_data.values[0]) < self.config.prediction.hours:
|
||||
# Expect one value set per prediction hour
|
||||
error_msg = (
|
||||
f"The forecast must cover at least {self.config.prediction.prediction_hours} hours, "
|
||||
f"The forecast must cover at least {self.config.prediction.hours} hours, "
|
||||
f"but only {len(akkudoktor_data.values[0])} data sets are given in forecast data."
|
||||
)
|
||||
logger.error(f"Akkudoktor schema change: {error_msg}")
|
||||
@@ -318,9 +322,9 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
||||
|
||||
self.update_value(dt, data)
|
||||
|
||||
if len(self) < self.config.prediction.prediction_hours:
|
||||
if len(self) < self.config.prediction.hours:
|
||||
raise ValueError(
|
||||
f"The forecast must cover at least {self.config.prediction.prediction_hours} hours, "
|
||||
f"The forecast must cover at least {self.config.prediction.hours} hours, "
|
||||
f"but only {len(self)} hours starting from {self.start_datetime} "
|
||||
f"were predicted."
|
||||
)
|
||||
@@ -370,13 +374,13 @@ if __name__ == "__main__":
|
||||
# Set up the configuration with necessary fields for URL generation
|
||||
settings_data = {
|
||||
"prediction": {
|
||||
"prediction_hours": 48,
|
||||
"prediction_historic_hours": 24,
|
||||
"hours": 48,
|
||||
"historic_hours": 24,
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
},
|
||||
"pvforecast": {
|
||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
||||
"provider": "PVForecastAkkudoktor",
|
||||
"pvforecast0_peakpower": 5.0,
|
||||
"pvforecast0_surface_azimuth": -10,
|
||||
"pvforecast0_surface_tilt": 7,
|
||||
|
@@ -11,7 +11,7 @@ from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
|
||||
class WeatherCommonSettings(SettingsBaseModel):
|
||||
"""Weather Forecast Configuration."""
|
||||
|
||||
weather_provider: Optional[str] = Field(
|
||||
provider: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Weather provider id of provider to be used.",
|
||||
examples=["WeatherImport"],
|
||||
|
@@ -101,18 +101,18 @@ class WeatherProvider(PredictionProvider):
|
||||
WeatherProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||
|
||||
Configuration variables:
|
||||
weather_provider (str): Prediction provider for weather.
|
||||
provider (str): Prediction provider for weather.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||
calculated based on `start_datetime` and `prediction_hours`.
|
||||
calculated based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||
based on `start_datetime` and `prediction_historic_hours`.
|
||||
based on `start_datetime` and `historic_hours`.
|
||||
"""
|
||||
|
||||
# overload
|
||||
@@ -126,7 +126,7 @@ class WeatherProvider(PredictionProvider):
|
||||
return "WeatherProvider"
|
||||
|
||||
def enabled(self) -> bool:
|
||||
return self.provider_id() == self.config.weather.weather_provider
|
||||
return self.provider_id() == self.config.weather.provider
|
||||
|
||||
@classmethod
|
||||
def estimate_irradiance_from_cloud_cover(
|
||||
|
@@ -62,13 +62,13 @@ class WeatherBrightSky(WeatherProvider):
|
||||
of hours into the future and retains historical data.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
||||
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
||||
hours (int, optional): Number of hours in the future for the forecast.
|
||||
historic_hours (int, optional): Number of past hours for retaining data.
|
||||
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
||||
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
||||
|
||||
Methods:
|
||||
provider_id(): Returns a unique identifier for the provider.
|
||||
|
@@ -68,15 +68,15 @@ class WeatherClearOutside(WeatherProvider):
|
||||
WeatherClearOutside is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||
calculated based on `start_datetime` and `prediction_hours`.
|
||||
calculated based on `start_datetime` and `hours`.
|
||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||
based on `start_datetime` and `prediction_historic_hours`.
|
||||
based on `start_datetime` and `historic_hours`.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
|
@@ -22,22 +22,22 @@ logger = get_logger(__name__)
|
||||
class WeatherImportCommonSettings(SettingsBaseModel):
|
||||
"""Common settings for weather data import from file or JSON string."""
|
||||
|
||||
weatherimport_file_path: Optional[Union[str, Path]] = Field(
|
||||
import_file_path: Optional[Union[str, Path]] = Field(
|
||||
default=None,
|
||||
description="Path to the file to import weather data from.",
|
||||
examples=[None, "/path/to/weather_data.json"],
|
||||
)
|
||||
|
||||
weatherimport_json: Optional[str] = Field(
|
||||
import_json: Optional[str] = Field(
|
||||
default=None,
|
||||
description="JSON string, dictionary of weather forecast value lists.",
|
||||
examples=['{"weather_temp_air": [18.3, 17.8, 16.9]}'],
|
||||
)
|
||||
|
||||
# Validators
|
||||
@field_validator("weatherimport_file_path", mode="after")
|
||||
@field_validator("import_file_path", mode="after")
|
||||
@classmethod
|
||||
def validate_weatherimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||
def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, str):
|
||||
@@ -63,11 +63,11 @@ class WeatherImport(WeatherProvider, PredictionImportProvider):
|
||||
return "WeatherImport"
|
||||
|
||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||
if self.config.weather.provider_settings.weatherimport_file_path is not None:
|
||||
if self.config.weather.provider_settings.import_file_path is not None:
|
||||
self.import_from_file(
|
||||
self.config.weather.provider_settings.weatherimport_file_path, key_prefix="weather"
|
||||
self.config.weather.provider_settings.import_file_path, key_prefix="weather"
|
||||
)
|
||||
if self.config.weather.provider_settings.weatherimport_json is not None:
|
||||
if self.config.weather.provider_settings.import_json is not None:
|
||||
self.import_from_json(
|
||||
self.config.weather.provider_settings.weatherimport_json, key_prefix="weather"
|
||||
self.config.weather.provider_settings.import_json, key_prefix="weather"
|
||||
)
|
||||
|
@@ -33,6 +33,7 @@ from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
|
||||
from akkudoktoreos.prediction.load import LoadCommonSettings
|
||||
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
||||
from akkudoktoreos.prediction.prediction import PredictionCommonSettings, get_prediction
|
||||
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -152,20 +153,16 @@ def start_eosdash() -> subprocess.Popen:

if args is None:
# No command line arguments
host = config_eos.server.server_eosdash_host
port = config_eos.server.server_eosdash_port
eos_host = config_eos.server.server_eos_host
eos_port = config_eos.server.server_eos_port
host = config_eos.server.eosdash_host
port = config_eos.server.eosdash_port
eos_host = config_eos.server.host
eos_port = config_eos.server.port
log_level = "info"
access_log = False
reload = False
else:
host = args.host
port = (
config_eos.server.server_eosdash_port
if config_eos.server.server_eosdash_port
else (args.port + 1)
)
port = config_eos.server.eosdash_port if config_eos.server.eosdash_port else (args.port + 1)
eos_host = args.host
eos_port = args.port
log_level = args.log_level
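The server section follows the same drop-the-prefix pattern, so a settings merge now uses the short keys. A hedged sketch; the values are illustrative, only the key names come from this diff, and config_eos is the singleton from the earlier sketch:

```python
# Illustrative values; only the renamed keys are taken from the diff.
server_settings = {
    "server": {
        "host": "127.0.0.1",          # was: server_eos_host
        "port": 8503,                 # was: server_eos_port
        "eosdash_host": "127.0.0.1",  # was: server_eosdash_host
        "eosdash_port": 8504,         # was: server_eosdash_port
        "startup_eosdash": True,      # was: server_eos_startup_eosdash
        "verbose": False,             # was: server_eos_verbose
    }
}
config_eos.merge_settings_from_dict(server_settings)
```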
@@ -208,7 +205,7 @@ def start_eosdash() -> subprocess.Popen:
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Lifespan manager for the app."""
    # On startup
    if config_eos.server.server_eos_startup_eosdash:
    if config_eos.server.startup_eosdash:
        try:
            eosdash_process = start_eosdash()
        except Exception as e:
@@ -235,7 +232,7 @@ app = FastAPI(


# That's the problem
opt_class = optimization_problem(verbose=bool(config_eos.server.server_eos_verbose))
opt_class = optimization_problem(verbose=bool(config_eos.server.verbose))

server_dir = Path(__file__).parent.resolve()
@@ -610,7 +607,7 @@ def fastapi_strompreis() -> list[float]:
    Electricity price charges are added.

    Note:
        Set ElecPriceAkkudoktor as elecprice_provider, then update data with
        Set ElecPriceAkkudoktor as provider, then update data with
        '/v1/prediction/update'
        and then request data with
        '/v1/prediction/list?key=elecprice_marketprice_wh' or
@@ -618,7 +615,7 @@ def fastapi_strompreis() -> list[float]:
    """
    settings = SettingsEOS(
        elecprice=ElecPriceCommonSettings(
            elecprice_provider="ElecPriceAkkudoktor",
            provider="ElecPriceAkkudoktor",
        )
    )
    config_eos.merge_settings(settings=settings)
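The docstring points callers at the generic prediction endpoints rather than this legacy route. A hedged client-side sketch against those documented URLs; the host, the port (8503 is the default from this commit), and the HTTP method of the update call are assumptions:

```python
# Illustrative client calls against the endpoints named in the docstring above.
import requests

base = "http://localhost:8503"  # assumed host and default EOS port

# Trigger a prediction update (assumed to be a POST; adjust if registered differently),
# then fetch the electricity market price series by key.
requests.post(f"{base}/v1/prediction/update")
prices = requests.get(
    f"{base}/v1/prediction/list", params={"key": "elecprice_marketprice_wh"}
).json()
print(prices[:3])
```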
@@ -670,10 +667,10 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
    """
    settings = SettingsEOS(
        prediction=PredictionCommonSettings(
            prediction_hours=request.hours,
            hours=request.hours,
        ),
        load=LoadCommonSettings(
            load_provider="LoadAkkudoktor",
            provider="LoadAkkudoktor",
            provider_settings=LoadAkkudoktorCommonSettings(
                loadakkudoktor_year_energy=request.year_energy,
            ),
@@ -684,7 +681,7 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:

    # Insert measured data into EOS measurement
    # Convert from energy per interval to dummy energy meter readings
    measurement_key = "measurement_load0_mr"
    measurement_key = "load0_mr"
    measurement_eos.key_delete_by_datetime(key=measurement_key)  # delete all load0_mr measurements
    energy = {}
    try:
@@ -747,14 +744,14 @@ def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
        year_energy (float): Yearly energy consumption in Wh.

    Note:
        Set LoadAkkudoktor as load_provider, then update data with
        Set LoadAkkudoktor as provider, then update data with
        '/v1/prediction/update'
        and then request data with
        '/v1/prediction/list?key=load_mean' instead.
    """
    settings = SettingsEOS(
        load=LoadCommonSettings(
            load_provider="LoadAkkudoktor",
            provider="LoadAkkudoktor",
            provider_settings=LoadAkkudoktorCommonSettings(
                loadakkudoktor_year_energy=year_energy / 1000,  # Convert to kWh
            ),
@@ -800,21 +797,25 @@ def fastapi_pvforecast() -> ForecastResponse:
    filled with the first available forecast value.

    Note:
        Set PVForecastAkkudoktor as pvforecast_provider, then update data with
        Set PVForecastAkkudoktor as provider, then update data with
        '/v1/prediction/update'
        and then request data with
        '/v1/prediction/list?key=pvforecast_ac_power' and
        '/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.
    """
    settings = SettingsEOS(
        elecprice_provider="PVForecastAkkudoktor",
    )
    settings = SettingsEOS(pvforecast=PVForecastCommonSettings(provider="PVForecastAkkudoktor"))
    config_eos.merge_settings(settings=settings)

    ems_eos.set_start_datetime()  # Set energy management start datetime to current hour.

    # Create PV forecast
    prediction_eos.update_data(force_update=True)
    try:
        prediction_eos.update_data(force_update=True)
    except ValueError as e:
        raise HTTPException(
            status_code=404,
            detail=f"Can not get the PV forecast: {e}",
        )

    # Get the forecast starting at the start of the day
    start_datetime = to_datetime().start_of("day")
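The forecast update is now wrapped so that a failing provider surfaces as an HTTP 404 instead of an unhandled exception. A self-contained sketch of that pattern with stand-in names, not the EOS route itself:

```python
# Minimal sketch: a ValueError from the data update becomes an HTTP error response.
# The app, route path, and update_forecast() are stand-ins, not EOS code.
from fastapi import FastAPI, HTTPException

app = FastAPI()


def update_forecast() -> list[float]:
    raise ValueError("no PV forecast data available")  # simulate a failed update


@app.get("/demo/pvforecast")
def demo_pvforecast() -> list[float]:
    try:
        return update_forecast()
    except ValueError as e:
        raise HTTPException(status_code=404, detail=f"Can not get the PV forecast: {e}")
```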
@@ -901,9 +902,9 @@ async def proxy_put(request: Request, path: str) -> Response:


async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse | HTMLResponse]:
    if config_eos.server.server_eosdash_host and config_eos.server.server_eosdash_port:
    if config_eos.server.eosdash_host and config_eos.server.eosdash_port:
        # Proxy to EOSdash server
        url = f"http://{config_eos.server.server_eosdash_host}:{config_eos.server.server_eosdash_port}/{path}"
        url = f"http://{config_eos.server.eosdash_host}:{config_eos.server.eosdash_port}/{path}"
        headers = dict(request.headers)

        data = await request.body()
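The hunk above only shows how the target URL is built from the renamed `eosdash_host`/`eosdash_port` fields; the forwarding itself amounts to replaying the incoming request against that URL. A generic pass-through sketch with httpx, illustrating the pattern rather than reproducing the EOS implementation:

```python
# Generic reverse-proxy pass-through sketch (illustrative, not EOS code).
from fastapi import FastAPI, Request, Response
import httpx

app = FastAPI()

EOSDASH_HOST = "0.0.0.0"  # assumed defaults matching the renamed settings
EOSDASH_PORT = 8504


@app.api_route("/{path:path}", methods=["GET", "POST", "PUT"])
async def proxy(request: Request, path: str) -> Response:
    url = f"http://{EOSDASH_HOST}:{EOSDASH_PORT}/{path}"
    data = await request.body()
    async with httpx.AsyncClient() as client:
        # Headers are passed through unchanged here for brevity.
        upstream = await client.request(
            request.method, url, headers=dict(request.headers), content=data
        )
    return Response(content=upstream.content, status_code=upstream.status_code)
```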
@@ -925,9 +926,9 @@ async def proxy(request: Request, path: str) -> Union[Response | RedirectRespons
            error_message=f"""<pre>
EOSdash server not reachable: '{url}'
Did you start the EOSdash server
or set 'server_eos_startup_eosdash'?
or set 'startup_eosdash'?
If there is no application server intended please
set 'server_eosdash_host' or 'server_eosdash_port' to None.
set 'eosdash_host' or 'eosdash_port' to None.
</pre>
""",
            error_details=f"{e}",
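Following the renamed hint in the error message, a deployment without EOSdash would disable the automatic startup and clear the proxy target. A sketch of such a settings fragment using the new keys; how it is merged (config file, environment, or API) depends on the deployment:

```python
# Renamed server settings for a deployment without EOSdash (illustrative fragment).
server_settings = {
    "server": {
        "startup_eosdash": False,  # do not spawn the EOSdash process
        "eosdash_host": None,      # no proxy target ...
        "eosdash_port": None,      # ... so requests are not forwarded to EOSdash
    }
}
```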
@@ -991,8 +992,8 @@ def main() -> None:
    it starts the EOS server with the specified configurations.

    Command-line Arguments:
    --host (str): Host for the EOS server (default: value from config_eos).
    --port (int): Port for the EOS server (default: value from config_eos).
    --host (str): Host for the EOS server (default: value from config).
    --port (int): Port for the EOS server (default: value from config).
    --log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
    --access_log (bool): Enable or disable access log. Options: True or False (default: False).
    --reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
@@ -1003,13 +1004,13 @@ def main() -> None:
    parser.add_argument(
        "--host",
        type=str,
        default=str(config_eos.server.server_eos_host),
        default=str(config_eos.server.host),
        help="Host for the EOS server (default: value from config)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=config_eos.server.server_eos_port,
        default=config_eos.server.port,
        help="Port for the EOS server (default: value from config)",
    )

@@ -1038,7 +1039,7 @@ def main() -> None:
    try:
        run_eos(args.host, args.port, args.log_level, args.access_log, args.reload)
    except:
        exit(1)
        sys.exit(1)


if __name__ == "__main__":
@@ -1,5 +1,6 @@
import argparse
import os
import sys
from functools import reduce
from typing import Any, Union

@@ -165,10 +166,10 @@ def main() -> None:
    it starts the EOSdash server with the specified configurations.

    Command-line Arguments:
    --host (str): Host for the EOSdash server (default: value from config_eos).
    --port (int): Port for the EOSdash server (default: value from config_eos).
    --eos-host (str): Host for the EOS server (default: value from config_eos).
    --eos-port (int): Port for the EOS server (default: value from config_eos).
    --host (str): Host for the EOSdash server (default: value from config).
    --port (int): Port for the EOSdash server (default: value from config).
    --eos-host (str): Host for the EOS server (default: value from config).
    --eos-port (int): Port for the EOS server (default: value from config).
    --log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
    --access_log (bool): Enable or disable access log. Options: True or False (default: False).
    --reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
@@ -179,28 +180,28 @@ def main() -> None:
    parser.add_argument(
        "--host",
        type=str,
        default=str(config_eos.server.server_eosdash_host),
        help="Host for the EOSdash server (default: value from config_eos)",
        default=str(config_eos.server.eosdash_host),
        help="Host for the EOSdash server (default: value from config)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=config_eos.server.server_eosdash_port,
        help="Port for the EOSdash server (default: value from config_eos)",
        default=config_eos.server.eosdash_port,
        help="Port for the EOSdash server (default: value from config)",
    )

    # EOS Host and port arguments with defaults from config_eos
    parser.add_argument(
        "--eos-host",
        type=str,
        default=str(config_eos.server.server_eos_host),
        help="Host for the EOS server (default: value from config_eos)",
        default=str(config_eos.server.host),
        help="Host for the EOS server (default: value from config)",
    )
    parser.add_argument(
        "--eos-port",
        type=int,
        default=config_eos.server.server_eos_port,
        help="Port for the EOS server (default: value from config_eos)",
        default=config_eos.server.port,
        help="Port for the EOS server (default: value from config)",
    )

    # Optional arguments for log_level, access_log, and reload
@@ -228,7 +229,7 @@ def main() -> None:
    try:
        run_eosdash(args.host, args.port, args.log_level, args.access_log, args.reload)
    except:
        exit(1)
        sys.exit(1)


if __name__ == "__main__":
@@ -17,22 +17,18 @@ class ServerCommonSettings(SettingsBaseModel):
    To be added
    """

    server_eos_host: Optional[IPvAnyAddress] = Field(
        default="0.0.0.0", description="EOS server IP address."
    )
    server_eos_port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
    server_eos_verbose: Optional[bool] = Field(default=False, description="Enable debug output")
    server_eos_startup_eosdash: Optional[bool] = Field(
    host: Optional[IPvAnyAddress] = Field(default="0.0.0.0", description="EOS server IP address.")
    port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
    verbose: Optional[bool] = Field(default=False, description="Enable debug output")
    startup_eosdash: Optional[bool] = Field(
        default=True, description="EOS server to start EOSdash server."
    )
    server_eosdash_host: Optional[IPvAnyAddress] = Field(
    eosdash_host: Optional[IPvAnyAddress] = Field(
        default="0.0.0.0", description="EOSdash server IP address."
    )
    server_eosdash_port: Optional[int] = Field(
        default=8504, description="EOSdash server IP port number."
    )
    eosdash_port: Optional[int] = Field(default=8504, description="EOSdash server IP port number.")

    @field_validator("server_eos_port", "server_eosdash_port")
    @field_validator("port", "eosdash_port")
    def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
        if value is not None and not (1024 <= value <= 49151):
            raise ValueError("Server port number must be between 1024 and 49151.")
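A standalone sketch of the renamed server fields and the port range check, using plain pydantic `BaseModel` instead of the EOS `SettingsBaseModel` so it runs on its own:

```python
# Mirrors the renamed fields and the 1024..49151 port rule from the class above;
# the model name is ours, and defaults/descriptions are reduced for brevity.
from typing import Optional

from pydantic import BaseModel, Field, IPvAnyAddress, field_validator


class ServerSettingsSketch(BaseModel):
    host: Optional[IPvAnyAddress] = Field(default="0.0.0.0")
    port: Optional[int] = Field(default=8503)
    eosdash_host: Optional[IPvAnyAddress] = Field(default="0.0.0.0")
    eosdash_port: Optional[int] = Field(default=8504)

    @field_validator("port", "eosdash_port")
    @classmethod
    def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
        if value is not None and not (1024 <= value <= 49151):
            raise ValueError("Server port number must be between 1024 and 49151.")
        return value


print(ServerSettingsSketch().port)  # -> 8503 (defaults pass validation)
try:
    ServerSettingsSketch(eosdash_port=80)
except Exception as err:
    print(f"rejected: {err}")  # port outside 1024..49151
```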
@@ -57,6 +57,6 @@ class NumpyEncoder(json.JSONEncoder):
# # Example usage
# start_date = datetime.datetime(2024, 3, 31)  # Date of the DST change
# if ist_dst_wechsel(start_date):
#     prediction_hours = 23  # Adjust to 23 hours for DST change days
#     hours = 23  # Adjust to 23 hours for DST change days
# else:
#     prediction_hours = 24  # Default value for days without DST change
#     hours = 24  # Default value for days without DST change
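The commented example reduces the horizon to 23 hours on the spring DST change day. A runnable sketch of such a check with the standard library; the timezone and the helper name are assumptions, since `ist_dst_wechsel` itself is not shown here:

```python
# Illustrative DST-change check; timezone and function name are assumptions.
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

TZ = ZoneInfo("Europe/Berlin")


def is_dst_change_day(day: datetime) -> bool:
    """True if the UTC offset changes between the start of `day` and the next day."""
    start = day.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=TZ)
    next_day = start + timedelta(days=1)
    return start.utcoffset() != next_day.utcoffset()


start_date = datetime(2024, 3, 31)  # spring DST change in Europe/Berlin
hours = 23 if is_dst_change_day(start_date) else 24
print(hours)  # -> 23
```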