* Mypy: Initial support

 * Add to pre-commit (currently installs its own deps; this could maybe be
   changed to the poetry venv in the future to reuse the environment and
   avoid duplicating the type stub deps). See the usage sketch below the
   commit details.
 * Add type hints.

* Mypy: Add missing annotations
Dominique Lasserre 2024-11-26 22:28:05 +01:00 committed by GitHub
parent 2a163569bc
commit 1163ddb4ac
31 changed files with 637 additions and 531 deletions
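
With these changes the type check can be invoked in two ways; a minimal sketch, assuming the development venv and the pre-commit hooks are already installed:

    pre-commit run mypy --all-files   # isolated hook environment with the pinned stub packages
    make mypy                         # runs .venv/bin/mypy against the [tool.mypy] config in pyproject.toml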

View File

@ -24,3 +24,11 @@ repos:
args: [--fix]
# Run the formatter.
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: 'v1.13.0'
hooks:
- id: mypy
additional_dependencies:
- "types-requests==2.32.0.20241016"
- "pandas-stubs==2.2.3.241009"
pass_filenames: false
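
Note on pass_filenames: false: pre-commit then does not pass the staged file names to the hook, so mypy falls back to its own configuration. Assuming the [tool.mypy] section added to pyproject.toml below, the hook is effectively equivalent to a bare invocation:

    mypy    # checks files = ["src", "tests"] as configured in pyproject.toml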

View File

@ -1,5 +1,5 @@
# Define the targets
.PHONY: help venv pip install dist test test-full docker-run docker-build docs read-docs clean format run run-dev
.PHONY: help venv pip install dist test test-full docker-run docker-build docs read-docs clean format mypy run run-dev
# Default target
all: help
@ -11,6 +11,7 @@ help:
@echo " pip - Install dependencies from requirements.txt."
@echo " pip-dev - Install dependencies from requirements-dev.txt."
@echo " format - Format source code."
@echo " mypy - Run mypy."
@echo " install - Install EOS in editable form (development mode) into virtual environment."
@echo " docker-run - Run entire setup on docker"
@echo " docker-build - Rebuild docker image"
@ -100,6 +101,10 @@ test-full:
format:
.venv/bin/pre-commit run --all-files
# Target to format code.
mypy:
.venv/bin/mypy
# Run entire setup on docker
docker-run:
@docker compose up --remove-orphans
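
The new mypy target assumes the development dependencies are already installed (mypy and the stub packages are pinned in requirements-dev.txt below). Assuming pip-dev installs requirements-dev.txt into .venv as the help text states, a plausible first-time sequence would be:

    make pip-dev
    make mypy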

View File

@ -363,17 +363,17 @@
"hours": {
"type": "integer",
"title": "Hours",
"default": "Amount of hours the simulation is done for."
"description": "Amount of hours the simulation is done for."
},
"kapazitaet_wh": {
"type": "integer",
"title": "Kapazitaet Wh",
"default": "The capacity of the EV\u2019s battery in watt-hours."
"description": "The capacity of the EV\u2019s battery in watt-hours."
},
"lade_effizienz": {
"type": "number",
"title": "Lade Effizienz",
"default": "The charging efficiency as a float."
"description": "The charging efficiency as a float."
},
"max_ladeleistung_w": {
"type": "integer",
@ -396,6 +396,9 @@
"charge_array",
"discharge_array",
"entlade_effizienz",
"hours",
"kapazitaet_wh",
"lade_effizienz",
"max_ladeleistung_w",
"soc_wh",
"start_soc_prozent"
@ -533,7 +536,14 @@
}
},
"eauto": {
"$ref": "#/components/schemas/EAutoParameters"
"anyOf": [
{
"$ref": "#/components/schemas/EAutoParameters"
},
{
"type": "null"
}
]
},
"dishwasher": {
"anyOf": [
@ -546,12 +556,19 @@
]
},
"temperature_forecast": {
"items": {
"type": "number"
},
"type": "array",
"anyOf": [
{
"items": {
"type": "number"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Temperature Forecast",
"default": "An array of floats representing the temperature forecast in degrees Celsius for different time intervals."
"description": "An array of floats representing the temperature forecast in degrees Celsius for different time intervals."
},
"start_solution": {
"anyOf": [
@ -603,11 +620,33 @@
"title": "Discharge Allowed",
"description": "Array with discharge values (1 for discharge, 0 otherwise)."
},
"eautocharge_hours_float": {
"anyOf": [
{
"items": {
"type": "number"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Eautocharge Hours Float",
"description": "TBD"
},
"result": {
"$ref": "#/components/schemas/SimulationResult"
},
"eauto_obj": {
"$ref": "#/components/schemas/EAutoResult"
"anyOf": [
{
"$ref": "#/components/schemas/EAutoResult"
},
{
"type": "null"
}
]
},
"start_solution": {
"anyOf": [
@ -642,6 +681,7 @@
"ac_charge",
"dc_charge",
"discharge_allowed",
"eautocharge_hours_float",
"result",
"eauto_obj"
],

View File

@ -72,3 +72,25 @@ convention = "google"
minversion = "8.3.3"
pythonpath = [ "src", ]
testpaths = [ "tests", ]
[tool.mypy]
files = ["src", "tests"]
exclude = "class_soc_calc\\.py$"
check_untyped_defs = true
warn_unused_ignores = true
[[tool.mypy.overrides]]
module = "akkudoktoreos.*"
disallow_untyped_defs = true
[[tool.mypy.overrides]]
module = "sklearn.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "deap.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "xprocess.*"
ignore_missing_imports = true
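
To illustrate the effect of the akkudoktoreos.* override (disallow_untyped_defs = true): inside the package every function must be fully annotated, while unannotated functions elsewhere (e.g. in tests) are still permitted, with their bodies type-checked because check_untyped_defs is enabled. A hypothetical example, not taken from the repo:

    # Hypothetical module under src/akkudoktoreos/ showing the override's effect.

    def ladezustand_in_prozent(soc_wh: float, kapazitaet_wh: float) -> float:
        # Fully annotated: accepted under disallow_untyped_defs = true.
        return (soc_wh / kapazitaet_wh) * 100

    def unannotated(soc_wh, kapazitaet_wh):  # error: Function is missing a type annotation
        return (soc_wh / kapazitaet_wh) * 100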

View File

@ -8,3 +8,6 @@ pytest==8.3.3
pytest-cov==6.0.0
pytest-xprocess==1.0.2
pre-commit
mypy==1.13.0
types-requests==2.32.0.20241016
pandas-stubs==2.2.3.241009

View File

@ -7,11 +7,8 @@ import numpy as np
from akkudoktoreos.config import get_working_dir, load_config
from akkudoktoreos.optimization.genetic import (
OptimizationParameters,
OptimizeResponse,
optimization_problem,
)
from akkudoktoreos.utils import NumpyEncoder
from akkudoktoreos.visualize import visualisiere_ergebnisse
start_hour = 0
@ -299,30 +296,4 @@ elapsed_time = end_time - start_time
print(f"Elapsed time: {elapsed_time:.4f} seconds")
ac_charge, dc_charge, discharge = (
ergebnis["ac_charge"],
ergebnis["dc_charge"],
ergebnis["discharge_allowed"],
)
visualisiere_ergebnisse(
parameters.ems.gesamtlast,
parameters.ems.pv_prognose_wh,
parameters.ems.strompreis_euro_pro_wh,
ergebnis["result"],
ac_charge,
dc_charge,
discharge,
parameters.temperature_forecast,
start_hour,
einspeiseverguetung_euro_pro_wh=np.full(
config.eos.feed_in_tariff_eur_per_wh, parameters.ems.einspeiseverguetung_euro_pro_wh
),
config=config,
)
json_data = NumpyEncoder.dumps(ergebnis)
print(json_data)
OptimizeResponse(**ergebnis)
print(ergebnis.model_dump())

View File

@ -14,7 +14,7 @@ class HomeApplianceParameters(BaseModel):
class HomeAppliance:
def __init__(self, parameters: HomeApplianceParameters, hours=None):
def __init__(self, parameters: HomeApplianceParameters, hours: int):
self.hours = hours # Total duration for which the planning is done
self.consumption_wh = (
parameters.consumption_wh
@ -22,7 +22,7 @@ class HomeAppliance:
self.duration_h = parameters.duration_h # Duration of use in hours
self.load_curve = np.zeros(self.hours) # Initialize the load curve with zeros
def set_starting_time(self, start_hour, global_start_hour=0):
def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
"""Sets the start time of the device and generates the corresponding load curve.
:param start_hour: The hour at which the device should start.
@ -40,15 +40,15 @@ class HomeAppliance:
# Set the power for the duration of use in the load curve array
self.load_curve[start_hour : start_hour + self.duration_h] = power_per_hour
def reset(self):
def reset(self) -> None:
"""Resets the load curve."""
self.load_curve = np.zeros(self.hours)
def get_load_curve(self):
def get_load_curve(self) -> np.ndarray:
"""Returns the current load curve."""
return self.load_curve
def get_load_for_hour(self, hour):
def get_load_for_hour(self, hour: int) -> float:
"""Returns the load for a specific hour.
:param hour: The hour for which the load is queried.
@ -59,6 +59,6 @@ class HomeAppliance:
return self.load_curve[hour]
def get_latest_starting_point(self):
def get_latest_starting_point(self) -> int:
"""Returns the latest possible start time at which the device can still run completely."""
return self.hours - self.duration_h

View File

@ -13,7 +13,7 @@ Key features:
import json
import os
import shutil
from datetime import datetime, timedelta
from datetime import date, datetime, timedelta
from pathlib import Path
from typing import Any, Optional
@ -273,9 +273,7 @@ def get_working_dir() -> Path:
return working_dir
def get_start_enddate(
prediction_hours: int, startdate: Optional[datetime] = None
) -> tuple[str, str]:
def get_start_enddate(prediction_hours: int, startdate: Optional[date] = None) -> tuple[str, str]:
"""Calculate the start and end dates based on the given prediction hours and optional start date.
Args:

View File

@ -1,19 +1,21 @@
from typing import Optional
from typing import Any, Optional
import numpy as np
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, field_validator
from akkudoktoreos.utils.utils import NumpyEncoder
def max_ladeleistung_w_field(default=None):
def max_ladeleistung_w_field(default: Optional[float] = None) -> Optional[float]:
return Field(
default,
default=default,
gt=0,
description="An integer representing the charging power of the battery in watts.",
)
def start_soc_prozent_field(description: str):
return Field(0, ge=0, le=100, description=description)
def start_soc_prozent_field(description: str) -> int:
return Field(default=0, ge=0, le=100, description=description)
class BaseAkkuParameters(BaseModel):
@ -21,20 +23,23 @@ class BaseAkkuParameters(BaseModel):
gt=0, description="An integer representing the capacity of the battery in watt-hours."
)
lade_effizienz: float = Field(
0.88, gt=0, le=1, description="A float representing the charging efficiency of the battery."
default=0.88,
gt=0,
le=1,
description="A float representing the charging efficiency of the battery.",
)
entlade_effizienz: float = Field(0.88, gt=0, le=1)
entlade_effizienz: float = Field(default=0.88, gt=0, le=1)
max_ladeleistung_w: Optional[float] = max_ladeleistung_w_field()
start_soc_prozent: int = start_soc_prozent_field(
"An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)."
)
min_soc_prozent: int = Field(
0,
default=0,
ge=0,
le=100,
description="An integer representing the minimum state of charge (SOC) of the battery in percentage.",
)
max_soc_prozent: int = Field(100, ge=0, le=100)
max_soc_prozent: int = Field(default=100, ge=0, le=100)
class PVAkkuParameters(BaseAkkuParameters):
@ -48,6 +53,36 @@ class EAutoParameters(BaseAkkuParameters):
)
class EAutoResult(BaseModel):
"""This object contains information related to the electric vehicle and its charging and discharging behavior."""
charge_array: list[float] = Field(
description="Indicates for each hour whether the EV is charging (`0` for no charging, `1` for charging)."
)
discharge_array: list[int] = Field(
description="Indicates for each hour whether the EV is discharging (`0` for no discharging, `1` for discharging)."
)
entlade_effizienz: float = Field(description="The discharge efficiency as a float.")
hours: int = Field(description="Amount of hours the simulation is done for.")
kapazitaet_wh: int = Field(description="The capacity of the EVs battery in watt-hours.")
lade_effizienz: float = Field(description="The charging efficiency as a float.")
max_ladeleistung_w: int = Field(description="The maximum charging power of the EV in watts.")
soc_wh: float = Field(
description="The state of charge of the battery in watt-hours at the start of the simulation."
)
start_soc_prozent: int = Field(
description="The state of charge of the battery in percentage at the start of the simulation."
)
@field_validator(
"discharge_array",
"charge_array",
mode="before",
)
def convert_numpy(cls, field: Any) -> Any:
return NumpyEncoder.convert_numpy(field)[0]
class PVAkku:
def __init__(self, parameters: BaseAkkuParameters, hours: int = 24):
# Battery capacity in Wh
@ -73,40 +108,20 @@ class PVAkku:
self.min_soc_wh = (self.min_soc_prozent / 100) * self.kapazitaet_wh
self.max_soc_wh = (self.max_soc_prozent / 100) * self.kapazitaet_wh
def to_dict(self):
def to_dict(self) -> dict[str, Any]:
return {
"kapazitaet_wh": self.kapazitaet_wh,
"start_soc_prozent": self.start_soc_prozent,
"soc_wh": self.soc_wh,
"hours": self.hours,
"discharge_array": self.discharge_array.tolist(), # Convert np.array to list
"charge_array": self.charge_array.tolist(),
"discharge_array": self.discharge_array,
"charge_array": self.charge_array,
"lade_effizienz": self.lade_effizienz,
"entlade_effizienz": self.entlade_effizienz,
"max_ladeleistung_w": self.max_ladeleistung_w,
}
@classmethod
def from_dict(cls, data):
# Create a new object with basic data
obj = cls(
kapazitaet_wh=data["kapazitaet_wh"],
hours=data["hours"],
lade_effizienz=data["lade_effizienz"],
entlade_effizienz=data["entlade_effizienz"],
max_ladeleistung_w=data["max_ladeleistung_w"],
start_soc_prozent=data["start_soc_prozent"],
)
# Set arrays
obj.discharge_array = np.array(data["discharge_array"])
obj.charge_array = np.array(data["charge_array"])
obj.soc_wh = data[
"soc_wh"
] # Set current state of charge, which may differ from start_soc_prozent
return obj
def reset(self):
def reset(self) -> None:
self.soc_wh = (self.start_soc_prozent / 100) * self.kapazitaet_wh
# Ensure soc_wh is within min and max limits
self.soc_wh = min(max(self.soc_wh, self.min_soc_wh), self.max_soc_wh)
@ -114,22 +129,22 @@ class PVAkku:
self.discharge_array = np.full(self.hours, 1)
self.charge_array = np.full(self.hours, 1)
def set_discharge_per_hour(self, discharge_array):
def set_discharge_per_hour(self, discharge_array: np.ndarray) -> None:
assert len(discharge_array) == self.hours
self.discharge_array = np.array(discharge_array)
def set_charge_per_hour(self, charge_array):
def set_charge_per_hour(self, charge_array: np.ndarray) -> None:
assert len(charge_array) == self.hours
self.charge_array = np.array(charge_array)
def set_charge_allowed_for_hour(self, charge, hour):
def set_charge_allowed_for_hour(self, charge: float, hour: int) -> None:
assert hour < self.hours
self.charge_array[hour] = charge
def ladezustand_in_prozent(self):
def ladezustand_in_prozent(self) -> float:
return (self.soc_wh / self.kapazitaet_wh) * 100
def energie_abgeben(self, wh, hour):
def energie_abgeben(self, wh: float, hour: int) -> tuple[float, float]:
if self.discharge_array[hour] == 0:
return 0.0, 0.0 # No energy discharge and no losses
@ -160,9 +175,11 @@ class PVAkku:
# Return the actually discharged energy and the losses
return tatsaechlich_abgegeben_wh, verluste_wh
def energie_laden(self, wh, hour, relative_power=0.0):
def energie_laden(
self, wh: Optional[float], hour: int, relative_power: float = 0.0
) -> tuple[float, float]:
if hour is not None and self.charge_array[hour] == 0:
return 0, 0 # Charging not allowed in this hour
return 0.0, 0.0 # Charging not allowed in this hour
if relative_power > 0.0:
wh = self.max_ladeleistung_w * relative_power
# If no value for wh is given, use the maximum charging power
@ -190,7 +207,7 @@ class PVAkku:
verluste_wh = effektive_lademenge - geladene_menge
return geladene_menge, verluste_wh
def aktueller_energieinhalt(self):
def aktueller_energieinhalt(self) -> float:
"""This method returns the current remaining energy considering efficiency.
It accounts for both charging and discharging efficiency.
@ -204,11 +221,13 @@ if __name__ == "__main__":
# Test battery discharge below min_soc
print("Test: Discharge below min_soc")
akku = PVAkku(
kapazitaet_wh=10000,
PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
)
akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")
@ -222,11 +241,13 @@ if __name__ == "__main__":
# Test battery charge above max_soc
print("\nTest: Charge above max_soc")
akku = PVAkku(
kapazitaet_wh=10000,
PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
)
akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")
@ -240,11 +261,13 @@ if __name__ == "__main__":
# Test charging when battery is at max_soc
print("\nTest: Charging when at max_soc")
akku = PVAkku(
kapazitaet_wh=10000,
PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=80,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1,
start_soc_prozent=80,
min_soc_prozent=20,
max_soc_prozent=80,
)
akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")
@ -256,11 +279,13 @@ if __name__ == "__main__":
# Test discharging when battery is at min_soc
print("\nTest: Discharging when at min_soc")
akku = PVAkku(
kapazitaet_wh=10000,
PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=20,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1,
start_soc_prozent=20,
min_soc_prozent=20,
max_soc_prozent=80,
)
akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")

View File

@ -14,7 +14,7 @@ class HomeApplianceParameters(BaseModel):
class HomeAppliance:
def __init__(self, parameters: HomeApplianceParameters, hours=None):
def __init__(self, parameters: HomeApplianceParameters, hours: int = 24):
self.hours = hours # Total duration for which the planning is done
self.consumption_wh = (
parameters.consumption_wh
@ -22,7 +22,7 @@ class HomeAppliance:
self.duration_h = parameters.duration_h # Duration of use in hours
self.load_curve = np.zeros(self.hours) # Initialize the load curve with zeros
def set_starting_time(self, start_hour, global_start_hour=0):
def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
"""Sets the start time of the device and generates the corresponding load curve.
:param start_hour: The hour at which the device should start.
@ -40,15 +40,15 @@ class HomeAppliance:
# Set the power for the duration of use in the load curve array
self.load_curve[start_hour : start_hour + self.duration_h] = power_per_hour
def reset(self):
def reset(self) -> None:
"""Resets the load curve."""
self.load_curve = np.zeros(self.hours)
def get_load_curve(self):
def get_load_curve(self) -> np.ndarray:
"""Returns the current load curve."""
return self.load_curve
def get_load_for_hour(self, hour):
def get_load_for_hour(self, hour: int) -> float:
"""Returns the load for a specific hour.
:param hour: The hour for which the load is queried.
@ -59,6 +59,6 @@ class HomeAppliance:
return self.load_curve[hour]
def get_latest_starting_point(self):
def get_latest_starting_point(self) -> int:
"""Returns the latest possible start time at which the device can still run completely."""
return self.hours - self.duration_h

View File

@ -18,7 +18,7 @@ class Heatpump:
COP_COEFFICIENT = 0.1
"""COP increase per degree"""
def __init__(self, max_heat_output, prediction_hours):
def __init__(self, max_heat_output: int, prediction_hours: int):
self.max_heat_output = max_heat_output
self.prediction_hours = prediction_hours
self.log = logging.getLogger(__name__)

View File

@ -4,7 +4,7 @@ from akkudoktoreos.devices.battery import PVAkku
class WechselrichterParameters(BaseModel):
max_leistung_wh: float = Field(10000, gt=0)
max_leistung_wh: float = Field(default=10000, gt=0)
class Wechselrichter:
@ -14,9 +14,11 @@ class Wechselrichter:
)
self.akku = akku # Connection to a battery object
def energie_verarbeiten(self, erzeugung, verbrauch, hour):
verluste = 0 # Losses during processing
netzeinspeisung = 0 # Grid feed-in
def energie_verarbeiten(
self, erzeugung: float, verbrauch: float, hour: int
) -> tuple[float, float, float, float]:
verluste = 0.0 # Losses during processing
netzeinspeisung = 0.0 # Grid feed-in
netzbezug = 0.0 # Grid draw
eigenverbrauch = 0.0 # Self-consumption

View File

@ -7,13 +7,20 @@ from pydantic import BaseModel, Field, field_validator, model_validator
from typing_extensions import Self
from akkudoktoreos.config import AppConfig
from akkudoktoreos.devices.battery import EAutoParameters, PVAkku, PVAkkuParameters
from akkudoktoreos.devices.battery import (
EAutoParameters,
EAutoResult,
PVAkku,
PVAkkuParameters,
)
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
from akkudoktoreos.prediction.ems import (
EnergieManagementSystem,
EnergieManagementSystemParameters,
SimulationResult,
)
from akkudoktoreos.utils.utils import NumpyEncoder
from akkudoktoreos.visualize import visualisiere_ergebnisse
@ -21,19 +28,20 @@ class OptimizationParameters(BaseModel):
ems: EnergieManagementSystemParameters
pv_akku: PVAkkuParameters
wechselrichter: WechselrichterParameters = WechselrichterParameters()
eauto: EAutoParameters
eauto: Optional[EAutoParameters]
dishwasher: Optional[HomeApplianceParameters] = None
temperature_forecast: list[float] = Field(
"An array of floats representing the temperature forecast in degrees Celsius for different time intervals."
temperature_forecast: Optional[list[float]] = Field(
default=None,
description="An array of floats representing the temperature forecast in degrees Celsius for different time intervals.",
)
start_solution: Optional[list[float]] = Field(
None, description="Can be `null` or contain a previous solution (if available)."
default=None, description="Can be `null` or contain a previous solution (if available)."
)
@model_validator(mode="after")
def validate_list_length(self) -> Self:
arr_length = len(self.ems.pv_prognose_wh)
if arr_length != len(self.temperature_forecast):
if self.temperature_forecast is not None and arr_length != len(self.temperature_forecast):
raise ValueError("Input lists have different lenghts")
return self
@ -46,100 +54,6 @@ class OptimizationParameters(BaseModel):
return start_solution
class EAutoResult(BaseModel):
"""This object contains information related to the electric vehicle and its charging and discharging behavior."""
charge_array: list[float] = Field(
description="Indicates for each hour whether the EV is charging (`0` for no charging, `1` for charging)."
)
discharge_array: list[int] = Field(
description="Indicates for each hour whether the EV is discharging (`0` for no discharging, `1` for discharging)."
)
entlade_effizienz: float = Field(description="The discharge efficiency as a float.")
hours: int = Field("Amount of hours the simulation is done for.")
kapazitaet_wh: int = Field("The capacity of the EVs battery in watt-hours.")
lade_effizienz: float = Field("The charging efficiency as a float.")
max_ladeleistung_w: int = Field(description="The maximum charging power of the EV in watts.")
soc_wh: float = Field(
description="The state of charge of the battery in watt-hours at the start of the simulation."
)
start_soc_prozent: int = Field(
description="The state of charge of the battery in percentage at the start of the simulation."
)
class SimulationResult(BaseModel):
"""This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""
Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
description="The state of charge of the EV for each hour."
)
Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The revenue from grid feed-in or other sources in euros per hour."
)
Gesamt_Verluste: float = Field(
description="The total losses in watt-hours over the entire period."
)
Gesamtbilanz_Euro: float = Field(
description="The total balance of revenues minus costs in euros."
)
Gesamteinnahmen_Euro: float = Field(description="The total revenues in euros.")
Gesamtkosten_Euro: float = Field(description="The total costs in euros.")
Home_appliance_wh_per_hour: list[Optional[float]] = Field(
description="The energy consumption of a household appliance in watt-hours per hour."
)
Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The costs in euros per hour."
)
Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The grid energy drawn in watt-hours per hour."
)
Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The energy fed into the grid in watt-hours per hour."
)
Verluste_Pro_Stunde: list[Optional[float]] = Field(
description="The losses in watt-hours per hour."
)
akku_soc_pro_stunde: list[Optional[float]] = Field(
description="The state of charge of the battery (not the EV) in percentage per hour."
)
# class SimulationData(BaseModel):
# """An object containing the simulated data."""
#
# Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
# EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated state of charge of the electric car per hour.",
# )
# Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated income in euros per hour."
# )
# Gesamt_Verluste: float = Field(description="The total simulated losses in watt-hours.")
# Gesamtbilanz_Euro: float = Field(description="The total simulated balance in euros.")
# Gesamteinnahmen_Euro: float = Field(description="The total simulated income in euros.")
# Gesamtkosten_Euro: float = Field(description="The total simulated costs in euros.")
# Home_appliance_wh_per_hour: list[Optional[float]] = Field(
# description="An array of floats representing the simulated energy consumption of a household appliance in watt-hours per hour."
# )
# Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated costs in euros per hour."
# )
# Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated grid consumption in watt-hours per hour."
# )
# Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated grid feed-in in watt-hours per hour."
# )
# Verluste_Pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated losses per hour."
# )
# akku_soc_pro_stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated state of charge of the battery in percentage per hour."
# )
class OptimizeResponse(BaseModel):
"""**Note**: The first value of "Last_Wh_pro_Stunde", "Netzeinspeisung_Wh_pro_Stunde" and "Netzbezug_Wh_pro_Stunde", will be set to null in the JSON output and represented as NaN or None in the corresponding classes' data returns. This approach is adopted to ensure that the current hour's processing remains unchanged."""
@ -152,17 +66,35 @@ class OptimizeResponse(BaseModel):
discharge_allowed: list[int] = Field(
description="Array with discharge values (1 for discharge, 0 otherwise)."
)
eautocharge_hours_float: Optional[list[float]] = Field(description="TBD")
result: SimulationResult
eauto_obj: EAutoResult
eauto_obj: Optional[EAutoResult]
start_solution: Optional[list[float]] = Field(
None,
default=None,
description="An array of binary values (0 or 1) representing a possible starting solution for the simulation.",
)
washingstart: Optional[int] = Field(
None,
default=None,
description="Can be `null` or contain an object representing the start of washing (if applicable).",
)
# simulation_data: Optional[SimulationData] = None
@field_validator(
"ac_charge",
"dc_charge",
"discharge_allowed",
mode="before",
)
def convert_numpy(cls, field: Any) -> Any:
return NumpyEncoder.convert_numpy(field)[0]
@field_validator(
"eauto_obj",
mode="before",
)
def convert_eauto(cls, field: Any) -> Any:
if isinstance(field, PVAkku):
return EAutoResult(**field.to_dict())
return field
class optimization_problem:
@ -176,7 +108,7 @@ class optimization_problem:
self._config = config
self.prediction_hours = config.eos.prediction_hours
self.strafe = config.eos.penalty
self.opti_param = None
self.opti_param: dict[str, Any] = {}
self.fixed_eauto_hours = config.eos.prediction_hours - config.eos.optimization_hours
self.possible_charge_values = config.eos.available_charging_rates_in_percentage
self.verbose = verbose
@ -189,7 +121,7 @@ class optimization_problem:
random.seed(fixed_seed)
def decode_charge_discharge(
self, discharge_hours_bin: np.ndarray
self, discharge_hours_bin: list[int]
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""Decode the input array `discharge_hours_bin` into three separate arrays for AC charging, DC charging, and discharge.
@ -209,30 +141,32 @@ class optimization_problem:
- discharge (np.ndarray): Array with discharge values (1 for discharge, 0 otherwise).
"""
# Convert the input list to a NumPy array, if it's not already
discharge_hours_bin = np.array(discharge_hours_bin)
discharge_hours_bin_np = np.array(discharge_hours_bin)
# Create ac_charge array: Only consider values between 2 and 6 (AC charging power levels), set the rest to 0
ac_charge = np.where(
(discharge_hours_bin >= 2) & (discharge_hours_bin <= 6), discharge_hours_bin - 1, 0
(discharge_hours_bin_np >= 2) & (discharge_hours_bin_np <= 6),
discharge_hours_bin_np - 1,
0,
)
ac_charge = ac_charge / 5.0 # Normalize AC charge to range between 0 and 1
# Create dc_charge array: 7 = Not allowed (mapped to 0), 8 = Allowed (mapped to 1)
# Create dc_charge array: Only if DC charge optimization is enabled
if self.optimize_dc_charge:
dc_charge = np.where(discharge_hours_bin == 8, 1, 0)
dc_charge = np.where(discharge_hours_bin_np == 8, 1, 0)
else:
dc_charge = np.ones_like(
discharge_hours_bin
discharge_hours_bin_np
) # Allow DC charge in all hours if DC charge optimization is disabled
# Create discharge array: Only consider value 1 (Discharge), set the rest to 0 (binary output)
discharge = np.where(discharge_hours_bin == 1, 1, 0)
discharge = np.where(discharge_hours_bin_np == 1, 1, 0)
return ac_charge, dc_charge, discharge
# Custom mutation function that applies type-specific mutations
def mutate(self, individual):
def mutate(self, individual: list[int]) -> tuple[list[int]]:
"""Custom mutation function for the individual.
This function mutates different parts of the individual:
@ -298,7 +232,7 @@ class optimization_problem:
return (individual,)
# Method to create an individual based on the conditions
def create_individual(self):
def create_individual(self) -> list[int]:
# Start with discharge states for the individual
individual_components = [
self.toolbox.attr_discharge_state() for _ in range(self.prediction_hours)
@ -317,8 +251,8 @@ class optimization_problem:
return creator.Individual(individual_components)
def split_individual(
self, individual: list[float]
) -> Tuple[list[int], list[float], Optional[int]]:
self, individual: list[int]
) -> tuple[list[int], Optional[list[int]], Optional[int]]:
"""Split the individual solution into its components.
Components:
@ -327,18 +261,18 @@ class optimization_problem:
3. Dishwasher start time (integer if applicable).
"""
discharge_hours_bin = individual[: self.prediction_hours]
eautocharge_hours_float = (
eautocharge_hours_index = (
individual[self.prediction_hours : self.prediction_hours * 2]
if self.optimize_ev
else None
)
washingstart_int = (
individual[-1]
int(individual[-1])
if self.opti_param and self.opti_param.get("home_appliance", 0) > 0
else None
)
return discharge_hours_bin, eautocharge_hours_float, washingstart_int
return discharge_hours_bin, eautocharge_hours_index, washingstart_int
def setup_deap_environment(self, opti_param: dict[str, Any], start_hour: int) -> None:
"""Set up the DEAP environment with fitness and individual creation rules."""
@ -403,7 +337,7 @@ class optimization_problem:
self.toolbox.register("select", tools.selTournament, tournsize=3)
def evaluate_inner(
self, individual: list[float], ems: EnergieManagementSystem, start_hour: int
self, individual: list[int], ems: EnergieManagementSystem, start_hour: int
) -> dict[str, Any]:
"""Simulates the energy management system (EMS) using the provided individual solution.
@ -413,7 +347,7 @@ class optimization_problem:
discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
individual
)
if self.opti_param.get("home_appliance", 0) > 0:
if washingstart_int is not None:
ems.set_home_appliance_start(washingstart_int, global_start_hour=start_hour)
ac, dc, discharge = self.decode_charge_discharge(discharge_hours_bin)
@ -424,19 +358,19 @@ class optimization_problem:
ems.set_akku_dc_charge_hours(dc)
ems.set_akku_ac_charge_hours(ac)
if self.optimize_ev:
if eautocharge_hours_index is not None:
eautocharge_hours_float = [
self._config.eos.available_charging_rates_in_percentage[i]
for i in eautocharge_hours_index
]
ems.set_ev_charge_hours(eautocharge_hours_float)
ems.set_ev_charge_hours(np.array(eautocharge_hours_float))
else:
ems.set_ev_charge_hours(np.full(self.prediction_hours, 0))
return ems.simuliere(start_hour)
def evaluate(
self,
individual: list[float],
individual: list[int],
ems: EnergieManagementSystem,
parameters: OptimizationParameters,
start_hour: int,
@ -450,7 +384,7 @@ class optimization_problem:
gesamtbilanz = o["Gesamtbilanz_Euro"] * (-1.0 if worst_case else 1.0)
discharge_hours_bin, eautocharge_hours_float, _ = self.split_individual(individual)
discharge_hours_bin, eautocharge_hours_index, _ = self.split_individual(individual)
# Small Penalty for not discharging
gesamtbilanz += sum(
@ -460,13 +394,15 @@ class optimization_problem:
# Penalty for not meeting the minimum SOC (State of Charge) requirement
# if parameters.eauto_min_soc_prozent - ems.eauto.ladezustand_in_prozent() <= 0.0 and self.optimize_ev:
# gesamtbilanz += sum(
# self.strafe for ladeleistung in eautocharge_hours_float if ladeleistung != 0.0
# self.strafe for ladeleistung in eautocharge_hours_index if ladeleistung != 0.0
# )
individual.extra_data = (
individual.extra_data = ( # type: ignore[attr-defined]
o["Gesamtbilanz_Euro"],
o["Gesamt_Verluste"],
parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent(),
parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()
if parameters.eauto and ems.eauto
else 0,
)
# Adjust total balance with battery value and penalties for unmet SOC
@ -478,7 +414,11 @@ class optimization_problem:
if self.optimize_ev:
gesamtbilanz += max(
0,
(parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent())
(
parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()
if parameters.eauto and ems.eauto
else 0
)
* self.strafe,
)
@ -497,7 +437,7 @@ class optimization_problem:
print("Start optimize:", start_solution)
# Insert the start solution into the population if provided
if start_solution not in [None, -1]:
if start_solution is not None:
for _ in range(3):
population.insert(0, creator.Individual(start_solution))
@ -515,7 +455,7 @@ class optimization_problem:
verbose=self.verbose,
)
member = {"bilanz": [], "verluste": [], "nebenbedingung": []}
member: dict[str, list[float]] = {"bilanz": [], "verluste": [], "nebenbedingung": []}
for ind in population:
if hasattr(ind, "extra_data"):
extra_value1, extra_value2, extra_value3 = ind.extra_data
@ -528,12 +468,10 @@ class optimization_problem:
def optimierung_ems(
self,
parameters: OptimizationParameters,
start_hour: Optional[int] = None,
start_hour: int,
worst_case: bool = False,
startdate: Optional[Any] = None, # startdate is not used!
*,
ngen: int = 600,
) -> dict[str, Any]:
) -> OptimizeResponse:
"""Perform EMS (Energy Management System) optimization and visualize results."""
einspeiseverguetung_euro_pro_wh = np.full(
self.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
@ -546,16 +484,19 @@ class optimization_problem:
)
akku.set_charge_per_hour(np.full(self.prediction_hours, 1))
self.optimize_ev = True
if parameters.eauto.min_soc_prozent - parameters.eauto.start_soc_prozent < 0:
eauto: Optional[PVAkku] = None
if parameters.eauto:
eauto = PVAkku(
parameters.eauto,
hours=self.prediction_hours,
)
eauto.set_charge_per_hour(np.full(self.prediction_hours, 1))
self.optimize_ev = (
parameters.eauto.min_soc_prozent - parameters.eauto.start_soc_prozent >= 0
)
else:
self.optimize_ev = False
eauto = PVAkku(
parameters.eauto,
hours=self.prediction_hours,
)
eauto.set_charge_per_hour(np.full(self.prediction_hours, 1))
# Initialize household appliance if applicable
dishwasher = (
HomeAppliance(
@ -571,9 +512,9 @@ class optimization_problem:
ems = EnergieManagementSystem(
self._config.eos,
parameters.ems,
wechselrichter=wr,
eauto=eauto,
home_appliance=dishwasher,
wechselrichter=wr,
)
# Setup the DEAP environment and optimization process
@ -586,14 +527,17 @@ class optimization_problem:
# Perform final evaluation on the best solution
o = self.evaluate_inner(start_solution, ems, start_hour)
discharge_hours_bin, eautocharge_hours_float, washingstart_int = self.split_individual(
discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
start_solution
)
if self.optimize_ev:
eautocharge_hours_float = [
eautocharge_hours_float = (
[
self._config.eos.available_charging_rates_in_percentage[i]
for i in eautocharge_hours_float
for i in eautocharge_hours_index
]
if eautocharge_hours_index is not None
else None
)
ac_charge, dc_charge, discharge = self.decode_charge_discharge(discharge_hours_bin)
# Visualize the results
@ -612,43 +556,15 @@ class optimization_problem:
extra_data=extra_data,
)
# List output keys where the first element needs to be changed to None
keys_to_modify = [
"Last_Wh_pro_Stunde",
"Netzeinspeisung_Wh_pro_Stunde",
"akku_soc_pro_stunde",
"Netzbezug_Wh_pro_Stunde",
"Kosten_Euro_pro_Stunde",
"Einnahmen_Euro_pro_Stunde",
"EAuto_SoC_pro_Stunde",
"Verluste_Pro_Stunde",
"Home_appliance_wh_per_hour",
]
# Loop through each key in the list
for key in keys_to_modify:
# Convert the NumPy array to a list
element_list = o[key].tolist()
# Change the first value to None
# element_list[0] = None
# Change the NaN to None (JSON)
element_list = [
None if isinstance(x, (int, float)) and np.isnan(x) else x for x in element_list
]
# Assign the modified list back to the dictionary
o[key] = element_list
# Return final results as a dictionary
return {
"ac_charge": ac_charge.tolist(),
"dc_charge": dc_charge.tolist(),
"discharge_allowed": discharge.tolist(),
"eautocharge_hours_float": eautocharge_hours_float,
"result": o,
"eauto_obj": ems.eauto.to_dict(),
"start_solution": start_solution,
"washingstart": washingstart_int,
# "simulation_data": o,
}
return OptimizeResponse(
**{
"ac_charge": ac_charge,
"dc_charge": dc_charge,
"discharge_allowed": discharge,
"eautocharge_hours_float": eautocharge_hours_float,
"result": SimulationResult(**o),
"eauto_obj": ems.eauto,
"start_solution": start_solution,
"washingstart": washingstart_int,
}
)

View File

@ -1,14 +1,15 @@
from datetime import datetime
from typing import Dict, List, Optional, Union
from typing import Any, Dict, Optional, Union
import numpy as np
from pydantic import BaseModel, Field, model_validator
from pydantic import BaseModel, Field, field_validator, model_validator
from typing_extensions import Self
from akkudoktoreos.config import EOSConfig
from akkudoktoreos.devices.battery import PVAkku
from akkudoktoreos.devices.generic import HomeAppliance
from akkudoktoreos.devices.inverter import Wechselrichter
from akkudoktoreos.utils.utils import NumpyEncoder
class EnergieManagementSystemParameters(BaseModel):
@ -41,14 +42,67 @@ class EnergieManagementSystemParameters(BaseModel):
return self
class SimulationResult(BaseModel):
"""This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""
Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
description="The state of charge of the EV for each hour."
)
Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The revenue from grid feed-in or other sources in euros per hour."
)
Gesamt_Verluste: float = Field(
description="The total losses in watt-hours over the entire period."
)
Gesamtbilanz_Euro: float = Field(
description="The total balance of revenues minus costs in euros."
)
Gesamteinnahmen_Euro: float = Field(description="The total revenues in euros.")
Gesamtkosten_Euro: float = Field(description="The total costs in euros.")
Home_appliance_wh_per_hour: list[Optional[float]] = Field(
description="The energy consumption of a household appliance in watt-hours per hour."
)
Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The costs in euros per hour."
)
Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The grid energy drawn in watt-hours per hour."
)
Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The energy fed into the grid in watt-hours per hour."
)
Verluste_Pro_Stunde: list[Optional[float]] = Field(
description="The losses in watt-hours per hour."
)
akku_soc_pro_stunde: list[Optional[float]] = Field(
description="The state of charge of the battery (not the EV) in percentage per hour."
)
@field_validator(
"Last_Wh_pro_Stunde",
"Netzeinspeisung_Wh_pro_Stunde",
"akku_soc_pro_stunde",
"Netzbezug_Wh_pro_Stunde",
"Kosten_Euro_pro_Stunde",
"Einnahmen_Euro_pro_Stunde",
"EAuto_SoC_pro_Stunde",
"Verluste_Pro_Stunde",
"Home_appliance_wh_per_hour",
mode="before",
)
def convert_numpy(cls, field: Any) -> Any:
return NumpyEncoder.convert_numpy(field)[0]
class EnergieManagementSystem:
def __init__(
self,
config: EOSConfig,
parameters: EnergieManagementSystemParameters,
wechselrichter: Wechselrichter,
eauto: Optional[PVAkku] = None,
home_appliance: Optional[HomeAppliance] = None,
wechselrichter: Optional[Wechselrichter] = None,
):
self.akku = wechselrichter.akku
self.gesamtlast = np.array(parameters.gesamtlast, float)
@ -66,7 +120,7 @@ class EnergieManagementSystem:
self.dc_charge_hours = np.full(config.prediction_hours, 1)
self.ev_charge_hours = np.full(config.prediction_hours, 0)
def set_akku_discharge_hours(self, ds: List[int]) -> None:
def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
self.akku.set_discharge_per_hour(ds)
def set_akku_ac_charge_hours(self, ds: np.ndarray) -> None:
@ -75,22 +129,24 @@ class EnergieManagementSystem:
def set_akku_dc_charge_hours(self, ds: np.ndarray) -> None:
self.dc_charge_hours = ds
def set_ev_charge_hours(self, ds: List[int]) -> None:
def set_ev_charge_hours(self, ds: np.ndarray) -> None:
self.ev_charge_hours = ds
def set_home_appliance_start(self, ds: List[int], global_start_hour: int = 0) -> None:
self.home_appliance.set_starting_time(ds, global_start_hour=global_start_hour)
def set_home_appliance_start(self, start_hour: int, global_start_hour: int = 0) -> None:
assert self.home_appliance is not None
self.home_appliance.set_starting_time(start_hour, global_start_hour=global_start_hour)
def reset(self) -> None:
self.eauto.reset()
if self.eauto:
self.eauto.reset()
self.akku.reset()
def simuliere_ab_jetzt(self) -> dict:
def simuliere_ab_jetzt(self) -> dict[str, Any]:
jetzt = datetime.now()
start_stunde = jetzt.hour
return self.simuliere(start_stunde)
def simuliere(self, start_stunde: int) -> dict:
def simuliere(self, start_stunde: int) -> dict[str, Any]:
"""hour.
akku_soc_pro_stunde begin of the hour, initial hour state!

View File

@ -2,11 +2,13 @@ import numpy as np
class Gesamtlast:
def __init__(self, prediction_hours=24):
self.lasten = {} # Contains names and load arrays for different sources
def __init__(self, prediction_hours: int = 24):
self.lasten: dict[
str, np.ndarray
] = {} # Contains names and load arrays for different sources
self.prediction_hours = prediction_hours
def hinzufuegen(self, name, last_array):
def hinzufuegen(self, name: str, last_array: np.ndarray) -> None:
"""Adds an array of loads for a specific source.
:param name: Name of the load source (e.g., "Household", "Heat Pump")
@ -16,13 +18,13 @@ class Gesamtlast:
raise ValueError(f"Total load inconsistent lengths in arrays: {name} {len(last_array)}")
self.lasten[name] = last_array
def gesamtlast_berechnen(self):
def gesamtlast_berechnen(self) -> np.ndarray:
"""Calculates the total load for each hour and returns an array of total loads.
:return: Array of total loads, where each entry corresponds to an hour
"""
if not self.lasten:
return []
return np.ndarray(0)
# Assumption: All load arrays have the same length
stunden = len(next(iter(self.lasten.values())))

View File

@ -1,28 +1,30 @@
from datetime import datetime
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.metrics import mean_squared_error, r2_score
from akkudoktoreos.prediction.load_forecast import LoadForecast
class LoadPredictionAdjuster:
def __init__(self, measured_data, predicted_data, load_forecast):
def __init__(
self, measured_data: pd.DataFrame, predicted_data: pd.DataFrame, load_forecast: LoadForecast
):
self.measured_data = measured_data
self.predicted_data = predicted_data
self.load_forecast = load_forecast
self.merged_data = self._merge_data()
self.train_data = None
self.test_data = None
self.weekday_diff = None
self.weekend_diff = None
def _remove_outliers(self, data, threshold=2):
def _remove_outliers(self, data: pd.DataFrame, threshold: int = 2) -> pd.DataFrame:
# Calculate the Z-Score of the 'Last' data
data["Z-Score"] = np.abs((data["Last"] - data["Last"].mean()) / data["Last"].std())
# Filter the data based on the threshold
filtered_data = data[data["Z-Score"] < threshold]
return filtered_data.drop(columns=["Z-Score"])
def _merge_data(self):
def _merge_data(self) -> pd.DataFrame:
# Convert the time column in both DataFrames to datetime
self.predicted_data["time"] = pd.to_datetime(self.predicted_data["time"])
self.measured_data["time"] = pd.to_datetime(self.measured_data["time"])
@ -47,7 +49,9 @@ class LoadPredictionAdjuster:
merged_data["DayOfWeek"] = merged_data["time"].dt.dayofweek
return merged_data
def calculate_weighted_mean(self, train_period_weeks=9, test_period_weeks=1):
def calculate_weighted_mean(
self, train_period_weeks: int = 9, test_period_weeks: int = 1
) -> None:
self.merged_data = self._remove_outliers(self.merged_data)
train_end_date = self.merged_data["time"].max() - pd.Timedelta(weeks=test_period_weeks)
train_start_date = train_end_date - pd.Timedelta(weeks=train_period_weeks)
@ -79,27 +83,27 @@ class LoadPredictionAdjuster:
weekends_train_data.groupby("Hour").apply(self._weighted_mean_diff).dropna()
)
def _weighted_mean_diff(self, data):
def _weighted_mean_diff(self, data: pd.DataFrame) -> float:
train_end_date = self.train_data["time"].max()
weights = 1 / (train_end_date - data["time"]).dt.days.replace(0, np.nan)
weighted_mean = (data["Difference"] * weights).sum() / weights.sum()
return weighted_mean
def adjust_predictions(self):
def adjust_predictions(self) -> None:
self.train_data["Adjusted Pred"] = self.train_data.apply(self._adjust_row, axis=1)
self.test_data["Adjusted Pred"] = self.test_data.apply(self._adjust_row, axis=1)
def _adjust_row(self, row):
def _adjust_row(self, row: pd.Series) -> pd.Series:
if row["DayOfWeek"] < 5:
return row["Last Pred"] + self.weekday_diff.get(row["Hour"], 0)
else:
return row["Last Pred"] + self.weekend_diff.get(row["Hour"], 0)
def plot_results(self):
def plot_results(self) -> None:
self._plot_data(self.train_data, "Training")
self._plot_data(self.test_data, "Testing")
def _plot_data(self, data, data_type):
def _plot_data(self, data: pd.DataFrame, data_type: str) -> None:
plt.figure(figsize=(14, 7))
plt.plot(data["time"], data["Last"], label=f"Actual Last - {data_type}", color="blue")
plt.plot(
@ -123,13 +127,13 @@ class LoadPredictionAdjuster:
plt.grid(True)
plt.show()
def evaluate_model(self):
def evaluate_model(self) -> None:
mse = mean_squared_error(self.test_data["Last"], self.test_data["Adjusted Pred"])
r2 = r2_score(self.test_data["Last"], self.test_data["Adjusted Pred"])
print(f"Mean Squared Error: {mse}")
print(f"R-squared: {r2}")
def predict_next_hours(self, hours_ahead):
def predict_next_hours(self, hours_ahead: int) -> pd.DataFrame:
last_date = self.merged_data["time"].max()
future_dates = [last_date + pd.Timedelta(hours=i) for i in range(1, hours_ahead + 1)]
future_df = pd.DataFrame({"time": future_dates})
@ -139,7 +143,7 @@ class LoadPredictionAdjuster:
future_df["Adjusted Pred"] = future_df.apply(self._adjust_row, axis=1)
return future_df
def _forecast_next_hours(self, timestamp):
def _forecast_next_hours(self, timestamp: datetime) -> float:
date_str = timestamp.strftime("%Y-%m-%d")
hour = timestamp.hour
daily_forecast = self.load_forecast.get_daily_stats(date_str)

View File

@ -1,4 +1,5 @@
from datetime import datetime
from pathlib import Path
import numpy as np
@ -6,14 +7,12 @@ import numpy as np
class LoadForecast:
def __init__(self, filepath=None, year_energy=None):
def __init__(self, filepath: str | Path, year_energy: float):
self.filepath = filepath
self.data = None
self.data_year_energy = None
self.year_energy = year_energy
self.load_data()
def get_daily_stats(self, date_str):
def get_daily_stats(self, date_str: str) -> np.ndarray:
"""Returns the 24-hour profile with mean and standard deviation for a given date.
:param date_str: Date as a string in the format "YYYY-MM-DD"
@ -29,7 +28,7 @@ class LoadForecast:
daily_stats = self.data_year_energy[day_of_year - 1] # -1 because indexing starts at 0
return daily_stats
def get_hourly_stats(self, date_str, hour):
def get_hourly_stats(self, date_str: str, hour: int) -> np.ndarray:
"""Returns the mean and standard deviation for a specific hour of a given date.
:param date_str: Date as a string in the format "YYYY-MM-DD"
@ -47,7 +46,7 @@ class LoadForecast:
return hourly_stats
def get_stats_for_date_range(self, start_date_str, end_date_str):
def get_stats_for_date_range(self, start_date_str: str, end_date_str: str) -> np.ndarray:
"""Returns the means and standard deviations for a date range.
:param start_date_str: Start date as a string in the format "YYYY-MM-DD"
@ -69,7 +68,7 @@ class LoadForecast:
stats_for_range = stats_for_range.reshape(stats_for_range.shape[0], -1)
return stats_for_range
def load_data(self):
def load_data(self) -> None:
"""Loads data from the specified file."""
try:
data = np.load(self.filepath)
@ -81,11 +80,12 @@ class LoadForecast:
except Exception as e:
print(f"An error occurred while loading data: {e}")
def get_price_data(self):
def get_price_data(self) -> None:
"""Returns price data (currently not implemented)."""
return self.price_data
raise NotImplementedError
# return self.price_data
def _convert_to_datetime(self, date_str):
def _convert_to_datetime(self, date_str: str) -> datetime:
"""Converts a date string to a datetime object."""
return datetime.strptime(date_str, "%Y-%m-%d")

View File

@ -3,6 +3,7 @@ import json
import zoneinfo
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Sequence
import numpy as np
import requests
@ -10,7 +11,7 @@ import requests
from akkudoktoreos.config import AppConfig, SetupIncomplete
def repeat_to_shape(array, target_shape):
def repeat_to_shape(array: np.ndarray, target_shape: Sequence[int]) -> np.ndarray:
# Check if the array fits the target shape
if len(target_shape) != array.ndim:
raise ValueError("Array and target shape must have the same number of dimensions")
@ -25,7 +26,11 @@ def repeat_to_shape(array, target_shape):
class HourlyElectricityPriceForecast:
def __init__(
self, source: str | Path, config: AppConfig, charges=0.000228, use_cache=True
self,
source: str | Path,
config: AppConfig,
charges: float = 0.000228,
use_cache: bool = True,
): # 228
self.cache_dir = config.working_dir / config.directories.cache
self.use_cache = use_cache
@ -37,7 +42,7 @@ class HourlyElectricityPriceForecast:
self.charges = charges
self.prediction_hours = config.eos.prediction_hours
def load_data(self, source: str | Path):
def load_data(self, source: str | Path) -> list[dict[str, Any]]:
cache_file = self.get_cache_file(source)
if isinstance(source, str):
if cache_file.is_file() and not self.is_cache_expired() and self.use_cache:
@ -61,12 +66,14 @@ class HourlyElectricityPriceForecast:
raise ValueError(f"Input is not a valid path: {source}")
return json_data["values"]
def get_cache_file(self, url):
def get_cache_file(self, url: str | Path) -> Path:
if isinstance(url, Path):
url = str(url)
hash_object = hashlib.sha256(url.encode())
hex_dig = hash_object.hexdigest()
return self.cache_dir / f"cache_{hex_dig}.json"
def is_cache_expired(self):
def is_cache_expired(self) -> bool:
if not self.cache_time_file.is_file():
return True
with self.cache_time_file.open("r") as file:
@ -74,11 +81,11 @@ class HourlyElectricityPriceForecast:
last_cache_time = datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S")
return datetime.now() - last_cache_time > timedelta(hours=1)
def update_cache_timestamp(self):
def update_cache_timestamp(self) -> None:
with self.cache_time_file.open("w") as file:
file.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
def get_price_for_date(self, date_str):
def get_price_for_date(self, date_str: str) -> np.ndarray:
"""Returns all prices for the specified date, including the price from 00:00 of the previous day."""
# Convert date string to datetime object
date_obj = datetime.strptime(date_str, "%Y-%m-%d")
@ -108,7 +115,7 @@ class HourlyElectricityPriceForecast:
return np.array(date_prices) / (1000.0 * 100.0) + self.charges
def get_price_for_daterange(self, start_date_str, end_date_str):
def get_price_for_daterange(self, start_date_str: str, end_date_str: str) -> np.ndarray:
"""Returns all prices between the start and end dates."""
print(start_date_str)
print(end_date_str)
@ -117,7 +124,7 @@ class HourlyElectricityPriceForecast:
start_date = start_date_utc.astimezone(zoneinfo.ZoneInfo("Europe/Berlin"))
end_date = end_date_utc.astimezone(zoneinfo.ZoneInfo("Europe/Berlin"))
price_list = []
price_list: list[float] = []
while start_date < end_date:
date_str = start_date.strftime("%Y-%m-%d")
@ -127,8 +134,10 @@ class HourlyElectricityPriceForecast:
price_list.extend(daily_prices)
start_date += timedelta(days=1)
price_list_np = np.array(price_list)
# If prediction hours are greater than 0, reshape the price list
if self.prediction_hours > 0:
price_list = repeat_to_shape(np.array(price_list), (self.prediction_hours,))
price_list_np = repeat_to_shape(price_list_np, (self.prediction_hours,))
return price_list
return price_list_np

View File

@ -21,7 +21,7 @@ Example:
)
# Update the AC power measurement for a specific date and time
forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
forecast.update_ac_power_measurement(ac_power_measurement=1000, date_time=datetime.now())
# Print the forecast data with DC and AC power details
forecast.print_ac_power_and_measurement()
@ -36,7 +36,8 @@ Attributes:
import json
from datetime import date, datetime
from typing import List, Optional, Union
from pathlib import Path
from typing import Any, List, Optional, Union
import numpy as np
import pandas as pd
@ -89,21 +90,20 @@ class AkkudoktorForecast(BaseModel):
values: List[List[AkkudoktorForecastValue]]
def validate_pv_forecast_data(data) -> str:
def validate_pv_forecast_data(data: dict[str, Any]) -> Optional[str]:
"""Validate PV forecast data."""
data_type = None
error_msg = ""
try:
AkkudoktorForecast.model_validate(data)
data_type = "Akkudoktor"
except ValidationError as e:
error_msg = ""
for error in e.errors():
field = " -> ".join(str(x) for x in error["loc"])
message = error["msg"]
error_type = error["type"]
error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
logger.debug(f"Validation did not succeed: {error_msg}")
return None
return data_type
@ -167,7 +167,7 @@ class ForecastData:
"""
return self.dc_power
def ac_power_measurement(self) -> float:
def get_ac_power_measurement(self) -> Optional[float]:
"""Returns the measured AC power.
It returns the measured AC power if available; otherwise None.
@ -191,7 +191,7 @@ class ForecastData:
else:
return self.ac_power
def get_windspeed_10m(self) -> float:
def get_windspeed_10m(self) -> Optional[float]:
"""Returns the wind speed at 10 meters altitude.
Returns:
@ -199,7 +199,7 @@ class ForecastData:
"""
return self.windspeed_10m
def get_temperature(self) -> float:
def get_temperature(self) -> Optional[float]:
"""Returns the temperature.
Returns:
@ -227,10 +227,10 @@ class PVForecast:
def __init__(
self,
data: Optional[dict] = None,
filepath: Optional[str] = None,
data: Optional[dict[str, Any]] = None,
filepath: Optional[str | Path] = None,
url: Optional[str] = None,
forecast_start: Union[datetime, date, str, int, float] = None,
forecast_start: Union[datetime, date, str, int, float, None] = None,
prediction_hours: Optional[int] = None,
):
"""Initializes a `PVForecast` instance.
@ -253,16 +253,15 @@ class PVForecast:
Example:
forecast = PVForecast(data=my_forecast_data, forecast_start="2024-10-13", prediction_hours=72)
"""
self.meta = {}
self.forecast_data = []
self.current_measurement = None
self.meta: dict[str, Any] = {}
self.forecast_data: list[ForecastData] = []
self.current_measurement: Optional[float] = None
self.data = data
self.filepath = filepath
self.url = url
self._forecast_start: Optional[datetime] = None
if forecast_start:
self._forecast_start = to_datetime(forecast_start, to_naiv=True, to_maxtime=False)
else:
self._forecast_start = None
self.prediction_hours = prediction_hours
self._tz_name = None
@ -277,8 +276,8 @@ class PVForecast:
def update_ac_power_measurement(
self,
ac_power_measurement: float,
date_time: Union[datetime, date, str, int, float, None] = None,
ac_power_measurement=None,
) -> bool:
"""Updates the AC power measurement for a specific time.
@ -309,10 +308,10 @@ class PVForecast:
def process_data(
self,
data: Optional[dict] = None,
filepath: Optional[str] = None,
data: Optional[dict[str, Any]] = None,
filepath: Optional[str | Path] = None,
url: Optional[str] = None,
forecast_start: Union[datetime, date, str, int, float] = None,
forecast_start: Union[datetime, date, str, int, float, None] = None,
prediction_hours: Optional[int] = None,
) -> None:
"""Processes the forecast data from the provided source (in-memory `data`, `filepath`, or `url`).
@ -368,6 +367,7 @@ class PVForecast:
) # Invalid path
else:
raise ValueError("No prediction input data available.")
assert data is not None # make mypy happy
# Validate input data to be of a known format
data_format = validate_pv_forecast_data(data)
if data_format != "Akkudoktor":
@ -390,7 +390,7 @@ class PVForecast:
# --------------------------------------------
# From here Akkudoktor PV forecast data format
# ---------------------------------------------
self.meta = data.get("meta")
self.meta = data.get("meta", {})
all_values = data.get("values")
# timezone of the PV system
@ -454,7 +454,7 @@ class PVForecast:
self._forecast_start = self.forecast_data[0].get_date_time()
logger.debug(f"Forecast start adapted to {self._forecast_start}")
def load_data_from_file(self, filepath: str) -> dict:
def load_data_from_file(self, filepath: str | Path) -> dict[str, Any]:
"""Loads forecast data from a file.
Args:
@ -467,7 +467,7 @@ class PVForecast:
data = json.load(file)
return data
def load_data_from_url(self, url: str) -> dict:
def load_data_from_url(self, url: str) -> dict[str, Any]:
"""Loads forecast data from a URL.
Example:
@ -488,7 +488,7 @@ class PVForecast:
return data
@cache_in_file() # use binary mode by default as we have python objects not text
def load_data_from_url_with_caching(self, url: str, until_date=None) -> dict:
def load_data_from_url_with_caching(self, url: str) -> dict[str, Any]:
"""Loads data from a URL or from the cache if available.
Args:
@ -506,7 +506,7 @@ class PVForecast:
logger.error(data)
return data
def get_forecast_data(self):
def get_forecast_data(self) -> list[ForecastData]:
"""Returns the forecast data.
Returns:
@ -516,7 +516,7 @@ class PVForecast:
def get_temperature_forecast_for_date(
self, input_date: Union[datetime, date, str, int, float, None]
):
) -> np.ndarray:
"""Returns the temperature forecast for a specific date.
Args:
@ -543,7 +543,7 @@ class PVForecast:
self,
start_date: Union[datetime, date, str, int, float, None],
end_date: Union[datetime, date, str, int, float, None],
):
) -> np.ndarray:
"""Returns the PV forecast for a date range.
Args:
@ -575,7 +575,7 @@ class PVForecast:
self,
start_date: Union[datetime, date, str, int, float, None],
end_date: Union[datetime, date, str, int, float, None],
):
) -> np.ndarray:
"""Returns the temperature forecast for a given date range.
Args:
@ -601,7 +601,7 @@ class PVForecast:
temperature_forecast = [data.get_temperature() for data in date_range_forecast]
return np.array(temperature_forecast)[: self.prediction_hours]
def get_forecast_dataframe(self):
def get_forecast_dataframe(self) -> pd.DataFrame:
"""Converts the forecast data into a Pandas DataFrame.
Returns:
@ -623,7 +623,7 @@ class PVForecast:
df = pd.DataFrame(data)
return df
def get_forecast_start(self) -> datetime:
def get_forecast_start(self) -> Optional[datetime]:
"""Return the start of the forecast data in local timezone.
Returns:
@ -678,5 +678,5 @@ if __name__ == "__main__":
"past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&"
"hourly=relativehumidity_2m%2Cwindspeed_10m",
)
forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
forecast.update_ac_power_measurement(ac_power_measurement=1000, date_time=datetime.now())
print(forecast.report_ac_power_and_measurement())
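
Several getters in this file (get_ac_power_measurement, get_windspeed_10m, get_temperature) are now annotated as returning Optional[float], so downstream code has to handle missing values explicitly instead of assuming a float. A small sketch continuing the __main__ example above:

temperatures = [fd.get_temperature() for fd in forecast.get_forecast_data()]
known_temperatures = [t for t in temperatures if t is not None]  # mypy infers list[float]
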

View File

@ -120,7 +120,8 @@ def fastapi_gesamtlast(
leistung_haushalt = future_predictions["Adjusted Pred"].values
gesamtlast = Gesamtlast(prediction_hours=hours)
gesamtlast.hinzufuegen(
"Haushalt", leistung_haushalt
"Haushalt",
leistung_haushalt, # type: ignore[arg-type]
) # Add household load to total load calculation
# Calculate the total load
@ -182,12 +183,7 @@ def fastapi_pvprognose(url: str, ac_power_measurement: Optional[float] = None) -
pv_forecast = PVforecast.get_pv_forecast_for_date_range(date_now, date)
temperature_forecast = PVforecast.get_temperature_for_date_range(date_now, date)
# Return both forecasts as a JSON response
ret = {
"temperature": temperature_forecast.tolist(),
"pvpower": pv_forecast.tolist(),
}
return ret
return ForecastResponse(temperature=temperature_forecast.tolist(), pvpower=pv_forecast.tolist())
@app.post("/optimize")
@ -203,12 +199,11 @@ def fastapi_optimize(
# Perform optimization simulation
result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour)
# print(result)
# convert to JSON (None accepted by dumps)
return result
@app.get("/visualization_results.pdf", response_class=PdfResponse)
def get_pdf():
def get_pdf() -> PdfResponse:
# Endpoint to serve the generated PDF with visualization results
output_path = config.working_dir / config.directories.output
if not output_path.is_dir():
@ -216,16 +211,16 @@ def get_pdf():
file_path = output_path / "visualization_results.pdf"
if not file_path.is_file():
raise HTTPException(status_code=404, detail="No visualization result available.")
return FileResponse(file_path)
return PdfResponse(file_path)
@app.get("/site-map", include_in_schema=False)
def site_map():
def site_map() -> RedirectResponse:
return RedirectResponse(url="/docs")
@app.get("/", include_in_schema=False)
def root():
def root() -> RedirectResponse:
# Redirect the root URL to the site map
return RedirectResponse(url="/docs")
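
get_pdf now uses PdfResponse both as response_class and as its declared return type. PdfResponse is defined elsewhere in this project; assuming it is a thin FileResponse subclass, it would look roughly like this (the class body is a guess, only the name comes from the diff):

from fastapi.responses import FileResponse

class PdfResponse(FileResponse):
    # The class-level media type is what FastAPI advertises in the OpenAPI schema;
    # at runtime FileResponse still derives the content type from the served file.
    media_type = "application/pdf"

Declaring concrete return types here (PdfResponse, RedirectResponse) is what lets mypy check these handlers instead of treating them as returning Any.
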

View File

@ -25,6 +25,8 @@ Notes:
- Cache files are automatically associated with the current date unless specified.
"""
from __future__ import annotations
import hashlib
import inspect
import os
@ -32,7 +34,7 @@ import pickle
import tempfile
import threading
from datetime import date, datetime, time, timedelta
from typing import List, Optional, Union
from typing import IO, Callable, Generic, List, Optional, ParamSpec, TypeVar, Union
from akkudoktoreos.utils.datetimeutil import to_datetime, to_timedelta
from akkudoktoreos.utils.logutil import get_logger
@ -40,15 +42,20 @@ from akkudoktoreos.utils.logutil import get_logger
logger = get_logger(__file__)
class CacheFileStoreMeta(type):
T = TypeVar("T")
Param = ParamSpec("Param")
RetType = TypeVar("RetType")
class CacheFileStoreMeta(type, Generic[T]):
"""A thread-safe implementation of CacheFileStore."""
_instances = {}
_instances: dict[CacheFileStoreMeta[T], T] = {}
_lock: threading.Lock = threading.Lock()
"""Lock object to synchronize threads on first access to CacheFileStore."""
def __call__(cls):
def __call__(cls) -> T:
"""Return CacheFileStore instance."""
with cls._lock:
if cls not in cls._instances:
@ -80,18 +87,18 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
>>> print(cache_file.read()) # Output: 'Some data'
"""
def __init__(self):
def __init__(self) -> None:
"""Initializes the CacheFileStore instance.
This constructor sets up an empty key-value store (a dictionary) where each key
corresponds to a cache file that is associated with a given key and an optional date.
"""
self._store = {}
self._store: dict[str, tuple[IO[bytes], datetime]] = {}
self._store_lock = threading.Lock()
def _generate_cache_file_key(
self, key: str, until_datetime: Union[datetime, None]
) -> (str, datetime):
) -> tuple[str, datetime]:
"""Generates a unique cache file key based on the key and date.
The cache file key is a combination of the input key and the date (if provided),
@ -114,7 +121,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
cache_key = hashlib.sha256(f"{key}{key_datetime}".encode("utf-8")).hexdigest()
return (f"{cache_key}", until_datetime)
def _get_file_path(self, file_obj):
def _get_file_path(self, file_obj: IO[bytes]) -> Optional[str]:
"""Retrieve the file path from a file-like object.
Args:
@ -136,7 +143,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
until_date: Union[datetime, date, str, int, float, None] = None,
until_datetime: Union[datetime, date, str, int, float, None] = None,
with_ttl: Union[timedelta, str, int, float, None] = None,
):
) -> datetime:
"""Get until_datetime from the given options."""
if until_datetime:
until_datetime = to_datetime(until_datetime)
@ -152,11 +159,11 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
def _is_valid_cache_item(
self,
cache_item: (),
until_datetime: datetime = None,
at_datetime: datetime = None,
before_datetime: datetime = None,
):
cache_item: tuple[IO[bytes], datetime],
until_datetime: Optional[datetime] = None,
at_datetime: Optional[datetime] = None,
before_datetime: Optional[datetime] = None,
) -> bool:
cache_file_datetime = cache_item[1] # Extract the datetime associated with the cache item
if (
(until_datetime and until_datetime == cache_file_datetime)
@ -169,10 +176,10 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
def _search(
self,
key: str,
until_datetime: Union[datetime, date, str, int, float] = None,
at_datetime: Union[datetime, date, str, int, float] = None,
before_datetime: Union[datetime, date, str, int, float] = None,
):
until_datetime: Union[datetime, date, str, int, float, None] = None,
at_datetime: Union[datetime, date, str, int, float, None] = None,
before_datetime: Union[datetime, date, str, int, float, None] = None,
) -> Optional[tuple[str, IO[bytes], datetime]]:
"""Searches for a cached item that matches the key and falls within the datetime range.
This method looks for a cache item with a key that matches the given `key`, and whose associated
@ -193,20 +200,23 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
otherwise returns `None`.
"""
# Convert input to datetime if they are not None
if until_datetime:
until_datetime = to_datetime(until_datetime)
if at_datetime:
at_datetime = to_datetime(at_datetime)
if before_datetime:
before_datetime = to_datetime(before_datetime)
until_datetime_dt: Optional[datetime] = None
if until_datetime is not None:
until_datetime_dt = to_datetime(until_datetime)
at_datetime_dt: Optional[datetime] = None
if at_datetime is not None:
at_datetime_dt = to_datetime(at_datetime)
before_datetime_dt: Optional[datetime] = None
if before_datetime is not None:
before_datetime_dt = to_datetime(before_datetime)
for cache_file_key, cache_item in self._store.items():
# Check if the cache file datetime matches the given criteria
if self._is_valid_cache_item(
cache_item,
until_datetime=until_datetime,
at_datetime=at_datetime,
before_datetime=before_datetime,
until_datetime=until_datetime_dt,
at_datetime=at_datetime_dt,
before_datetime=before_datetime_dt,
):
# This cache file is within the given datetime range
# Extract the datetime associated with the cache item
@ -231,7 +241,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
mode: str = "wb+",
delete: bool = False,
suffix: Optional[str] = None,
):
) -> IO[bytes]:
"""Creates a new file-like tempfile object associated with the given key.
If a cache file with the given key and valid timedate already exists, the existing file is
@ -262,31 +272,31 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
>>> cache_file.seek(0)
>>> print(cache_file.read()) # Output: 'Some cached data'
"""
until_datetime = self._until_datetime_by_options(
until_datetime_dt = self._until_datetime_by_options(
until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
)
cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime)
cache_file_key, _ = self._generate_cache_file_key(key, until_datetime_dt)
with self._store_lock: # Synchronize access to _store
if cache_file_key in self._store:
if (cache_file_item := self._store.get(cache_file_key)) is not None:
# File already available
cache_file_obj, until_datetime = self._store.get(cache_file_key)
cache_file_obj = cache_file_item[0]
else:
cache_file_obj = tempfile.NamedTemporaryFile(
mode=mode, delete=delete, suffix=suffix
)
self._store[cache_file_key] = (cache_file_obj, until_datetime)
self._store[cache_file_key] = (cache_file_obj, until_datetime_dt)
cache_file_obj.seek(0)
return cache_file_obj
def set(
self,
key: str,
file_obj,
file_obj: IO[bytes],
until_date: Union[datetime, date, str, int, float, None] = None,
until_datetime: Union[datetime, date, str, int, float, None] = None,
with_ttl: Union[timedelta, str, int, float, None] = None,
):
) -> None:
"""Stores a file-like object in the cache under the specified key and date.
This method allows you to manually set a file-like object into the cache with a specific key
@ -309,11 +319,11 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
Example:
>>> cache_store.set('example_file', io.BytesIO(b'Some binary data'))
"""
until_datetime = self._until_datetime_by_options(
until_datetime_dt = self._until_datetime_by_options(
until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
)
cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime)
cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime_dt)
with self._store_lock: # Synchronize access to _store
if cache_file_key in self._store:
raise ValueError(f"Key already in store: `{key}`.")
@ -327,7 +337,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
until_datetime: Union[datetime, date, str, int, float, None] = None,
at_datetime: Union[datetime, date, str, int, float, None] = None,
before_datetime: Union[datetime, date, str, int, float, None] = None,
):
) -> Optional[IO[bytes]]:
"""Retrieves the cache file associated with the given key and validity datetime.
If no cache file is found for the provided key and datetime, the method returns None.
@ -374,11 +384,11 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
def delete(
self,
key,
key: str,
until_date: Union[datetime, date, str, int, float, None] = None,
until_datetime: Union[datetime, date, str, int, float, None] = None,
before_datetime: Union[datetime, date, str, int, float, None] = None,
):
) -> None:
"""Deletes the cache file associated with the given key and datetime.
This method removes the cache file from the store.
@ -429,8 +439,10 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
logger.error(f"Error deleting cache file {file_path}: {e}")
def clear(
self, clear_all=False, before_datetime: Union[datetime, date, str, int, float, None] = None
):
self,
clear_all: bool = False,
before_datetime: Union[datetime, date, str, int, float, None] = None,
) -> None:
"""Deletes all cache files or those expiring before `before_datetime`.
Args:
@ -500,7 +512,7 @@ def cache_in_file(
mode: str = "wb+",
delete: bool = False,
suffix: Optional[str] = None,
):
) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]:
"""Decorator to cache the output of a function into a temporary file.
The decorator caches function output to a cache file based on its inputs as key to identify the
@ -545,35 +557,35 @@ def cache_in_file(
>>> result = expensive_computation(until_date = date.today())
"""
def decorator(func):
def decorator(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
func_source_code = inspect.getsource(func)
def wrapper(*args, **kwargs):
def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType:
nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
# Convert args to a dictionary based on the function's signature
args_names = func.__code__.co_varnames[: func.__code__.co_argcount]
args_dict = dict(zip(args_names, args))
# Search for caching parameters of function and remove
force_update = None
force_update: Optional[bool] = None
for param in ["force_update", "until_datetime", "with_ttl", "until_date"]:
if param in kwargs:
if param == "force_update":
force_update = kwargs[param]
force_update = kwargs[param] # type: ignore[assignment]
kwargs.pop("force_update")
if param == "until_datetime":
until_datetime = kwargs[param]
until_datetime = kwargs[param] # type: ignore[assignment]
until_date = None
with_ttl = None
elif param == "with_ttl":
until_datetime = None
until_date = None
with_ttl = kwargs[param]
with_ttl = kwargs[param] # type: ignore[assignment]
elif param == "until_date":
until_datetime = None
until_date = kwargs[param]
until_date = kwargs[param] # type: ignore[assignment]
with_ttl = None
kwargs.pop("until_datetime", None)
kwargs.pop("until_date", None)
@ -589,7 +601,7 @@ def cache_in_file(
# Create key based on argument names, argument values, and function source code
key = str(args_dict) + str(kwargs_clone) + str(func_source_code)
result = None
result: Optional[RetType | bytes] = None
# Get cache file that is currently valid
cache_file = CacheFileStore().get(key)
if not force_update and cache_file is not None:
@ -624,11 +636,11 @@ def cache_in_file(
if "b" in mode:
pickle.dump(result, cache_file)
else:
cache_file.write(result)
cache_file.write(result) # type: ignore[call-overload]
except Exception as e:
logger.info(f"Write failed: {e}")
CacheFileStore().delete(key)
return result
return result # type: ignore[return-value]
return wrapper
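
The typing changes in this file hinge on ParamSpec/TypeVar: by annotating the decorator as Callable[[Callable[Param, RetType]], Callable[Param, RetType]], the wrapper keeps the wrapped function's parameter list and return type visible to mypy. A self-contained sketch of the same pattern (names are illustrative, not taken from the module):

import functools
from typing import Callable, ParamSpec, TypeVar

Param = ParamSpec("Param")
RetType = TypeVar("RetType")

def log_calls(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
    @functools.wraps(func)
    def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType:
        # The wrapper forwards the exact argument types and returns the exact return type.
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)
    return wrapper

@log_calls
def add(a: int, b: int) -> int:
    return a + b

result = add(1, 2)  # mypy infers int; add("1", 2) would be flagged
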

View File

@ -24,19 +24,39 @@ Example usage:
import re
from datetime import date, datetime, time, timedelta, timezone
from typing import Optional, Union
from typing import Annotated, Literal, Optional, Union, overload
from zoneinfo import ZoneInfo
from timezonefinder import TimezoneFinder
@overload
def to_datetime(
date_input: Union[datetime, date, str, int, float, None],
as_string: str | Literal[True],
to_timezone: Optional[Union[ZoneInfo, str]] = None,
to_naiv: Optional[bool] = None,
to_maxtime: Optional[bool] = None,
) -> str: ...
@overload
def to_datetime(
date_input: Union[datetime, date, str, int, float, None],
as_string: Literal[False] | None = None,
to_timezone: Optional[Union[ZoneInfo, str]] = None,
to_naiv: Optional[bool] = None,
to_maxtime: Optional[bool] = None,
) -> datetime: ...
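
The two @overload stubs above let mypy resolve the return type from the as_string argument at each call site: a format string or Literal[True] yields str, while omitting it (or passing False/None) yields datetime. A short usage sketch (values are illustrative):

from datetime import datetime
from akkudoktoreos.utils.datetimeutil import to_datetime

dt: datetime = to_datetime("2024-10-13")  # second overload -> datetime
iso: str = to_datetime("2024-10-13", as_string="%Y-%m-%dT%H:%M:%S")  # first overload -> str

The same technique is applied to to_timezone further down, where as_string=True selects the str overload and the default returns a ZoneInfo.
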
def to_datetime(
date_input: Union[datetime, date, str, int, float, None],
as_string: Optional[Union[str, bool]] = None,
to_timezone: Optional[Union[timezone, str]] = None,
to_timezone: Optional[Union[ZoneInfo, str]] = None,
to_naiv: Optional[bool] = None,
to_maxtime: Optional[bool] = None,
):
) -> str | datetime:
"""Converts a date input to a datetime object or a formatted string with timezone support.
Args:
@ -67,7 +87,9 @@ def to_datetime(
Raises:
ValueError: If the date input is not a valid type or format.
RuntimeError: If no local timezone information is available.
"""
dt_object: Optional[datetime] = None
if isinstance(date_input, datetime):
dt_object = date_input
elif isinstance(date_input, date):
@ -104,7 +126,6 @@ def to_datetime(
dt_object = datetime.strptime(date_input, fmt)
break
except ValueError as e:
dt_object = None
continue
if dt_object is None:
raise ValueError(f"Date string {date_input} does not match any known formats.")
@ -120,11 +141,13 @@ def to_datetime(
local_date = datetime.now().astimezone()
local_tz_name = local_date.tzname()
local_utc_offset = local_date.utcoffset()
if local_tz_name is None or local_utc_offset is None:
raise RuntimeError("Could not determine local time zone")
local_timezone = timezone(local_utc_offset, local_tz_name)
# Get target timezone
if to_timezone:
if isinstance(to_timezone, timezone):
if isinstance(to_timezone, ZoneInfo):
target_timezone = to_timezone
elif isinstance(to_timezone, str):
try:
@ -168,7 +191,11 @@ def to_datetime(
return dt_object
def to_timedelta(input_value):
def to_timedelta(
input_value: Union[
timedelta, str, int, float, tuple[int, int, int, int], Annotated[list[int], 4]
],
) -> timedelta:
"""Converts various input types into a timedelta object.
Args:
@ -238,7 +265,15 @@ def to_timedelta(input_value):
raise ValueError(f"Unsupported input type: {type(input_value)}")
def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None):
@overload
def to_timezone(lat: float, lon: float, as_string: Literal[True]) -> str: ...
@overload
def to_timezone(lat: float, lon: float, as_string: Literal[False] | None = None) -> ZoneInfo: ...
def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None) -> str | ZoneInfo:
"""Determines the timezone for a given geographic location specified by latitude and longitude.
By default, it returns a `ZoneInfo` object representing the timezone.
@ -269,11 +304,13 @@ def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None):
"""
# Initialize the static variable only once
if not hasattr(to_timezone, "timezone_finder"):
to_timezone.timezone_finder = TimezoneFinder() # static variable
# static variable
to_timezone.timezone_finder = TimezoneFinder() # type: ignore[attr-defined]
# Check and convert coordinates to timezone
tz_name: Optional[str] = None
try:
tz_name = to_timezone.timezone_finder.timezone_at(lat=lat, lng=lon)
tz_name = to_timezone.timezone_finder.timezone_at(lat=lat, lng=lon) # type: ignore[attr-defined]
if not tz_name:
raise ValueError(f"No timezone found for coordinates: latitude {lat}, longitude {lon}")
except Exception as e:

View File

@ -1,12 +1,13 @@
import datetime
import json
import zoneinfo
from typing import Any
import numpy as np
# currently unused
def ist_dst_wechsel(tag: datetime.datetime, timezone="Europe/Berlin") -> bool:
def ist_dst_wechsel(tag: datetime.datetime, timezone: str = "Europe/Berlin") -> bool:
"""Checks if Daylight Saving Time (DST) starts or ends on a given day."""
tz = zoneinfo.ZoneInfo(timezone)
# Get the current day and the next day
@ -20,15 +21,25 @@ def ist_dst_wechsel(tag: datetime.datetime, timezone="Europe/Berlin") -> bool:
class NumpyEncoder(json.JSONEncoder):
def default(self, obj):
@classmethod
def convert_numpy(cls, obj: Any) -> tuple[Any, bool]:
if isinstance(obj, np.ndarray):
return obj.tolist() # Convert NumPy arrays to lists
# Convert NumPy arrays to lists
return [
None if isinstance(x, (int, float)) and np.isnan(x) else x for x in obj.tolist()
], True
if isinstance(obj, np.generic):
return obj.item() # Convert NumPy scalars to native Python types
return obj.item(), True # Convert NumPy scalars to native Python types
return obj, False
def default(self, obj: Any) -> Any:
obj, converted = NumpyEncoder.convert_numpy(obj)
if converted:
return obj
return super(NumpyEncoder, self).default(obj)
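
convert_numpy centralises the NumPy-to-JSON conversion: ndarrays become lists with NaN mapped to None, NumPy scalars are unwrapped via .item(), and everything else falls through to the standard encoder. A short usage sketch with the class defined above:

import json
import numpy as np

payload = {"soc": np.array([1.0, np.nan, 3.0]), "hours": np.int64(48)}
print(json.dumps(payload, cls=NumpyEncoder))  # {"soc": [1.0, null, 3.0], "hours": 48}
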
@staticmethod
def dumps(data):
def dumps(data: Any) -> str:
"""Static method to serialize a Python object into a JSON string using NumpyEncoder.
Args:

View File

@ -1,4 +1,6 @@
# Set the backend for matplotlib to Agg
from typing import Any, Optional
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
@ -10,20 +12,20 @@ matplotlib.use("Agg")
def visualisiere_ergebnisse(
gesamtlast,
pv_forecast,
strompreise,
ergebnisse,
ac, # AC charging allowed
dc, # DC charging allowed
discharge, # Discharge allowed
temperature,
start_hour,
einspeiseverguetung_euro_pro_wh,
gesamtlast: list[float],
pv_forecast: list[float],
strompreise: list[float],
ergebnisse: dict[str, Any],
ac: np.ndarray, # AC charging allowed
dc: np.ndarray, # DC charging allowed
discharge: np.ndarray, # Discharge allowed
temperature: Optional[list[float]],
start_hour: int,
einspeiseverguetung_euro_pro_wh: np.ndarray,
config: AppConfig,
filename="visualization_results.pdf",
extra_data=None,
):
filename: str = "visualization_results.pdf",
extra_data: Optional[dict[str, Any]] = None,
) -> None:
#####################
# 24-hour visualization
#####################
@ -81,13 +83,14 @@ def visualisiere_ergebnisse(
plt.grid(True)
# Temperature forecast
plt.subplot(3, 2, 5)
plt.title("Temperature Forecast (°C)")
plt.plot(hours, temperature, label="Temperature (°C)", marker="x")
plt.xlabel("Hour of the Day")
plt.ylabel("°C")
plt.legend()
plt.grid(True)
if temperature is not None:
plt.subplot(3, 2, 5)
plt.title("Temperature Forecast (°C)")
plt.plot(hours, temperature, label="Temperature (°C)", marker="x")
plt.xlabel("Hour of the Day")
plt.ylabel("°C")
plt.legend()
plt.grid(True)
pdf.savefig() # Save the current figure state to the PDF
plt.close() # Close the current figure to free up memory
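
With temperature typed as Optional[list[float]], the temperature panel is simply skipped when no forecast is supplied. The guard pattern in isolation, as a runnable sketch (function and output file names are illustrative):

import matplotlib
matplotlib.use("Agg")  # non-interactive backend, as in the module above
import matplotlib.pyplot as plt
from typing import Optional

def plot_temperature_panel(hours: list[int], temperature: Optional[list[float]]) -> None:
    plt.figure()
    if temperature is not None:  # skip the panel entirely when no forecast is available
        plt.plot(hours, temperature, label="Temperature (°C)", marker="x")
        plt.legend()
    plt.savefig("temperature_panel.png")
    plt.close()
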

View File

@ -15,7 +15,7 @@ from akkudoktoreos.config import EOS_DIR, AppConfig, load_config
def load_config_tmp(tmp_path: Path) -> AppConfig:
"""Creates an AppConfig from default.config.json with a tmp output directory."""
config = load_config(tmp_path)
config.directories.output = tmp_path
config.directories.output = str(tmp_path)
return config

View File

@ -299,7 +299,7 @@ def test_cache_in_file_decorator_forces_update(cache_store):
cache_file.write(result2)
# Call the decorated function again with force update (should get result from function)
result = my_function(until_date=until_date, force_update=True)
result = my_function(until_date=until_date, force_update=True) # type: ignore[call-arg]
assert result == result1
# Assure result was written to the same cache file
@ -319,7 +319,7 @@ def test_cache_in_file_handles_ttl(cache_store):
return "New result"
# Call the decorated function
result = my_function(with_ttl="1 second")
result = my_function(with_ttl="1 second") # type: ignore[call-arg]
# Overwrite cache file
key = next(iter(cache_store._store))
@ -330,14 +330,14 @@ def test_cache_in_file_handles_ttl(cache_store):
cache_file.seek(0) # Move to the start of the file
assert cache_file.read() == "Modified result"
result = my_function(with_ttl="1 second")
result = my_function(with_ttl="1 second") # type: ignore[call-arg]
assert result == "Modified result"
# Wait one second to let the cache time out
sleep(1)
# Call again - cache should be timed out
result = my_function(with_ttl="1 second")
result = my_function(with_ttl="1 second") # type: ignore[call-arg]
assert result == "New result"
@ -349,7 +349,7 @@ def test_cache_in_file_handles_bytes_return(cache_store):
# Define a function that returns bytes
@cache_in_file()
def my_function(until_date=None):
def my_function(until_date=None) -> bytes:
return b"Some binary data"
# Call the decorated function
@ -358,7 +358,14 @@ def test_cache_in_file_handles_bytes_return(cache_store):
# Check if the binary data was written to the cache file
key = next(iter(cache_store._store))
cache_file = cache_store._store[key][0]
assert len(cache_store._store) == 1
assert cache_file is not None
cache_file.seek(0)
result1 = pickle.load(cache_file)
assert result1 == result
# Access cache
result = my_function(until_date=datetime.now() + timedelta(days=1))
assert len(cache_store._store) == 1
assert cache_store._store[key][0] is not None
assert result1 == result
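
The # type: ignore[call-arg] markers in these tests exist because cache_in_file consumes keywords such as force_update, with_ttl, and until_date inside its wrapper, while mypy checks each call against the wrapped function's declared signature, which does not list them. A compressed illustration using the decorator and function as defined above in this test file:

@cache_in_file()
def my_function(until_date=None) -> str:
    return "New result"

my_function(with_ttl="1 second")  # type: ignore[call-arg]  # keyword is consumed by the cache wrapper
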

View File

@ -8,6 +8,7 @@ from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParamet
from akkudoktoreos.prediction.ems import (
EnergieManagementSystem,
EnergieManagementSystemParameters,
SimulationResult,
)
prediction_hours = 48
@ -211,9 +212,9 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
preis_euro_pro_wh_akku=preis_euro_pro_wh_akku,
gesamtlast=gesamtlast,
),
wechselrichter=wechselrichter,
eauto=eauto,
home_appliance=home_appliance,
wechselrichter=wechselrichter,
)
return ems
@ -255,26 +256,7 @@ def test_simulation(create_ems_instance):
# Check that the result is a dictionary
assert isinstance(result, dict), "Result should be a dictionary."
# Verify that the expected keys are present in the result
expected_keys = [
"Last_Wh_pro_Stunde",
"Netzeinspeisung_Wh_pro_Stunde",
"Netzbezug_Wh_pro_Stunde",
"Kosten_Euro_pro_Stunde",
"akku_soc_pro_stunde",
"Einnahmen_Euro_pro_Stunde",
"Gesamtbilanz_Euro",
"EAuto_SoC_pro_Stunde",
"Gesamteinnahmen_Euro",
"Gesamtkosten_Euro",
"Verluste_Pro_Stunde",
"Gesamt_Verluste",
"Home_appliance_wh_per_hour",
]
for key in expected_keys:
assert key in result, f"The key '{key}' should be present in the result."
assert SimulationResult(**result) is not None
# Check the length of the main arrays
assert (
@ -344,7 +326,7 @@ def test_simulation(create_ems_instance):
assert (
np.nansum(
np.where(
np.equal(result["Home_appliance_wh_per_hour"], None),
result["Home_appliance_wh_per_hour"] is None,
np.nan,
np.array(result["Home_appliance_wh_per_hour"]),
)

View File

@ -44,13 +44,13 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
)
# Parameters based on previous example data
pv_prognose_wh = np.full(prediction_hours, 0)
pv_prognose_wh = [0.0] * prediction_hours
pv_prognose_wh[10] = 5000.0
pv_prognose_wh[11] = 5000.0
strompreis_euro_pro_wh = np.full(48, 0.001)
strompreis_euro_pro_wh[0:10] = 0.00001
strompreis_euro_pro_wh[11:15] = 0.00005
strompreis_euro_pro_wh = [0.001] * prediction_hours
strompreis_euro_pro_wh[0:10] = [0.00001] * 10
strompreis_euro_pro_wh[11:15] = [0.00005] * 4
strompreis_euro_pro_wh[20] = 0.00001
einspeiseverguetung_euro_pro_wh = [0.00007] * len(strompreis_euro_pro_wh)
@ -116,9 +116,9 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
preis_euro_pro_wh_akku=0,
gesamtlast=gesamtlast,
),
wechselrichter=wechselrichter,
eauto=eauto,
home_appliance=home_appliance,
wechselrichter=wechselrichter,
)
ac = np.full(prediction_hours, 0)

View File

@ -54,7 +54,7 @@ def test_optimize(
file = DIR_TESTDATA / fn_out
with file.open("r") as f_out:
expected_output_data = json.load(f_out)
expected_result = OptimizeResponse(**json.load(f_out))
opt_class = optimization_problem(tmp_config, fixed_seed=42)
start_hour = 10
@ -72,9 +72,7 @@ def test_optimize(
# Assert that the output contains all expected entries.
# This does not assert that the optimization always gives the same result!
# Reproducibility and mathematical accuracy should be tested on the level of individual components.
compare_dict(ergebnis, expected_output_data)
compare_dict(ergebnis.model_dump(), expected_result.model_dump())
# The function creates a visualization result PDF as a side-effect.
visualisiere_ergebnisse_patch.assert_called_once()
OptimizeResponse(**ergebnis)

View File

@ -49,7 +49,7 @@ def test_config_merge(tmp_path: Path) -> None:
with pytest.raises(ValueError):
# custom configuration is broken but not updated.
load_config(tmp_path, tmp_path, False)
load_config(tmp_path, True, False)
with config_file.open("r") as f_in:
# custom configuration is not changed.

View File

@ -121,7 +121,7 @@ def test_update_ac_power_measurement(pv_forecast_instance, sample_forecast_start
forecast_start = pv_forecast_instance.get_forecast_start()
assert forecast_start == sample_forecast_start
updated = pv_forecast_instance.update_ac_power_measurement(forecast_start, 1000)
updated = pv_forecast_instance.update_ac_power_measurement(1000, forecast_start)
assert updated is True
forecast_data = pv_forecast_instance.get_forecast_data()
assert forecast_data[0].ac_power_measurement == 1000
@ -130,7 +130,7 @@ def test_update_ac_power_measurement(pv_forecast_instance, sample_forecast_start
def test_update_ac_power_measurement_no_match(pv_forecast_instance):
"""Test updating AC power measurement where no date matches."""
date_time = datetime(2023, 10, 2, 1, 0, 0)
updated = pv_forecast_instance.update_ac_power_measurement(date_time, 1000)
updated = pv_forecast_instance.update_ac_power_measurement(1000, date_time)
assert not updated
@ -265,7 +265,7 @@ def test_timezone_behaviour(
# Test updating AC power measurement for a specific date.
date_time = pv_forecast_instance.get_forecast_start()
assert date_time == sample_forecast_start
updated = pv_forecast_instance.update_ac_power_measurement(date_time, 1000)
updated = pv_forecast_instance.update_ac_power_measurement(1000, date_time)
assert updated is True
forecast_data = pv_forecast_instance.get_forecast_data()
assert forecast_data[0].ac_power_measurement == 1000