* Mypy: Initial support

 * Add to pre-commit (currently installs its own deps; this could maybe be
   changed to use the poetry venv in the future, to reuse the environment
   and avoid duplicated type stub deps).
 * Add type hints.

* Mypy: Add missing annotations
Dominique Lasserre 2024-11-26 22:28:05 +01:00 committed by GitHub
parent 2a163569bc
commit 1163ddb4ac
31 changed files with 637 additions and 531 deletions
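The new checks can also be run locally; a rough sketch based on the Makefile target and pre-commit hook added below (the mirrors-mypy hook installs its own mypy plus the pinned stub packages, independent of the project venv):

    make mypy                                  # runs .venv/bin/mypy with the [tool.mypy] settings from pyproject.toml
    .venv/bin/pre-commit run mypy --all-files  # runs the mypy hook in its isolated pre-commit environment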

View File

@@ -24,3 +24,11 @@ repos:
         args: [--fix]
       # Run the formatter.
       - id: ruff-format
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: 'v1.13.0'
+    hooks:
+      - id: mypy
+        additional_dependencies:
+          - "types-requests==2.32.0.20241016"
+          - "pandas-stubs==2.2.3.241009"
+        pass_filenames: false

View File

@@ -1,5 +1,5 @@
 # Define the targets
-.PHONY: help venv pip install dist test test-full docker-run docker-build docs read-docs clean format run run-dev
+.PHONY: help venv pip install dist test test-full docker-run docker-build docs read-docs clean format mypy run run-dev
 
 # Default target
 all: help
@@ -11,6 +11,7 @@ help:
 	@echo " pip - Install dependencies from requirements.txt."
 	@echo " pip-dev - Install dependencies from requirements-dev.txt."
 	@echo " format - Format source code."
+	@echo " mypy - Run mypy."
 	@echo " install - Install EOS in editable form (development mode) into virtual environment."
 	@echo " docker-run - Run entire setup on docker"
 	@echo " docker-build - Rebuild docker image"
@@ -100,6 +101,10 @@ test-full:
 format:
 	.venv/bin/pre-commit run --all-files
 
+# Target to format code.
+mypy:
+	.venv/bin/mypy
+
 # Run entire setup on docker
 docker-run:
 	@docker compose up --remove-orphans

View File

@ -363,17 +363,17 @@
"hours": { "hours": {
"type": "integer", "type": "integer",
"title": "Hours", "title": "Hours",
"default": "Amount of hours the simulation is done for." "description": "Amount of hours the simulation is done for."
}, },
"kapazitaet_wh": { "kapazitaet_wh": {
"type": "integer", "type": "integer",
"title": "Kapazitaet Wh", "title": "Kapazitaet Wh",
"default": "The capacity of the EV\u2019s battery in watt-hours." "description": "The capacity of the EV\u2019s battery in watt-hours."
}, },
"lade_effizienz": { "lade_effizienz": {
"type": "number", "type": "number",
"title": "Lade Effizienz", "title": "Lade Effizienz",
"default": "The charging efficiency as a float." "description": "The charging efficiency as a float."
}, },
"max_ladeleistung_w": { "max_ladeleistung_w": {
"type": "integer", "type": "integer",
@ -396,6 +396,9 @@
"charge_array", "charge_array",
"discharge_array", "discharge_array",
"entlade_effizienz", "entlade_effizienz",
"hours",
"kapazitaet_wh",
"lade_effizienz",
"max_ladeleistung_w", "max_ladeleistung_w",
"soc_wh", "soc_wh",
"start_soc_prozent" "start_soc_prozent"
@ -533,7 +536,14 @@
} }
}, },
"eauto": { "eauto": {
"$ref": "#/components/schemas/EAutoParameters" "anyOf": [
{
"$ref": "#/components/schemas/EAutoParameters"
},
{
"type": "null"
}
]
}, },
"dishwasher": { "dishwasher": {
"anyOf": [ "anyOf": [
@ -546,12 +556,19 @@
] ]
}, },
"temperature_forecast": { "temperature_forecast": {
"items": { "anyOf": [
"type": "number" {
}, "items": {
"type": "array", "type": "number"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Temperature Forecast", "title": "Temperature Forecast",
"default": "An array of floats representing the temperature forecast in degrees Celsius for different time intervals." "description": "An array of floats representing the temperature forecast in degrees Celsius for different time intervals."
}, },
"start_solution": { "start_solution": {
"anyOf": [ "anyOf": [
@ -603,11 +620,33 @@
"title": "Discharge Allowed", "title": "Discharge Allowed",
"description": "Array with discharge values (1 for discharge, 0 otherwise)." "description": "Array with discharge values (1 for discharge, 0 otherwise)."
}, },
"eautocharge_hours_float": {
"anyOf": [
{
"items": {
"type": "number"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Eautocharge Hours Float",
"description": "TBD"
},
"result": { "result": {
"$ref": "#/components/schemas/SimulationResult" "$ref": "#/components/schemas/SimulationResult"
}, },
"eauto_obj": { "eauto_obj": {
"$ref": "#/components/schemas/EAutoResult" "anyOf": [
{
"$ref": "#/components/schemas/EAutoResult"
},
{
"type": "null"
}
]
}, },
"start_solution": { "start_solution": {
"anyOf": [ "anyOf": [
@ -642,6 +681,7 @@
"ac_charge", "ac_charge",
"dc_charge", "dc_charge",
"discharge_allowed", "discharge_allowed",
"eautocharge_hours_float",
"result", "result",
"eauto_obj" "eauto_obj"
], ],

View File

@@ -72,3 +72,25 @@ convention = "google"
 minversion = "8.3.3"
 pythonpath = [ "src", ]
 testpaths = [ "tests", ]
+
+[tool.mypy]
+files = ["src", "tests"]
+exclude = "class_soc_calc\\.py$"
+check_untyped_defs = true
+warn_unused_ignores = true
+
+[[tool.mypy.overrides]]
+module = "akkudoktoreos.*"
+disallow_untyped_defs = true
+
+[[tool.mypy.overrides]]
+module = "sklearn.*"
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = "deap.*"
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = "xprocess.*"
+ignore_missing_imports = true

View File

@@ -8,3 +8,6 @@ pytest==8.3.3
 pytest-cov==6.0.0
 pytest-xprocess==1.0.2
 pre-commit
+mypy==1.13.0
+types-requests==2.32.0.20241016
+pandas-stubs==2.2.3.241009

View File

@@ -7,11 +7,8 @@ import numpy as np
 from akkudoktoreos.config import get_working_dir, load_config
 from akkudoktoreos.optimization.genetic import (
     OptimizationParameters,
-    OptimizeResponse,
     optimization_problem,
 )
-from akkudoktoreos.utils import NumpyEncoder
-from akkudoktoreos.visualize import visualisiere_ergebnisse
 
 start_hour = 0
@@ -299,30 +296,4 @@ elapsed_time = end_time - start_time
 print(f"Elapsed time: {elapsed_time:.4f} seconds")
 
-ac_charge, dc_charge, discharge = (
-    ergebnis["ac_charge"],
-    ergebnis["dc_charge"],
-    ergebnis["discharge_allowed"],
-)
-
-visualisiere_ergebnisse(
-    parameters.ems.gesamtlast,
-    parameters.ems.pv_prognose_wh,
-    parameters.ems.strompreis_euro_pro_wh,
-    ergebnis["result"],
-    ac_charge,
-    dc_charge,
-    discharge,
-    parameters.temperature_forecast,
-    start_hour,
-    einspeiseverguetung_euro_pro_wh=np.full(
-        config.eos.feed_in_tariff_eur_per_wh, parameters.ems.einspeiseverguetung_euro_pro_wh
-    ),
-    config=config,
-)
-
-json_data = NumpyEncoder.dumps(ergebnis)
-print(json_data)
-
-OptimizeResponse(**ergebnis)
+print(ergebnis.model_dump())

View File

@@ -14,7 +14,7 @@ class HomeApplianceParameters(BaseModel):
 
 class HomeAppliance:
-    def __init__(self, parameters: HomeApplianceParameters, hours=None):
+    def __init__(self, parameters: HomeApplianceParameters, hours: int):
         self.hours = hours  # Total duration for which the planning is done
         self.consumption_wh = (
             parameters.consumption_wh
@@ -22,7 +22,7 @@ class HomeAppliance:
         self.duration_h = parameters.duration_h  # Duration of use in hours
         self.load_curve = np.zeros(self.hours)  # Initialize the load curve with zeros
 
-    def set_starting_time(self, start_hour, global_start_hour=0):
+    def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
         """Sets the start time of the device and generates the corresponding load curve.
 
         :param start_hour: The hour at which the device should start.
@@ -40,15 +40,15 @@ class HomeAppliance:
         # Set the power for the duration of use in the load curve array
         self.load_curve[start_hour : start_hour + self.duration_h] = power_per_hour
 
-    def reset(self):
+    def reset(self) -> None:
         """Resets the load curve."""
         self.load_curve = np.zeros(self.hours)
 
-    def get_load_curve(self):
+    def get_load_curve(self) -> np.ndarray:
         """Returns the current load curve."""
         return self.load_curve
 
-    def get_load_for_hour(self, hour):
+    def get_load_for_hour(self, hour: int) -> float:
         """Returns the load for a specific hour.
 
         :param hour: The hour for which the load is queried.
@@ -59,6 +59,6 @@ class HomeAppliance:
         return self.load_curve[hour]
 
-    def get_latest_starting_point(self):
+    def get_latest_starting_point(self) -> int:
         """Returns the latest possible start time at which the device can still run completely."""
         return self.hours - self.duration_h

View File

@@ -13,7 +13,7 @@ Key features:
 import json
 import os
 import shutil
-from datetime import datetime, timedelta
+from datetime import date, datetime, timedelta
 from pathlib import Path
 from typing import Any, Optional
@@ -273,9 +273,7 @@ def get_working_dir() -> Path:
     return working_dir
 
-def get_start_enddate(
-    prediction_hours: int, startdate: Optional[datetime] = None
-) -> tuple[str, str]:
+def get_start_enddate(prediction_hours: int, startdate: Optional[date] = None) -> tuple[str, str]:
     """Calculate the start and end dates based on the given prediction hours and optional start date.
 
     Args:

View File

@ -1,19 +1,21 @@
from typing import Optional from typing import Any, Optional
import numpy as np import numpy as np
from pydantic import BaseModel, Field from pydantic import BaseModel, Field, field_validator
from akkudoktoreos.utils.utils import NumpyEncoder
def max_ladeleistung_w_field(default=None): def max_ladeleistung_w_field(default: Optional[float] = None) -> Optional[float]:
return Field( return Field(
default, default=default,
gt=0, gt=0,
description="An integer representing the charging power of the battery in watts.", description="An integer representing the charging power of the battery in watts.",
) )
def start_soc_prozent_field(description: str): def start_soc_prozent_field(description: str) -> int:
return Field(0, ge=0, le=100, description=description) return Field(default=0, ge=0, le=100, description=description)
class BaseAkkuParameters(BaseModel): class BaseAkkuParameters(BaseModel):
@ -21,20 +23,23 @@ class BaseAkkuParameters(BaseModel):
gt=0, description="An integer representing the capacity of the battery in watt-hours." gt=0, description="An integer representing the capacity of the battery in watt-hours."
) )
lade_effizienz: float = Field( lade_effizienz: float = Field(
0.88, gt=0, le=1, description="A float representing the charging efficiency of the battery." default=0.88,
gt=0,
le=1,
description="A float representing the charging efficiency of the battery.",
) )
entlade_effizienz: float = Field(0.88, gt=0, le=1) entlade_effizienz: float = Field(default=0.88, gt=0, le=1)
max_ladeleistung_w: Optional[float] = max_ladeleistung_w_field() max_ladeleistung_w: Optional[float] = max_ladeleistung_w_field()
start_soc_prozent: int = start_soc_prozent_field( start_soc_prozent: int = start_soc_prozent_field(
"An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)." "An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)."
) )
min_soc_prozent: int = Field( min_soc_prozent: int = Field(
0, default=0,
ge=0, ge=0,
le=100, le=100,
description="An integer representing the minimum state of charge (SOC) of the battery in percentage.", description="An integer representing the minimum state of charge (SOC) of the battery in percentage.",
) )
max_soc_prozent: int = Field(100, ge=0, le=100) max_soc_prozent: int = Field(default=100, ge=0, le=100)
class PVAkkuParameters(BaseAkkuParameters): class PVAkkuParameters(BaseAkkuParameters):
@ -48,6 +53,36 @@ class EAutoParameters(BaseAkkuParameters):
) )
class EAutoResult(BaseModel):
"""This object contains information related to the electric vehicle and its charging and discharging behavior."""
charge_array: list[float] = Field(
description="Indicates for each hour whether the EV is charging (`0` for no charging, `1` for charging)."
)
discharge_array: list[int] = Field(
description="Indicates for each hour whether the EV is discharging (`0` for no discharging, `1` for discharging)."
)
entlade_effizienz: float = Field(description="The discharge efficiency as a float.")
hours: int = Field(description="Amount of hours the simulation is done for.")
kapazitaet_wh: int = Field(description="The capacity of the EVs battery in watt-hours.")
lade_effizienz: float = Field(description="The charging efficiency as a float.")
max_ladeleistung_w: int = Field(description="The maximum charging power of the EV in watts.")
soc_wh: float = Field(
description="The state of charge of the battery in watt-hours at the start of the simulation."
)
start_soc_prozent: int = Field(
description="The state of charge of the battery in percentage at the start of the simulation."
)
@field_validator(
"discharge_array",
"charge_array",
mode="before",
)
def convert_numpy(cls, field: Any) -> Any:
return NumpyEncoder.convert_numpy(field)[0]
class PVAkku: class PVAkku:
def __init__(self, parameters: BaseAkkuParameters, hours: int = 24): def __init__(self, parameters: BaseAkkuParameters, hours: int = 24):
# Battery capacity in Wh # Battery capacity in Wh
@ -73,40 +108,20 @@ class PVAkku:
self.min_soc_wh = (self.min_soc_prozent / 100) * self.kapazitaet_wh self.min_soc_wh = (self.min_soc_prozent / 100) * self.kapazitaet_wh
self.max_soc_wh = (self.max_soc_prozent / 100) * self.kapazitaet_wh self.max_soc_wh = (self.max_soc_prozent / 100) * self.kapazitaet_wh
def to_dict(self): def to_dict(self) -> dict[str, Any]:
return { return {
"kapazitaet_wh": self.kapazitaet_wh, "kapazitaet_wh": self.kapazitaet_wh,
"start_soc_prozent": self.start_soc_prozent, "start_soc_prozent": self.start_soc_prozent,
"soc_wh": self.soc_wh, "soc_wh": self.soc_wh,
"hours": self.hours, "hours": self.hours,
"discharge_array": self.discharge_array.tolist(), # Convert np.array to list "discharge_array": self.discharge_array,
"charge_array": self.charge_array.tolist(), "charge_array": self.charge_array,
"lade_effizienz": self.lade_effizienz, "lade_effizienz": self.lade_effizienz,
"entlade_effizienz": self.entlade_effizienz, "entlade_effizienz": self.entlade_effizienz,
"max_ladeleistung_w": self.max_ladeleistung_w, "max_ladeleistung_w": self.max_ladeleistung_w,
} }
@classmethod def reset(self) -> None:
def from_dict(cls, data):
# Create a new object with basic data
obj = cls(
kapazitaet_wh=data["kapazitaet_wh"],
hours=data["hours"],
lade_effizienz=data["lade_effizienz"],
entlade_effizienz=data["entlade_effizienz"],
max_ladeleistung_w=data["max_ladeleistung_w"],
start_soc_prozent=data["start_soc_prozent"],
)
# Set arrays
obj.discharge_array = np.array(data["discharge_array"])
obj.charge_array = np.array(data["charge_array"])
obj.soc_wh = data[
"soc_wh"
] # Set current state of charge, which may differ from start_soc_prozent
return obj
def reset(self):
self.soc_wh = (self.start_soc_prozent / 100) * self.kapazitaet_wh self.soc_wh = (self.start_soc_prozent / 100) * self.kapazitaet_wh
# Ensure soc_wh is within min and max limits # Ensure soc_wh is within min and max limits
self.soc_wh = min(max(self.soc_wh, self.min_soc_wh), self.max_soc_wh) self.soc_wh = min(max(self.soc_wh, self.min_soc_wh), self.max_soc_wh)
@ -114,22 +129,22 @@ class PVAkku:
self.discharge_array = np.full(self.hours, 1) self.discharge_array = np.full(self.hours, 1)
self.charge_array = np.full(self.hours, 1) self.charge_array = np.full(self.hours, 1)
def set_discharge_per_hour(self, discharge_array): def set_discharge_per_hour(self, discharge_array: np.ndarray) -> None:
assert len(discharge_array) == self.hours assert len(discharge_array) == self.hours
self.discharge_array = np.array(discharge_array) self.discharge_array = np.array(discharge_array)
def set_charge_per_hour(self, charge_array): def set_charge_per_hour(self, charge_array: np.ndarray) -> None:
assert len(charge_array) == self.hours assert len(charge_array) == self.hours
self.charge_array = np.array(charge_array) self.charge_array = np.array(charge_array)
def set_charge_allowed_for_hour(self, charge, hour): def set_charge_allowed_for_hour(self, charge: float, hour: int) -> None:
assert hour < self.hours assert hour < self.hours
self.charge_array[hour] = charge self.charge_array[hour] = charge
def ladezustand_in_prozent(self): def ladezustand_in_prozent(self) -> float:
return (self.soc_wh / self.kapazitaet_wh) * 100 return (self.soc_wh / self.kapazitaet_wh) * 100
def energie_abgeben(self, wh, hour): def energie_abgeben(self, wh: float, hour: int) -> tuple[float, float]:
if self.discharge_array[hour] == 0: if self.discharge_array[hour] == 0:
return 0.0, 0.0 # No energy discharge and no losses return 0.0, 0.0 # No energy discharge and no losses
@ -160,9 +175,11 @@ class PVAkku:
# Return the actually discharged energy and the losses # Return the actually discharged energy and the losses
return tatsaechlich_abgegeben_wh, verluste_wh return tatsaechlich_abgegeben_wh, verluste_wh
def energie_laden(self, wh, hour, relative_power=0.0): def energie_laden(
self, wh: Optional[float], hour: int, relative_power: float = 0.0
) -> tuple[float, float]:
if hour is not None and self.charge_array[hour] == 0: if hour is not None and self.charge_array[hour] == 0:
return 0, 0 # Charging not allowed in this hour return 0.0, 0.0 # Charging not allowed in this hour
if relative_power > 0.0: if relative_power > 0.0:
wh = self.max_ladeleistung_w * relative_power wh = self.max_ladeleistung_w * relative_power
# If no value for wh is given, use the maximum charging power # If no value for wh is given, use the maximum charging power
@ -190,7 +207,7 @@ class PVAkku:
verluste_wh = effektive_lademenge - geladene_menge verluste_wh = effektive_lademenge - geladene_menge
return geladene_menge, verluste_wh return geladene_menge, verluste_wh
def aktueller_energieinhalt(self): def aktueller_energieinhalt(self) -> float:
"""This method returns the current remaining energy considering efficiency. """This method returns the current remaining energy considering efficiency.
It accounts for both charging and discharging efficiency. It accounts for both charging and discharging efficiency.
@ -204,11 +221,13 @@ if __name__ == "__main__":
# Test battery discharge below min_soc # Test battery discharge below min_soc
print("Test: Discharge below min_soc") print("Test: Discharge below min_soc")
akku = PVAkku( akku = PVAkku(
kapazitaet_wh=10000, PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1, hours=1,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
) )
akku.reset() akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%") print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")
@ -222,11 +241,13 @@ if __name__ == "__main__":
# Test battery charge above max_soc # Test battery charge above max_soc
print("\nTest: Charge above max_soc") print("\nTest: Charge above max_soc")
akku = PVAkku( akku = PVAkku(
kapazitaet_wh=10000, PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1, hours=1,
start_soc_prozent=50,
min_soc_prozent=20,
max_soc_prozent=80,
) )
akku.reset() akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%") print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")
@ -240,11 +261,13 @@ if __name__ == "__main__":
# Test charging when battery is at max_soc # Test charging when battery is at max_soc
print("\nTest: Charging when at max_soc") print("\nTest: Charging when at max_soc")
akku = PVAkku( akku = PVAkku(
kapazitaet_wh=10000, PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=80,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1, hours=1,
start_soc_prozent=80,
min_soc_prozent=20,
max_soc_prozent=80,
) )
akku.reset() akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%") print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")
@ -256,11 +279,13 @@ if __name__ == "__main__":
# Test discharging when battery is at min_soc # Test discharging when battery is at min_soc
print("\nTest: Discharging when at min_soc") print("\nTest: Discharging when at min_soc")
akku = PVAkku( akku = PVAkku(
kapazitaet_wh=10000, PVAkkuParameters(
kapazitaet_wh=10000,
start_soc_prozent=20,
min_soc_prozent=20,
max_soc_prozent=80,
),
hours=1, hours=1,
start_soc_prozent=20,
min_soc_prozent=20,
max_soc_prozent=80,
) )
akku.reset() akku.reset()
print(f"Initial SoC: {akku.ladezustand_in_prozent()}%") print(f"Initial SoC: {akku.ladezustand_in_prozent()}%")

View File

@@ -14,7 +14,7 @@ class HomeApplianceParameters(BaseModel):
 
 class HomeAppliance:
-    def __init__(self, parameters: HomeApplianceParameters, hours=None):
+    def __init__(self, parameters: HomeApplianceParameters, hours: int = 24):
         self.hours = hours  # Total duration for which the planning is done
         self.consumption_wh = (
             parameters.consumption_wh
@@ -22,7 +22,7 @@ class HomeAppliance:
         self.duration_h = parameters.duration_h  # Duration of use in hours
         self.load_curve = np.zeros(self.hours)  # Initialize the load curve with zeros
 
-    def set_starting_time(self, start_hour, global_start_hour=0):
+    def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
         """Sets the start time of the device and generates the corresponding load curve.
 
         :param start_hour: The hour at which the device should start.
@@ -40,15 +40,15 @@ class HomeAppliance:
         # Set the power for the duration of use in the load curve array
         self.load_curve[start_hour : start_hour + self.duration_h] = power_per_hour
 
-    def reset(self):
+    def reset(self) -> None:
         """Resets the load curve."""
         self.load_curve = np.zeros(self.hours)
 
-    def get_load_curve(self):
+    def get_load_curve(self) -> np.ndarray:
         """Returns the current load curve."""
         return self.load_curve
 
-    def get_load_for_hour(self, hour):
+    def get_load_for_hour(self, hour: int) -> float:
         """Returns the load for a specific hour.
 
         :param hour: The hour for which the load is queried.
@@ -59,6 +59,6 @@ class HomeAppliance:
         return self.load_curve[hour]
 
-    def get_latest_starting_point(self):
+    def get_latest_starting_point(self) -> int:
         """Returns the latest possible start time at which the device can still run completely."""
         return self.hours - self.duration_h

View File

@@ -18,7 +18,7 @@ class Heatpump:
     COP_COEFFICIENT = 0.1
     """COP increase per degree"""
 
-    def __init__(self, max_heat_output, prediction_hours):
+    def __init__(self, max_heat_output: int, prediction_hours: int):
        self.max_heat_output = max_heat_output
        self.prediction_hours = prediction_hours
        self.log = logging.getLogger(__name__)

View File

@@ -4,7 +4,7 @@ from akkudoktoreos.devices.battery import PVAkku
 
 class WechselrichterParameters(BaseModel):
-    max_leistung_wh: float = Field(10000, gt=0)
+    max_leistung_wh: float = Field(default=10000, gt=0)
 
 
 class Wechselrichter:
@@ -14,9 +14,11 @@ class Wechselrichter:
         )
         self.akku = akku  # Connection to a battery object
 
-    def energie_verarbeiten(self, erzeugung, verbrauch, hour):
-        verluste = 0  # Losses during processing
-        netzeinspeisung = 0  # Grid feed-in
+    def energie_verarbeiten(
+        self, erzeugung: float, verbrauch: float, hour: int
+    ) -> tuple[float, float, float, float]:
+        verluste = 0.0  # Losses during processing
+        netzeinspeisung = 0.0  # Grid feed-in
         netzbezug = 0.0  # Grid draw
         eigenverbrauch = 0.0  # Self-consumption

View File

@ -7,13 +7,20 @@ from pydantic import BaseModel, Field, field_validator, model_validator
from typing_extensions import Self from typing_extensions import Self
from akkudoktoreos.config import AppConfig from akkudoktoreos.config import AppConfig
from akkudoktoreos.devices.battery import EAutoParameters, PVAkku, PVAkkuParameters from akkudoktoreos.devices.battery import (
EAutoParameters,
EAutoResult,
PVAkku,
PVAkkuParameters,
)
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
from akkudoktoreos.prediction.ems import ( from akkudoktoreos.prediction.ems import (
EnergieManagementSystem, EnergieManagementSystem,
EnergieManagementSystemParameters, EnergieManagementSystemParameters,
SimulationResult,
) )
from akkudoktoreos.utils.utils import NumpyEncoder
from akkudoktoreos.visualize import visualisiere_ergebnisse from akkudoktoreos.visualize import visualisiere_ergebnisse
@ -21,19 +28,20 @@ class OptimizationParameters(BaseModel):
ems: EnergieManagementSystemParameters ems: EnergieManagementSystemParameters
pv_akku: PVAkkuParameters pv_akku: PVAkkuParameters
wechselrichter: WechselrichterParameters = WechselrichterParameters() wechselrichter: WechselrichterParameters = WechselrichterParameters()
eauto: EAutoParameters eauto: Optional[EAutoParameters]
dishwasher: Optional[HomeApplianceParameters] = None dishwasher: Optional[HomeApplianceParameters] = None
temperature_forecast: list[float] = Field( temperature_forecast: Optional[list[float]] = Field(
"An array of floats representing the temperature forecast in degrees Celsius for different time intervals." default=None,
description="An array of floats representing the temperature forecast in degrees Celsius for different time intervals.",
) )
start_solution: Optional[list[float]] = Field( start_solution: Optional[list[float]] = Field(
None, description="Can be `null` or contain a previous solution (if available)." default=None, description="Can be `null` or contain a previous solution (if available)."
) )
@model_validator(mode="after") @model_validator(mode="after")
def validate_list_length(self) -> Self: def validate_list_length(self) -> Self:
arr_length = len(self.ems.pv_prognose_wh) arr_length = len(self.ems.pv_prognose_wh)
if arr_length != len(self.temperature_forecast): if self.temperature_forecast is not None and arr_length != len(self.temperature_forecast):
raise ValueError("Input lists have different lenghts") raise ValueError("Input lists have different lenghts")
return self return self
@ -46,100 +54,6 @@ class OptimizationParameters(BaseModel):
return start_solution return start_solution
class EAutoResult(BaseModel):
"""This object contains information related to the electric vehicle and its charging and discharging behavior."""
charge_array: list[float] = Field(
description="Indicates for each hour whether the EV is charging (`0` for no charging, `1` for charging)."
)
discharge_array: list[int] = Field(
description="Indicates for each hour whether the EV is discharging (`0` for no discharging, `1` for discharging)."
)
entlade_effizienz: float = Field(description="The discharge efficiency as a float.")
hours: int = Field("Amount of hours the simulation is done for.")
kapazitaet_wh: int = Field("The capacity of the EVs battery in watt-hours.")
lade_effizienz: float = Field("The charging efficiency as a float.")
max_ladeleistung_w: int = Field(description="The maximum charging power of the EV in watts.")
soc_wh: float = Field(
description="The state of charge of the battery in watt-hours at the start of the simulation."
)
start_soc_prozent: int = Field(
description="The state of charge of the battery in percentage at the start of the simulation."
)
class SimulationResult(BaseModel):
"""This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""
Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
description="The state of charge of the EV for each hour."
)
Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The revenue from grid feed-in or other sources in euros per hour."
)
Gesamt_Verluste: float = Field(
description="The total losses in watt-hours over the entire period."
)
Gesamtbilanz_Euro: float = Field(
description="The total balance of revenues minus costs in euros."
)
Gesamteinnahmen_Euro: float = Field(description="The total revenues in euros.")
Gesamtkosten_Euro: float = Field(description="The total costs in euros.")
Home_appliance_wh_per_hour: list[Optional[float]] = Field(
description="The energy consumption of a household appliance in watt-hours per hour."
)
Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The costs in euros per hour."
)
Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The grid energy drawn in watt-hours per hour."
)
Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The energy fed into the grid in watt-hours per hour."
)
Verluste_Pro_Stunde: list[Optional[float]] = Field(
description="The losses in watt-hours per hour."
)
akku_soc_pro_stunde: list[Optional[float]] = Field(
description="The state of charge of the battery (not the EV) in percentage per hour."
)
# class SimulationData(BaseModel):
# """An object containing the simulated data."""
#
# Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
# EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated state of charge of the electric car per hour.",
# )
# Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated income in euros per hour."
# )
# Gesamt_Verluste: float = Field(description="The total simulated losses in watt-hours.")
# Gesamtbilanz_Euro: float = Field(description="The total simulated balance in euros.")
# Gesamteinnahmen_Euro: float = Field(description="The total simulated income in euros.")
# Gesamtkosten_Euro: float = Field(description="The total simulated costs in euros.")
# Home_appliance_wh_per_hour: list[Optional[float]] = Field(
# description="An array of floats representing the simulated energy consumption of a household appliance in watt-hours per hour."
# )
# Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated costs in euros per hour."
# )
# Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated grid consumption in watt-hours per hour."
# )
# Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated grid feed-in in watt-hours per hour."
# )
# Verluste_Pro_Stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated losses per hour."
# )
# akku_soc_pro_stunde: list[Optional[float]] = Field(
# description="An array of floats representing the simulated state of charge of the battery in percentage per hour."
# )
class OptimizeResponse(BaseModel): class OptimizeResponse(BaseModel):
"""**Note**: The first value of "Last_Wh_pro_Stunde", "Netzeinspeisung_Wh_pro_Stunde" and "Netzbezug_Wh_pro_Stunde", will be set to null in the JSON output and represented as NaN or None in the corresponding classes' data returns. This approach is adopted to ensure that the current hour's processing remains unchanged.""" """**Note**: The first value of "Last_Wh_pro_Stunde", "Netzeinspeisung_Wh_pro_Stunde" and "Netzbezug_Wh_pro_Stunde", will be set to null in the JSON output and represented as NaN or None in the corresponding classes' data returns. This approach is adopted to ensure that the current hour's processing remains unchanged."""
@ -152,17 +66,35 @@ class OptimizeResponse(BaseModel):
discharge_allowed: list[int] = Field( discharge_allowed: list[int] = Field(
description="Array with discharge values (1 for discharge, 0 otherwise)." description="Array with discharge values (1 for discharge, 0 otherwise)."
) )
eautocharge_hours_float: Optional[list[float]] = Field(description="TBD")
result: SimulationResult result: SimulationResult
eauto_obj: EAutoResult eauto_obj: Optional[EAutoResult]
start_solution: Optional[list[float]] = Field( start_solution: Optional[list[float]] = Field(
None, default=None,
description="An array of binary values (0 or 1) representing a possible starting solution for the simulation.", description="An array of binary values (0 or 1) representing a possible starting solution for the simulation.",
) )
washingstart: Optional[int] = Field( washingstart: Optional[int] = Field(
None, default=None,
description="Can be `null` or contain an object representing the start of washing (if applicable).", description="Can be `null` or contain an object representing the start of washing (if applicable).",
) )
# simulation_data: Optional[SimulationData] = None
@field_validator(
"ac_charge",
"dc_charge",
"discharge_allowed",
mode="before",
)
def convert_numpy(cls, field: Any) -> Any:
return NumpyEncoder.convert_numpy(field)[0]
@field_validator(
"eauto_obj",
mode="before",
)
def convert_eauto(cls, field: Any) -> Any:
if isinstance(field, PVAkku):
return EAutoResult(**field.to_dict())
return field
class optimization_problem: class optimization_problem:
@ -176,7 +108,7 @@ class optimization_problem:
self._config = config self._config = config
self.prediction_hours = config.eos.prediction_hours self.prediction_hours = config.eos.prediction_hours
self.strafe = config.eos.penalty self.strafe = config.eos.penalty
self.opti_param = None self.opti_param: dict[str, Any] = {}
self.fixed_eauto_hours = config.eos.prediction_hours - config.eos.optimization_hours self.fixed_eauto_hours = config.eos.prediction_hours - config.eos.optimization_hours
self.possible_charge_values = config.eos.available_charging_rates_in_percentage self.possible_charge_values = config.eos.available_charging_rates_in_percentage
self.verbose = verbose self.verbose = verbose
@ -189,7 +121,7 @@ class optimization_problem:
random.seed(fixed_seed) random.seed(fixed_seed)
def decode_charge_discharge( def decode_charge_discharge(
self, discharge_hours_bin: np.ndarray self, discharge_hours_bin: list[int]
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""Decode the input array `discharge_hours_bin` into three separate arrays for AC charging, DC charging, and discharge. """Decode the input array `discharge_hours_bin` into three separate arrays for AC charging, DC charging, and discharge.
@ -209,30 +141,32 @@ class optimization_problem:
- discharge (np.ndarray): Array with discharge values (1 for discharge, 0 otherwise). - discharge (np.ndarray): Array with discharge values (1 for discharge, 0 otherwise).
""" """
# Convert the input list to a NumPy array, if it's not already # Convert the input list to a NumPy array, if it's not already
discharge_hours_bin = np.array(discharge_hours_bin) discharge_hours_bin_np = np.array(discharge_hours_bin)
# Create ac_charge array: Only consider values between 2 and 6 (AC charging power levels), set the rest to 0 # Create ac_charge array: Only consider values between 2 and 6 (AC charging power levels), set the rest to 0
ac_charge = np.where( ac_charge = np.where(
(discharge_hours_bin >= 2) & (discharge_hours_bin <= 6), discharge_hours_bin - 1, 0 (discharge_hours_bin_np >= 2) & (discharge_hours_bin_np <= 6),
discharge_hours_bin_np - 1,
0,
) )
ac_charge = ac_charge / 5.0 # Normalize AC charge to range between 0 and 1 ac_charge = ac_charge / 5.0 # Normalize AC charge to range between 0 and 1
# Create dc_charge array: 7 = Not allowed (mapped to 0), 8 = Allowed (mapped to 1) # Create dc_charge array: 7 = Not allowed (mapped to 0), 8 = Allowed (mapped to 1)
# Create dc_charge array: Only if DC charge optimization is enabled # Create dc_charge array: Only if DC charge optimization is enabled
if self.optimize_dc_charge: if self.optimize_dc_charge:
dc_charge = np.where(discharge_hours_bin == 8, 1, 0) dc_charge = np.where(discharge_hours_bin_np == 8, 1, 0)
else: else:
dc_charge = np.ones_like( dc_charge = np.ones_like(
discharge_hours_bin discharge_hours_bin_np
) # Set DC charge to 0 if optimization is disabled ) # Set DC charge to 0 if optimization is disabled
# Create discharge array: Only consider value 1 (Discharge), set the rest to 0 (binary output) # Create discharge array: Only consider value 1 (Discharge), set the rest to 0 (binary output)
discharge = np.where(discharge_hours_bin == 1, 1, 0) discharge = np.where(discharge_hours_bin_np == 1, 1, 0)
return ac_charge, dc_charge, discharge return ac_charge, dc_charge, discharge
# Custom mutation function that applies type-specific mutations # Custom mutation function that applies type-specific mutations
def mutate(self, individual): def mutate(self, individual: list[int]) -> tuple[list[int]]:
"""Custom mutation function for the individual. """Custom mutation function for the individual.
This function mutates different parts of the individual: This function mutates different parts of the individual:
@ -298,7 +232,7 @@ class optimization_problem:
return (individual,) return (individual,)
# Method to create an individual based on the conditions # Method to create an individual based on the conditions
def create_individual(self): def create_individual(self) -> list[int]:
# Start with discharge states for the individual # Start with discharge states for the individual
individual_components = [ individual_components = [
self.toolbox.attr_discharge_state() for _ in range(self.prediction_hours) self.toolbox.attr_discharge_state() for _ in range(self.prediction_hours)
@ -317,8 +251,8 @@ class optimization_problem:
return creator.Individual(individual_components) return creator.Individual(individual_components)
def split_individual( def split_individual(
self, individual: list[float] self, individual: list[int]
) -> Tuple[list[int], list[float], Optional[int]]: ) -> tuple[list[int], Optional[list[int]], Optional[int]]:
"""Split the individual solution into its components. """Split the individual solution into its components.
Components: Components:
@ -327,18 +261,18 @@ class optimization_problem:
3. Dishwasher start time (integer if applicable). 3. Dishwasher start time (integer if applicable).
""" """
discharge_hours_bin = individual[: self.prediction_hours] discharge_hours_bin = individual[: self.prediction_hours]
eautocharge_hours_float = ( eautocharge_hours_index = (
individual[self.prediction_hours : self.prediction_hours * 2] individual[self.prediction_hours : self.prediction_hours * 2]
if self.optimize_ev if self.optimize_ev
else None else None
) )
washingstart_int = ( washingstart_int = (
individual[-1] int(individual[-1])
if self.opti_param and self.opti_param.get("home_appliance", 0) > 0 if self.opti_param and self.opti_param.get("home_appliance", 0) > 0
else None else None
) )
return discharge_hours_bin, eautocharge_hours_float, washingstart_int return discharge_hours_bin, eautocharge_hours_index, washingstart_int
def setup_deap_environment(self, opti_param: dict[str, Any], start_hour: int) -> None: def setup_deap_environment(self, opti_param: dict[str, Any], start_hour: int) -> None:
"""Set up the DEAP environment with fitness and individual creation rules.""" """Set up the DEAP environment with fitness and individual creation rules."""
@ -403,7 +337,7 @@ class optimization_problem:
self.toolbox.register("select", tools.selTournament, tournsize=3) self.toolbox.register("select", tools.selTournament, tournsize=3)
def evaluate_inner( def evaluate_inner(
self, individual: list[float], ems: EnergieManagementSystem, start_hour: int self, individual: list[int], ems: EnergieManagementSystem, start_hour: int
) -> dict[str, Any]: ) -> dict[str, Any]:
"""Simulates the energy management system (EMS) using the provided individual solution. """Simulates the energy management system (EMS) using the provided individual solution.
@ -413,7 +347,7 @@ class optimization_problem:
discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual( discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
individual individual
) )
if self.opti_param.get("home_appliance", 0) > 0: if washingstart_int is not None:
ems.set_home_appliance_start(washingstart_int, global_start_hour=start_hour) ems.set_home_appliance_start(washingstart_int, global_start_hour=start_hour)
ac, dc, discharge = self.decode_charge_discharge(discharge_hours_bin) ac, dc, discharge = self.decode_charge_discharge(discharge_hours_bin)
@ -424,19 +358,19 @@ class optimization_problem:
ems.set_akku_dc_charge_hours(dc) ems.set_akku_dc_charge_hours(dc)
ems.set_akku_ac_charge_hours(ac) ems.set_akku_ac_charge_hours(ac)
if self.optimize_ev: if eautocharge_hours_index is not None:
eautocharge_hours_float = [ eautocharge_hours_float = [
self._config.eos.available_charging_rates_in_percentage[i] self._config.eos.available_charging_rates_in_percentage[i]
for i in eautocharge_hours_index for i in eautocharge_hours_index
] ]
ems.set_ev_charge_hours(eautocharge_hours_float) ems.set_ev_charge_hours(np.array(eautocharge_hours_float))
else: else:
ems.set_ev_charge_hours(np.full(self.prediction_hours, 0)) ems.set_ev_charge_hours(np.full(self.prediction_hours, 0))
return ems.simuliere(start_hour) return ems.simuliere(start_hour)
def evaluate( def evaluate(
self, self,
individual: list[float], individual: list[int],
ems: EnergieManagementSystem, ems: EnergieManagementSystem,
parameters: OptimizationParameters, parameters: OptimizationParameters,
start_hour: int, start_hour: int,
@ -450,7 +384,7 @@ class optimization_problem:
gesamtbilanz = o["Gesamtbilanz_Euro"] * (-1.0 if worst_case else 1.0) gesamtbilanz = o["Gesamtbilanz_Euro"] * (-1.0 if worst_case else 1.0)
discharge_hours_bin, eautocharge_hours_float, _ = self.split_individual(individual) discharge_hours_bin, eautocharge_hours_index, _ = self.split_individual(individual)
# Small Penalty for not discharging # Small Penalty for not discharging
gesamtbilanz += sum( gesamtbilanz += sum(
@ -460,13 +394,15 @@ class optimization_problem:
# Penalty for not meeting the minimum SOC (State of Charge) requirement # Penalty for not meeting the minimum SOC (State of Charge) requirement
# if parameters.eauto_min_soc_prozent - ems.eauto.ladezustand_in_prozent() <= 0.0 and self.optimize_ev: # if parameters.eauto_min_soc_prozent - ems.eauto.ladezustand_in_prozent() <= 0.0 and self.optimize_ev:
# gesamtbilanz += sum( # gesamtbilanz += sum(
# self.strafe for ladeleistung in eautocharge_hours_float if ladeleistung != 0.0 # self.strafe for ladeleistung in eautocharge_hours_index if ladeleistung != 0.0
# ) # )
individual.extra_data = ( individual.extra_data = ( # type: ignore[attr-defined]
o["Gesamtbilanz_Euro"], o["Gesamtbilanz_Euro"],
o["Gesamt_Verluste"], o["Gesamt_Verluste"],
parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent(), parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()
if parameters.eauto and ems.eauto
else 0,
) )
# Adjust total balance with battery value and penalties for unmet SOC # Adjust total balance with battery value and penalties for unmet SOC
@ -478,7 +414,11 @@ class optimization_problem:
if self.optimize_ev: if self.optimize_ev:
gesamtbilanz += max( gesamtbilanz += max(
0, 0,
(parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()) (
parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()
if parameters.eauto and ems.eauto
else 0
)
* self.strafe, * self.strafe,
) )
@ -497,7 +437,7 @@ class optimization_problem:
print("Start optimize:", start_solution) print("Start optimize:", start_solution)
# Insert the start solution into the population if provided # Insert the start solution into the population if provided
if start_solution not in [None, -1]: if start_solution is not None:
for _ in range(3): for _ in range(3):
population.insert(0, creator.Individual(start_solution)) population.insert(0, creator.Individual(start_solution))
@ -515,7 +455,7 @@ class optimization_problem:
verbose=self.verbose, verbose=self.verbose,
) )
member = {"bilanz": [], "verluste": [], "nebenbedingung": []} member: dict[str, list[float]] = {"bilanz": [], "verluste": [], "nebenbedingung": []}
for ind in population: for ind in population:
if hasattr(ind, "extra_data"): if hasattr(ind, "extra_data"):
extra_value1, extra_value2, extra_value3 = ind.extra_data extra_value1, extra_value2, extra_value3 = ind.extra_data
@ -528,12 +468,10 @@ class optimization_problem:
def optimierung_ems( def optimierung_ems(
self, self,
parameters: OptimizationParameters, parameters: OptimizationParameters,
start_hour: Optional[int] = None, start_hour: int,
worst_case: bool = False, worst_case: bool = False,
startdate: Optional[Any] = None, # startdate is not used!
*,
ngen: int = 600, ngen: int = 600,
) -> dict[str, Any]: ) -> OptimizeResponse:
"""Perform EMS (Energy Management System) optimization and visualize results.""" """Perform EMS (Energy Management System) optimization and visualize results."""
einspeiseverguetung_euro_pro_wh = np.full( einspeiseverguetung_euro_pro_wh = np.full(
self.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh self.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
@ -546,16 +484,19 @@ class optimization_problem:
) )
akku.set_charge_per_hour(np.full(self.prediction_hours, 1)) akku.set_charge_per_hour(np.full(self.prediction_hours, 1))
self.optimize_ev = True eauto: Optional[PVAkku] = None
if parameters.eauto.min_soc_prozent - parameters.eauto.start_soc_prozent < 0: if parameters.eauto:
eauto = PVAkku(
parameters.eauto,
hours=self.prediction_hours,
)
eauto.set_charge_per_hour(np.full(self.prediction_hours, 1))
self.optimize_ev = (
parameters.eauto.min_soc_prozent - parameters.eauto.start_soc_prozent >= 0
)
else:
self.optimize_ev = False self.optimize_ev = False
eauto = PVAkku(
parameters.eauto,
hours=self.prediction_hours,
)
eauto.set_charge_per_hour(np.full(self.prediction_hours, 1))
# Initialize household appliance if applicable # Initialize household appliance if applicable
dishwasher = ( dishwasher = (
HomeAppliance( HomeAppliance(
@ -571,9 +512,9 @@ class optimization_problem:
ems = EnergieManagementSystem( ems = EnergieManagementSystem(
self._config.eos, self._config.eos,
parameters.ems, parameters.ems,
wechselrichter=wr,
eauto=eauto, eauto=eauto,
home_appliance=dishwasher, home_appliance=dishwasher,
wechselrichter=wr,
) )
# Setup the DEAP environment and optimization process # Setup the DEAP environment and optimization process
@ -586,14 +527,17 @@ class optimization_problem:
# Perform final evaluation on the best solution # Perform final evaluation on the best solution
o = self.evaluate_inner(start_solution, ems, start_hour) o = self.evaluate_inner(start_solution, ems, start_hour)
discharge_hours_bin, eautocharge_hours_float, washingstart_int = self.split_individual( discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
start_solution start_solution
) )
if self.optimize_ev: eautocharge_hours_float = (
eautocharge_hours_float = [ [
self._config.eos.available_charging_rates_in_percentage[i] self._config.eos.available_charging_rates_in_percentage[i]
for i in eautocharge_hours_float for i in eautocharge_hours_index
] ]
if eautocharge_hours_index is not None
else None
)
ac_charge, dc_charge, discharge = self.decode_charge_discharge(discharge_hours_bin) ac_charge, dc_charge, discharge = self.decode_charge_discharge(discharge_hours_bin)
# Visualize the results # Visualize the results
@ -612,43 +556,15 @@ class optimization_problem:
extra_data=extra_data, extra_data=extra_data,
) )
# List output keys where the first element needs to be changed to None return OptimizeResponse(
keys_to_modify = [ **{
"Last_Wh_pro_Stunde", "ac_charge": ac_charge,
"Netzeinspeisung_Wh_pro_Stunde", "dc_charge": dc_charge,
"akku_soc_pro_stunde", "discharge_allowed": discharge,
"Netzbezug_Wh_pro_Stunde", "eautocharge_hours_float": eautocharge_hours_float,
"Kosten_Euro_pro_Stunde", "result": SimulationResult(**o),
"Einnahmen_Euro_pro_Stunde", "eauto_obj": ems.eauto,
"EAuto_SoC_pro_Stunde", "start_solution": start_solution,
"Verluste_Pro_Stunde", "washingstart": washingstart_int,
"Home_appliance_wh_per_hour", }
] )
# Loop through each key in the list
for key in keys_to_modify:
# Convert the NumPy array to a list
element_list = o[key].tolist()
# Change the first value to None
# element_list[0] = None
# Change the NaN to None (JSON)
element_list = [
None if isinstance(x, (int, float)) and np.isnan(x) else x for x in element_list
]
# Assign the modified list back to the dictionary
o[key] = element_list
# Return final results as a dictionary
return {
"ac_charge": ac_charge.tolist(),
"dc_charge": dc_charge.tolist(),
"discharge_allowed": discharge.tolist(),
"eautocharge_hours_float": eautocharge_hours_float,
"result": o,
"eauto_obj": ems.eauto.to_dict(),
"start_solution": start_solution,
"washingstart": washingstart_int,
# "simulation_data": o,
}

View File

@ -1,14 +1,15 @@
from datetime import datetime from datetime import datetime
from typing import Dict, List, Optional, Union from typing import Any, Dict, Optional, Union
import numpy as np import numpy as np
from pydantic import BaseModel, Field, model_validator from pydantic import BaseModel, Field, field_validator, model_validator
from typing_extensions import Self from typing_extensions import Self
from akkudoktoreos.config import EOSConfig from akkudoktoreos.config import EOSConfig
from akkudoktoreos.devices.battery import PVAkku from akkudoktoreos.devices.battery import PVAkku
from akkudoktoreos.devices.generic import HomeAppliance from akkudoktoreos.devices.generic import HomeAppliance
from akkudoktoreos.devices.inverter import Wechselrichter from akkudoktoreos.devices.inverter import Wechselrichter
from akkudoktoreos.utils.utils import NumpyEncoder
class EnergieManagementSystemParameters(BaseModel): class EnergieManagementSystemParameters(BaseModel):
@ -41,14 +42,67 @@ class EnergieManagementSystemParameters(BaseModel):
return self return self
class SimulationResult(BaseModel):
"""This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""
Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
EAuto_SoC_pro_Stunde: list[Optional[float]] = Field(
description="The state of charge of the EV for each hour."
)
Einnahmen_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The revenue from grid feed-in or other sources in euros per hour."
)
Gesamt_Verluste: float = Field(
description="The total losses in watt-hours over the entire period."
)
Gesamtbilanz_Euro: float = Field(
description="The total balance of revenues minus costs in euros."
)
Gesamteinnahmen_Euro: float = Field(description="The total revenues in euros.")
Gesamtkosten_Euro: float = Field(description="The total costs in euros.")
Home_appliance_wh_per_hour: list[Optional[float]] = Field(
description="The energy consumption of a household appliance in watt-hours per hour."
)
Kosten_Euro_pro_Stunde: list[Optional[float]] = Field(
description="The costs in euros per hour."
)
Netzbezug_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The grid energy drawn in watt-hours per hour."
)
Netzeinspeisung_Wh_pro_Stunde: list[Optional[float]] = Field(
description="The energy fed into the grid in watt-hours per hour."
)
Verluste_Pro_Stunde: list[Optional[float]] = Field(
description="The losses in watt-hours per hour."
)
akku_soc_pro_stunde: list[Optional[float]] = Field(
description="The state of charge of the battery (not the EV) in percentage per hour."
)
@field_validator(
"Last_Wh_pro_Stunde",
"Netzeinspeisung_Wh_pro_Stunde",
"akku_soc_pro_stunde",
"Netzbezug_Wh_pro_Stunde",
"Kosten_Euro_pro_Stunde",
"Einnahmen_Euro_pro_Stunde",
"EAuto_SoC_pro_Stunde",
"Verluste_Pro_Stunde",
"Home_appliance_wh_per_hour",
mode="before",
)
def convert_numpy(cls, field: Any) -> Any:
return NumpyEncoder.convert_numpy(field)[0]
 class EnergieManagementSystem:
     def __init__(
         self,
         config: EOSConfig,
         parameters: EnergieManagementSystemParameters,
+        wechselrichter: Wechselrichter,
         eauto: Optional[PVAkku] = None,
         home_appliance: Optional[HomeAppliance] = None,
-        wechselrichter: Optional[Wechselrichter] = None,
     ):
         self.akku = wechselrichter.akku
         self.gesamtlast = np.array(parameters.gesamtlast, float)
@@ -66,7 +120,7 @@ class EnergieManagementSystem:
         self.dc_charge_hours = np.full(config.prediction_hours, 1)
         self.ev_charge_hours = np.full(config.prediction_hours, 0)
-    def set_akku_discharge_hours(self, ds: List[int]) -> None:
+    def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
         self.akku.set_discharge_per_hour(ds)
     def set_akku_ac_charge_hours(self, ds: np.ndarray) -> None:
@@ -75,22 +129,24 @@ class EnergieManagementSystem:
     def set_akku_dc_charge_hours(self, ds: np.ndarray) -> None:
         self.dc_charge_hours = ds
-    def set_ev_charge_hours(self, ds: List[int]) -> None:
+    def set_ev_charge_hours(self, ds: np.ndarray) -> None:
         self.ev_charge_hours = ds
-    def set_home_appliance_start(self, ds: List[int], global_start_hour: int = 0) -> None:
-        self.home_appliance.set_starting_time(ds, global_start_hour=global_start_hour)
+    def set_home_appliance_start(self, start_hour: int, global_start_hour: int = 0) -> None:
+        assert self.home_appliance is not None
+        self.home_appliance.set_starting_time(start_hour, global_start_hour=global_start_hour)
     def reset(self) -> None:
-        self.eauto.reset()
+        if self.eauto:
+            self.eauto.reset()
         self.akku.reset()
-    def simuliere_ab_jetzt(self) -> dict:
+    def simuliere_ab_jetzt(self) -> dict[str, Any]:
         jetzt = datetime.now()
         start_stunde = jetzt.hour
         return self.simuliere(start_stunde)
-    def simuliere(self, start_stunde: int) -> dict:
+    def simuliere(self, start_stunde: int) -> dict[str, Any]:
         """hour.
         akku_soc_pro_stunde begin of the hour, initial hour state!
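The reset() and set_home_appliance_start() changes above are the two usual ways to satisfy mypy when an attribute is Optional. A small stand-alone sketch (toy classes, not the project's API):

from typing import Optional


class Device:
    def reset(self) -> None:
        print("reset")


class Controller:
    def __init__(self, device: Optional[Device] = None) -> None:
        self.device = device

    def reset(self) -> None:
        # An `if` check narrows Optional[Device] to Device for mypy and simply
        # skips the call when no device is configured.
        if self.device:
            self.device.reset()

    def must_reset(self) -> None:
        # `assert` narrows too, but raises AssertionError when the device is missing.
        assert self.device is not None
        self.device.reset()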

View File

@@ -2,11 +2,13 @@ import numpy as np
 class Gesamtlast:
-    def __init__(self, prediction_hours=24):
-        self.lasten = {}  # Contains names and load arrays for different sources
+    def __init__(self, prediction_hours: int = 24):
+        self.lasten: dict[
+            str, np.ndarray
+        ] = {}  # Contains names and load arrays for different sources
         self.prediction_hours = prediction_hours
-    def hinzufuegen(self, name, last_array):
+    def hinzufuegen(self, name: str, last_array: np.ndarray) -> None:
         """Adds an array of loads for a specific source.
         :param name: Name of the load source (e.g., "Household", "Heat Pump")
@@ -16,13 +18,13 @@ class Gesamtlast:
             raise ValueError(f"Total load inconsistent lengths in arrays: {name} {len(last_array)}")
         self.lasten[name] = last_array
-    def gesamtlast_berechnen(self):
+    def gesamtlast_berechnen(self) -> np.ndarray:
         """Calculates the total load for each hour and returns an array of total loads.
         :return: Array of total loads, where each entry corresponds to an hour
         """
         if not self.lasten:
-            return []
+            return np.ndarray(0)
         # Assumption: All load arrays have the same length
         stunden = len(next(iter(self.lasten.values())))
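A compact, self-contained sketch of the typed load-aggregation idea above (toy class and values, not the repository code):

import numpy as np


class TotalLoad:
    """Toy aggregator mirroring the typed Gesamtlast structure."""

    def __init__(self, hours: int = 24) -> None:
        self.loads: dict[str, np.ndarray] = {}
        self.hours = hours

    def add(self, name: str, values: np.ndarray) -> None:
        if len(values) != self.hours:
            raise ValueError(f"{name}: expected {self.hours} values, got {len(values)}")
        self.loads[name] = values

    def total(self) -> np.ndarray:
        if not self.loads:
            return np.zeros(0)
        # Element-wise sum over all sources -> one value per hour.
        return np.sum(list(self.loads.values()), axis=0)


t = TotalLoad(hours=3)
t.add("Household", np.array([100.0, 120.0, 90.0]))
t.add("Heat Pump", np.array([50.0, 0.0, 30.0]))
print(t.total())  # [150. 120. 120.]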

View File

@@ -1,28 +1,30 @@
+from datetime import datetime
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
 from sklearn.metrics import mean_squared_error, r2_score
+from akkudoktoreos.prediction.load_forecast import LoadForecast
 class LoadPredictionAdjuster:
-    def __init__(self, measured_data, predicted_data, load_forecast):
+    def __init__(
+        self, measured_data: pd.DataFrame, predicted_data: pd.DataFrame, load_forecast: LoadForecast
+    ):
         self.measured_data = measured_data
         self.predicted_data = predicted_data
         self.load_forecast = load_forecast
         self.merged_data = self._merge_data()
-        self.train_data = None
-        self.test_data = None
-        self.weekday_diff = None
-        self.weekend_diff = None
-    def _remove_outliers(self, data, threshold=2):
+    def _remove_outliers(self, data: pd.DataFrame, threshold: int = 2) -> pd.DataFrame:
         # Calculate the Z-Score of the 'Last' data
         data["Z-Score"] = np.abs((data["Last"] - data["Last"].mean()) / data["Last"].std())
         # Filter the data based on the threshold
         filtered_data = data[data["Z-Score"] < threshold]
         return filtered_data.drop(columns=["Z-Score"])
-    def _merge_data(self):
+    def _merge_data(self) -> pd.DataFrame:
         # Convert the time column in both DataFrames to datetime
         self.predicted_data["time"] = pd.to_datetime(self.predicted_data["time"])
         self.measured_data["time"] = pd.to_datetime(self.measured_data["time"])
@@ -47,7 +49,9 @@ class LoadPredictionAdjuster:
         merged_data["DayOfWeek"] = merged_data["time"].dt.dayofweek
         return merged_data
-    def calculate_weighted_mean(self, train_period_weeks=9, test_period_weeks=1):
+    def calculate_weighted_mean(
+        self, train_period_weeks: int = 9, test_period_weeks: int = 1
+    ) -> None:
         self.merged_data = self._remove_outliers(self.merged_data)
         train_end_date = self.merged_data["time"].max() - pd.Timedelta(weeks=test_period_weeks)
         train_start_date = train_end_date - pd.Timedelta(weeks=train_period_weeks)
@@ -79,27 +83,27 @@ class LoadPredictionAdjuster:
             weekends_train_data.groupby("Hour").apply(self._weighted_mean_diff).dropna()
         )
-    def _weighted_mean_diff(self, data):
+    def _weighted_mean_diff(self, data: pd.DataFrame) -> float:
         train_end_date = self.train_data["time"].max()
         weights = 1 / (train_end_date - data["time"]).dt.days.replace(0, np.nan)
         weighted_mean = (data["Difference"] * weights).sum() / weights.sum()
         return weighted_mean
-    def adjust_predictions(self):
+    def adjust_predictions(self) -> None:
         self.train_data["Adjusted Pred"] = self.train_data.apply(self._adjust_row, axis=1)
         self.test_data["Adjusted Pred"] = self.test_data.apply(self._adjust_row, axis=1)
-    def _adjust_row(self, row):
+    def _adjust_row(self, row: pd.Series) -> pd.Series:
         if row["DayOfWeek"] < 5:
             return row["Last Pred"] + self.weekday_diff.get(row["Hour"], 0)
         else:
             return row["Last Pred"] + self.weekend_diff.get(row["Hour"], 0)
-    def plot_results(self):
+    def plot_results(self) -> None:
         self._plot_data(self.train_data, "Training")
         self._plot_data(self.test_data, "Testing")
-    def _plot_data(self, data, data_type):
+    def _plot_data(self, data: pd.DataFrame, data_type: str) -> None:
         plt.figure(figsize=(14, 7))
         plt.plot(data["time"], data["Last"], label=f"Actual Last - {data_type}", color="blue")
         plt.plot(
@@ -123,13 +127,13 @@ class LoadPredictionAdjuster:
         plt.grid(True)
         plt.show()
-    def evaluate_model(self):
+    def evaluate_model(self) -> None:
         mse = mean_squared_error(self.test_data["Last"], self.test_data["Adjusted Pred"])
         r2 = r2_score(self.test_data["Last"], self.test_data["Adjusted Pred"])
         print(f"Mean Squared Error: {mse}")
         print(f"R-squared: {r2}")
-    def predict_next_hours(self, hours_ahead):
+    def predict_next_hours(self, hours_ahead: int) -> pd.DataFrame:
         last_date = self.merged_data["time"].max()
         future_dates = [last_date + pd.Timedelta(hours=i) for i in range(1, hours_ahead + 1)]
         future_df = pd.DataFrame({"time": future_dates})
@@ -139,7 +143,7 @@ class LoadPredictionAdjuster:
         future_df["Adjusted Pred"] = future_df.apply(self._adjust_row, axis=1)
         return future_df
-    def _forecast_next_hours(self, timestamp):
+    def _forecast_next_hours(self, timestamp: datetime) -> float:
         date_str = timestamp.strftime("%Y-%m-%d")
         hour = timestamp.hour
         daily_forecast = self.load_forecast.get_daily_stats(date_str)
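The 1/days recency weighting used by _weighted_mean_diff can be reproduced in a few lines of pandas. A sketch with toy data; the Difference/time column names are assumed from the code above:

import numpy as np
import pandas as pd

# Per-hour difference between measurement and prediction, with timestamps.
df = pd.DataFrame(
    {
        "time": pd.date_range("2024-11-01", periods=5, freq="D"),
        "Difference": [10.0, 8.0, -4.0, 6.0, 2.0],
    }
)

train_end_date = df["time"].max()
# More recent rows get larger weights; the 0-day gap is mapped to NaN to avoid
# division by zero, exactly like the 1 / days weighting above.
weights = 1 / (train_end_date - df["time"]).dt.days.replace(0, np.nan)
weighted_mean = (df["Difference"] * weights).sum() / weights.sum()
print(round(weighted_mean, 3))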

View File

@@ -1,4 +1,5 @@
 from datetime import datetime
+from pathlib import Path
 import numpy as np
@@ -6,14 +7,12 @@ import numpy as np
 class LoadForecast:
-    def __init__(self, filepath=None, year_energy=None):
+    def __init__(self, filepath: str | Path, year_energy: float):
         self.filepath = filepath
-        self.data = None
-        self.data_year_energy = None
         self.year_energy = year_energy
         self.load_data()
-    def get_daily_stats(self, date_str):
+    def get_daily_stats(self, date_str: str) -> np.ndarray:
         """Returns the 24-hour profile with mean and standard deviation for a given date.
         :param date_str: Date as a string in the format "YYYY-MM-DD"
@@ -29,7 +28,7 @@ class LoadForecast:
         daily_stats = self.data_year_energy[day_of_year - 1]  # -1 because indexing starts at 0
         return daily_stats
-    def get_hourly_stats(self, date_str, hour):
+    def get_hourly_stats(self, date_str: str, hour: int) -> np.ndarray:
         """Returns the mean and standard deviation for a specific hour of a given date.
         :param date_str: Date as a string in the format "YYYY-MM-DD"
@@ -47,7 +46,7 @@ class LoadForecast:
         return hourly_stats
-    def get_stats_for_date_range(self, start_date_str, end_date_str):
+    def get_stats_for_date_range(self, start_date_str: str, end_date_str: str) -> np.ndarray:
         """Returns the means and standard deviations for a date range.
         :param start_date_str: Start date as a string in the format "YYYY-MM-DD"
@@ -69,7 +68,7 @@ class LoadForecast:
         stats_for_range = stats_for_range.reshape(stats_for_range.shape[0], -1)
         return stats_for_range
-    def load_data(self):
+    def load_data(self) -> None:
         """Loads data from the specified file."""
         try:
             data = np.load(self.filepath)
@@ -81,11 +80,12 @@ class LoadForecast:
         except Exception as e:
             print(f"An error occurred while loading data: {e}")
-    def get_price_data(self):
+    def get_price_data(self) -> None:
         """Returns price data (currently not implemented)."""
-        return self.price_data
+        raise NotImplementedError
+        # return self.price_data
-    def _convert_to_datetime(self, date_str):
+    def _convert_to_datetime(self, date_str: str) -> datetime:
         """Converts a date string to a datetime object."""
         return datetime.strptime(date_str, "%Y-%m-%d")
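The new `filepath: str | Path` annotation accepts both plain strings and pathlib paths. A hedged sketch of that pattern (toy loader, not the project's LoadForecast):

from pathlib import Path

import numpy as np


def load_profile(filepath: str | Path) -> np.ndarray:
    # np.load accepts both str and Path, so the union annotation needs no conversion;
    # wrapping in Path() here only adds convenience checks such as .exists().
    path = Path(filepath)
    if not path.exists():
        raise FileNotFoundError(path)
    return np.load(path)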

View File

@@ -3,6 +3,7 @@ import json
 import zoneinfo
 from datetime import datetime, timedelta, timezone
 from pathlib import Path
+from typing import Any, Sequence
 import numpy as np
 import requests
@@ -10,7 +11,7 @@ import requests
 from akkudoktoreos.config import AppConfig, SetupIncomplete
-def repeat_to_shape(array, target_shape):
+def repeat_to_shape(array: np.ndarray, target_shape: Sequence[int]) -> np.ndarray:
     # Check if the array fits the target shape
     if len(target_shape) != array.ndim:
         raise ValueError("Array and target shape must have the same number of dimensions")
@@ -25,7 +26,11 @@ def repeat_to_shape(array, target_shape):
 class HourlyElectricityPriceForecast:
     def __init__(
-        self, source: str | Path, config: AppConfig, charges=0.000228, use_cache=True
+        self,
+        source: str | Path,
+        config: AppConfig,
+        charges: float = 0.000228,
+        use_cache: bool = True,
     ):  # 228
         self.cache_dir = config.working_dir / config.directories.cache
         self.use_cache = use_cache
@@ -37,7 +42,7 @@ class HourlyElectricityPriceForecast:
         self.charges = charges
         self.prediction_hours = config.eos.prediction_hours
-    def load_data(self, source: str | Path):
+    def load_data(self, source: str | Path) -> list[dict[str, Any]]:
         cache_file = self.get_cache_file(source)
         if isinstance(source, str):
             if cache_file.is_file() and not self.is_cache_expired() and self.use_cache:
@@ -61,12 +66,14 @@ class HourlyElectricityPriceForecast:
             raise ValueError(f"Input is not a valid path: {source}")
         return json_data["values"]
-    def get_cache_file(self, url):
+    def get_cache_file(self, url: str | Path) -> Path:
+        if isinstance(url, Path):
+            url = str(url)
         hash_object = hashlib.sha256(url.encode())
         hex_dig = hash_object.hexdigest()
         return self.cache_dir / f"cache_{hex_dig}.json"
-    def is_cache_expired(self):
+    def is_cache_expired(self) -> bool:
         if not self.cache_time_file.is_file():
             return True
         with self.cache_time_file.open("r") as file:
@@ -74,11 +81,11 @@ class HourlyElectricityPriceForecast:
         last_cache_time = datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S")
         return datetime.now() - last_cache_time > timedelta(hours=1)
-    def update_cache_timestamp(self):
+    def update_cache_timestamp(self) -> None:
         with self.cache_time_file.open("w") as file:
             file.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
-    def get_price_for_date(self, date_str):
+    def get_price_for_date(self, date_str: str) -> np.ndarray:
         """Returns all prices for the specified date, including the price from 00:00 of the previous day."""
         # Convert date string to datetime object
         date_obj = datetime.strptime(date_str, "%Y-%m-%d")
@@ -108,7 +115,7 @@ class HourlyElectricityPriceForecast:
         return np.array(date_prices) / (1000.0 * 100.0) + self.charges
-    def get_price_for_daterange(self, start_date_str, end_date_str):
+    def get_price_for_daterange(self, start_date_str: str, end_date_str: str) -> np.ndarray:
         """Returns all prices between the start and end dates."""
         print(start_date_str)
         print(end_date_str)
@@ -117,7 +124,7 @@ class HourlyElectricityPriceForecast:
         start_date = start_date_utc.astimezone(zoneinfo.ZoneInfo("Europe/Berlin"))
         end_date = end_date_utc.astimezone(zoneinfo.ZoneInfo("Europe/Berlin"))
-        price_list = []
+        price_list: list[float] = []
         while start_date < end_date:
             date_str = start_date.strftime("%Y-%m-%d")
@@ -127,8 +134,10 @@ class HourlyElectricityPriceForecast:
             price_list.extend(daily_prices)
             start_date += timedelta(days=1)
+        price_list_np = np.array(price_list)
         # If prediction hours are greater than 0, reshape the price list
         if self.prediction_hours > 0:
-            price_list = repeat_to_shape(np.array(price_list), (self.prediction_hours,))
+            price_list_np = repeat_to_shape(price_list_np, (self.prediction_hours,))
-        return price_list
+        return price_list_np
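get_cache_file() above now stringifies Path inputs before hashing. A minimal sketch of that URL-to-cache-file mapping (hypothetical helper, not the class method):

import hashlib
from pathlib import Path


def cache_file_for(url: str | Path, cache_dir: Path) -> Path:
    # Hash the URL (or path) so the cache file name is stable and filesystem-safe.
    key = str(url)
    hex_dig = hashlib.sha256(key.encode()).hexdigest()
    return cache_dir / f"cache_{hex_dig}.json"


print(cache_file_for("https://example.com/prices?start=2024-11-26", Path("/tmp/cache")))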

View File

@@ -21,7 +21,7 @@ Example:
     )
     # Update the AC power measurement for a specific date and time
-    forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
+    forecast.update_ac_power_measurement(ac_power_measurement=1000, date_time=datetime.now())
     # Print the forecast data with DC and AC power details
     forecast.print_ac_power_and_measurement()
@@ -36,7 +36,8 @@ Attributes:
 import json
 from datetime import date, datetime
-from typing import List, Optional, Union
+from pathlib import Path
+from typing import Any, List, Optional, Union
 import numpy as np
 import pandas as pd
@@ -89,21 +90,20 @@ class AkkudoktorForecast(BaseModel):
     values: List[List[AkkudoktorForecastValue]]
-def validate_pv_forecast_data(data) -> str:
+def validate_pv_forecast_data(data: dict[str, Any]) -> Optional[str]:
     """Validate PV forecast data."""
-    data_type = None
-    error_msg = ""
     try:
         AkkudoktorForecast.model_validate(data)
         data_type = "Akkudoktor"
     except ValidationError as e:
+        error_msg = ""
         for error in e.errors():
            field = " -> ".join(str(x) for x in error["loc"])
            message = error["msg"]
            error_type = error["type"]
            error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
         logger.debug(f"Validation did not succeed: {error_msg}")
+        return None
     return data_type
@@ -167,7 +167,7 @@ class ForecastData:
         """
         return self.dc_power
-    def ac_power_measurement(self) -> float:
+    def get_ac_power_measurement(self) -> Optional[float]:
         """Returns the measured AC power.
         It returns the measured AC power if available; otherwise None.
@@ -191,7 +191,7 @@ class ForecastData:
         else:
             return self.ac_power
-    def get_windspeed_10m(self) -> float:
+    def get_windspeed_10m(self) -> Optional[float]:
         """Returns the wind speed at 10 meters altitude.
         Returns:
@@ -199,7 +199,7 @@ class ForecastData:
         """
         return self.windspeed_10m
-    def get_temperature(self) -> float:
+    def get_temperature(self) -> Optional[float]:
         """Returns the temperature.
         Returns:
@@ -227,10 +227,10 @@ class PVForecast:
     def __init__(
         self,
-        data: Optional[dict] = None,
-        filepath: Optional[str] = None,
+        data: Optional[dict[str, Any]] = None,
+        filepath: Optional[str | Path] = None,
         url: Optional[str] = None,
-        forecast_start: Union[datetime, date, str, int, float] = None,
+        forecast_start: Union[datetime, date, str, int, float, None] = None,
         prediction_hours: Optional[int] = None,
     ):
         """Initializes a `PVForecast` instance.
@@ -253,16 +253,15 @@ class PVForecast:
         Example:
             forecast = PVForecast(data=my_forecast_data, forecast_start="2024-10-13", prediction_hours=72)
         """
-        self.meta = {}
-        self.forecast_data = []
-        self.current_measurement = None
+        self.meta: dict[str, Any] = {}
+        self.forecast_data: list[ForecastData] = []
+        self.current_measurement: Optional[float] = None
         self.data = data
         self.filepath = filepath
         self.url = url
+        self._forecast_start: Optional[datetime] = None
         if forecast_start:
             self._forecast_start = to_datetime(forecast_start, to_naiv=True, to_maxtime=False)
-        else:
-            self._forecast_start = None
         self.prediction_hours = prediction_hours
         self._tz_name = None
@@ -277,8 +276,8 @@ class PVForecast:
     def update_ac_power_measurement(
         self,
+        ac_power_measurement: float,
         date_time: Union[datetime, date, str, int, float, None] = None,
-        ac_power_measurement=None,
     ) -> bool:
         """Updates the AC power measurement for a specific time.
@@ -309,10 +308,10 @@ class PVForecast:
     def process_data(
         self,
-        data: Optional[dict] = None,
-        filepath: Optional[str] = None,
+        data: Optional[dict[str, Any]] = None,
+        filepath: Optional[str | Path] = None,
         url: Optional[str] = None,
-        forecast_start: Union[datetime, date, str, int, float] = None,
+        forecast_start: Union[datetime, date, str, int, float, None] = None,
         prediction_hours: Optional[int] = None,
     ) -> None:
         """Processes the forecast data from the provided source (in-memory `data`, `filepath`, or `url`).
@@ -368,6 +367,7 @@ class PVForecast:
             )  # Invalid path
         else:
             raise ValueError("No prediction input data available.")
+        assert data is not None  # make mypy happy
         # Validate input data to be of a known format
         data_format = validate_pv_forecast_data(data)
         if data_format != "Akkudoktor":
@@ -390,7 +390,7 @@ class PVForecast:
         # --------------------------------------------
         # From here Akkudoktor PV forecast data format
         # ---------------------------------------------
-        self.meta = data.get("meta")
+        self.meta = data.get("meta", {})
         all_values = data.get("values")
         # timezone of the PV system
@@ -454,7 +454,7 @@ class PVForecast:
             self._forecast_start = self.forecast_data[0].get_date_time()
             logger.debug(f"Forecast start adapted to {self._forecast_start}")
-    def load_data_from_file(self, filepath: str) -> dict:
+    def load_data_from_file(self, filepath: str | Path) -> dict[str, Any]:
         """Loads forecast data from a file.
         Args:
@@ -467,7 +467,7 @@ class PVForecast:
             data = json.load(file)
         return data
-    def load_data_from_url(self, url: str) -> dict:
+    def load_data_from_url(self, url: str) -> dict[str, Any]:
         """Loads forecast data from a URL.
         Example:
@@ -488,7 +488,7 @@ class PVForecast:
         return data
     @cache_in_file()  # use binary mode by default as we have python objects not text
-    def load_data_from_url_with_caching(self, url: str, until_date=None) -> dict:
+    def load_data_from_url_with_caching(self, url: str) -> dict[str, Any]:
         """Loads data from a URL or from the cache if available.
         Args:
@@ -506,7 +506,7 @@ class PVForecast:
             logger.error(data)
         return data
-    def get_forecast_data(self):
+    def get_forecast_data(self) -> list[ForecastData]:
         """Returns the forecast data.
         Returns:
@@ -516,7 +516,7 @@ class PVForecast:
     def get_temperature_forecast_for_date(
         self, input_date: Union[datetime, date, str, int, float, None]
-    ):
+    ) -> np.ndarray:
         """Returns the temperature forecast for a specific date.
         Args:
@@ -543,7 +543,7 @@ class PVForecast:
         self,
         start_date: Union[datetime, date, str, int, float, None],
         end_date: Union[datetime, date, str, int, float, None],
-    ):
+    ) -> np.ndarray:
         """Returns the PV forecast for a date range.
         Args:
@@ -575,7 +575,7 @@ class PVForecast:
         self,
         start_date: Union[datetime, date, str, int, float, None],
         end_date: Union[datetime, date, str, int, float, None],
-    ):
+    ) -> np.ndarray:
         """Returns the temperature forecast for a given date range.
         Args:
@@ -601,7 +601,7 @@ class PVForecast:
         temperature_forecast = [data.get_temperature() for data in date_range_forecast]
         return np.array(temperature_forecast)[: self.prediction_hours]
-    def get_forecast_dataframe(self):
+    def get_forecast_dataframe(self) -> pd.DataFrame:
         """Converts the forecast data into a Pandas DataFrame.
         Returns:
@@ -623,7 +623,7 @@ class PVForecast:
         df = pd.DataFrame(data)
         return df
-    def get_forecast_start(self) -> datetime:
+    def get_forecast_start(self) -> Optional[datetime]:
         """Return the start of the forecast data in local timezone.
         Returns:
@@ -678,5 +678,5 @@ if __name__ == "__main__":
         "past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&"
         "hourly=relativehumidity_2m%2Cwindspeed_10m",
     )
-    forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
+    forecast.update_ac_power_measurement(ac_power_measurement=1000, date_time=datetime.now())
     print(forecast.report_ac_power_and_measurement())
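validate_pv_forecast_data() now signals an unknown format with None instead of a bare string return type. A reduced sketch of the same pydantic pattern, with a hypothetical ForecastRecord schema (not the project's model):

from typing import Any, Optional

from pydantic import BaseModel, ValidationError


class ForecastRecord(BaseModel):
    # Hypothetical schema, only to illustrate Optional[str] format detection.
    datetime: str
    power: float


def detect_format(data: dict[str, Any]) -> Optional[str]:
    try:
        ForecastRecord.model_validate(data)
    except ValidationError as e:
        # Collect a readable error message, then signal "unknown format" with None.
        msg = "\n".join(f"{'.'.join(map(str, err['loc']))}: {err['msg']}" for err in e.errors())
        print(f"Validation did not succeed:\n{msg}")
        return None
    return "ForecastRecord"


print(detect_format({"datetime": "2024-11-26T00:00", "power": 512.0}))  # ForecastRecord
print(detect_format({"power": "n/a"}))  # None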

View File

@@ -120,7 +120,8 @@ def fastapi_gesamtlast(
     leistung_haushalt = future_predictions["Adjusted Pred"].values
     gesamtlast = Gesamtlast(prediction_hours=hours)
     gesamtlast.hinzufuegen(
-        "Haushalt", leistung_haushalt
+        "Haushalt",
+        leistung_haushalt,  # type: ignore[arg-type]
     )  # Add household load to total load calculation
     # Calculate the total load
@@ -182,12 +183,7 @@ def fastapi_pvprognose(url: str, ac_power_measurement: Optional[float] = None) -
     pv_forecast = PVforecast.get_pv_forecast_for_date_range(date_now, date)
     temperature_forecast = PVforecast.get_temperature_for_date_range(date_now, date)
-    # Return both forecasts as a JSON response
-    ret = {
-        "temperature": temperature_forecast.tolist(),
-        "pvpower": pv_forecast.tolist(),
-    }
-    return ret
+    return ForecastResponse(temperature=temperature_forecast.tolist(), pvpower=pv_forecast.tolist())
 @app.post("/optimize")
@@ -203,12 +199,11 @@ def fastapi_optimize(
     # Perform optimization simulation
     result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour)
     # print(result)
-    # convert to JSON (None accepted by dumps)
     return result
 @app.get("/visualization_results.pdf", response_class=PdfResponse)
-def get_pdf():
+def get_pdf() -> PdfResponse:
     # Endpoint to serve the generated PDF with visualization results
     output_path = config.working_dir / config.directories.output
     if not output_path.is_dir():
@@ -216,16 +211,16 @@ def get_pdf():
     file_path = output_path / "visualization_results.pdf"
     if not file_path.is_file():
         raise HTTPException(status_code=404, detail="No visualization result available.")
-    return FileResponse(file_path)
+    return PdfResponse(file_path)
 @app.get("/site-map", include_in_schema=False)
-def site_map():
+def site_map() -> RedirectResponse:
     return RedirectResponse(url="/docs")
 @app.get("/", include_in_schema=False)
-def root():
+def root() -> RedirectResponse:
     # Redirect the root URL to the site map
     return RedirectResponse(url="/docs")
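A hedged sketch of the typed-endpoint style used above. ForecastResponse's fields and the PdfResponse definition are assumptions; the real classes live elsewhere in the project:

from typing import Any

from fastapi import FastAPI
from fastapi.responses import FileResponse
from pydantic import BaseModel

app = FastAPI()


class ForecastResponse(BaseModel):
    # Assumed shape: two parallel hourly series.
    temperature: list[float]
    pvpower: list[float]


class PdfResponse(FileResponse):
    # Assumption: a FileResponse subclass that always serves application/pdf,
    # usable both as response_class (for OpenAPI) and as the returned object.
    def __init__(self, path: str, **kwargs: Any) -> None:
        kwargs.setdefault("media_type", "application/pdf")
        super().__init__(path, **kwargs)


@app.get("/forecast", response_model=ForecastResponse)
def forecast() -> ForecastResponse:
    return ForecastResponse(temperature=[10.5, 11.0], pvpower=[0.0, 250.0])


@app.get("/report.pdf", response_class=PdfResponse)
def report() -> PdfResponse:
    return PdfResponse("visualization_results.pdf")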

View File

@@ -25,6 +25,8 @@ Notes:
     - Cache files are automatically associated with the current date unless specified.
 """
+from __future__ import annotations
 import hashlib
 import inspect
 import os
@@ -32,7 +34,7 @@ import pickle
 import tempfile
 import threading
 from datetime import date, datetime, time, timedelta
-from typing import List, Optional, Union
+from typing import IO, Callable, Generic, List, Optional, ParamSpec, TypeVar, Union
 from akkudoktoreos.utils.datetimeutil import to_datetime, to_timedelta
 from akkudoktoreos.utils.logutil import get_logger
@@ -40,15 +42,20 @@ from akkudoktoreos.utils.logutil import get_logger
 logger = get_logger(__file__)
-class CacheFileStoreMeta(type):
+T = TypeVar("T")
+Param = ParamSpec("Param")
+RetType = TypeVar("RetType")
+class CacheFileStoreMeta(type, Generic[T]):
     """A thread-safe implementation of CacheFileStore."""
-    _instances = {}
+    _instances: dict[CacheFileStoreMeta[T], T] = {}
     _lock: threading.Lock = threading.Lock()
     """Lock object to synchronize threads on first access to CacheFileStore."""
-    def __call__(cls):
+    def __call__(cls) -> T:
         """Return CacheFileStore instance."""
         with cls._lock:
             if cls not in cls._instances:
@@ -80,18 +87,18 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         >>> print(cache_file.read())  # Output: 'Some data'
     """
-    def __init__(self):
+    def __init__(self) -> None:
         """Initializes the CacheFileStore instance.
         This constructor sets up an empty key-value store (a dictionary) where each key
         corresponds to a cache file that is associated with a given key and an optional date.
         """
-        self._store = {}
+        self._store: dict[str, tuple[IO[bytes], datetime]] = {}
         self._store_lock = threading.Lock()
     def _generate_cache_file_key(
         self, key: str, until_datetime: Union[datetime, None]
-    ) -> (str, datetime):
+    ) -> tuple[str, datetime]:
         """Generates a unique cache file key based on the key and date.
         The cache file key is a combination of the input key and the date (if provided),
@@ -114,7 +121,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         cache_key = hashlib.sha256(f"{key}{key_datetime}".encode("utf-8")).hexdigest()
         return (f"{cache_key}", until_datetime)
-    def _get_file_path(self, file_obj):
+    def _get_file_path(self, file_obj: IO[bytes]) -> Optional[str]:
         """Retrieve the file path from a file-like object.
         Args:
@@ -136,7 +143,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         until_date: Union[datetime, date, str, int, float, None] = None,
         until_datetime: Union[datetime, date, str, int, float, None] = None,
         with_ttl: Union[timedelta, str, int, float, None] = None,
-    ):
+    ) -> datetime:
         """Get until_datetime from the given options."""
         if until_datetime:
             until_datetime = to_datetime(until_datetime)
@@ -152,11 +159,11 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def _is_valid_cache_item(
         self,
-        cache_item: (),
-        until_datetime: datetime = None,
-        at_datetime: datetime = None,
-        before_datetime: datetime = None,
-    ):
+        cache_item: tuple[IO[bytes], datetime],
+        until_datetime: Optional[datetime] = None,
+        at_datetime: Optional[datetime] = None,
+        before_datetime: Optional[datetime] = None,
+    ) -> bool:
         cache_file_datetime = cache_item[1]  # Extract the datetime associated with the cache item
         if (
             (until_datetime and until_datetime == cache_file_datetime)
@@ -169,10 +176,10 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def _search(
         self,
         key: str,
-        until_datetime: Union[datetime, date, str, int, float] = None,
-        at_datetime: Union[datetime, date, str, int, float] = None,
-        before_datetime: Union[datetime, date, str, int, float] = None,
-    ):
+        until_datetime: Union[datetime, date, str, int, float, None] = None,
+        at_datetime: Union[datetime, date, str, int, float, None] = None,
+        before_datetime: Union[datetime, date, str, int, float, None] = None,
+    ) -> Optional[tuple[str, IO[bytes], datetime]]:
         """Searches for a cached item that matches the key and falls within the datetime range.
         This method looks for a cache item with a key that matches the given `key`, and whose associated
@@ -193,20 +200,23 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
             otherwise returns `None`.
         """
         # Convert input to datetime if they are not None
-        if until_datetime:
-            until_datetime = to_datetime(until_datetime)
-        if at_datetime:
-            at_datetime = to_datetime(at_datetime)
-        if before_datetime:
-            before_datetime = to_datetime(before_datetime)
+        until_datetime_dt: Optional[datetime] = None
+        if until_datetime is not None:
+            until_datetime_dt = to_datetime(until_datetime)
+        at_datetime_dt: Optional[datetime] = None
+        if at_datetime is not None:
+            at_datetime_dt = to_datetime(at_datetime)
+        before_datetime_dt: Optional[datetime] = None
+        if before_datetime is not None:
+            before_datetime_dt = to_datetime(before_datetime)
         for cache_file_key, cache_item in self._store.items():
             # Check if the cache file datetime matches the given criteria
             if self._is_valid_cache_item(
                 cache_item,
-                until_datetime=until_datetime,
-                at_datetime=at_datetime,
-                before_datetime=before_datetime,
+                until_datetime=until_datetime_dt,
+                at_datetime=at_datetime_dt,
+                before_datetime=before_datetime_dt,
             ):
                 # This cache file is within the given datetime range
                 # Extract the datetime associated with the cache item
@@ -231,7 +241,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         mode: str = "wb+",
         delete: bool = False,
         suffix: Optional[str] = None,
-    ):
+    ) -> IO[bytes]:
         """Creates a new file-like tempfile object associated with the given key.
         If a cache file with the given key and valid timedate already exists, the existing file is
@@ -262,31 +272,31 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
            >>> cache_file.seek(0)
            >>> print(cache_file.read())  # Output: 'Some cached data'
         """
-        until_datetime = self._until_datetime_by_options(
+        until_datetime_dt = self._until_datetime_by_options(
             until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
         )
-        cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime)
+        cache_file_key, _ = self._generate_cache_file_key(key, until_datetime_dt)
         with self._store_lock:  # Synchronize access to _store
-            if cache_file_key in self._store:
+            if (cache_file_item := self._store.get(cache_file_key)) is not None:
                 # File already available
-                cache_file_obj, until_datetime = self._store.get(cache_file_key)
+                cache_file_obj = cache_file_item[0]
             else:
                 cache_file_obj = tempfile.NamedTemporaryFile(
                     mode=mode, delete=delete, suffix=suffix
                 )
-                self._store[cache_file_key] = (cache_file_obj, until_datetime)
+                self._store[cache_file_key] = (cache_file_obj, until_datetime_dt)
             cache_file_obj.seek(0)
         return cache_file_obj
     def set(
         self,
         key: str,
-        file_obj,
+        file_obj: IO[bytes],
         until_date: Union[datetime, date, str, int, float, None] = None,
         until_datetime: Union[datetime, date, str, int, float, None] = None,
         with_ttl: Union[timedelta, str, int, float, None] = None,
-    ):
+    ) -> None:
         """Stores a file-like object in the cache under the specified key and date.
         This method allows you to manually set a file-like object into the cache with a specific key
@@ -309,11 +319,11 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         Example:
            >>> cache_store.set('example_file', io.BytesIO(b'Some binary data'))
         """
-        until_datetime = self._until_datetime_by_options(
+        until_datetime_dt = self._until_datetime_by_options(
            until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
         )
-        cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime)
+        cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime_dt)
         with self._store_lock:  # Synchronize access to _store
             if cache_file_key in self._store:
                 raise ValueError(f"Key already in store: `{key}`.")
@@ -327,7 +337,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         until_datetime: Union[datetime, date, str, int, float, None] = None,
         at_datetime: Union[datetime, date, str, int, float, None] = None,
         before_datetime: Union[datetime, date, str, int, float, None] = None,
-    ):
+    ) -> Optional[IO[bytes]]:
         """Retrieves the cache file associated with the given key and validity datetime.
         If no cache file is found for the provided key and datetime, the method returns None.
@@ -374,11 +384,11 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def delete(
         self,
-        key,
+        key: str,
         until_date: Union[datetime, date, str, int, float, None] = None,
         until_datetime: Union[datetime, date, str, int, float, None] = None,
         before_datetime: Union[datetime, date, str, int, float, None] = None,
-    ):
+    ) -> None:
         """Deletes the cache file associated with the given key and datetime.
         This method removes the cache file from the store.
@@ -429,8 +439,10 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
                     logger.error(f"Error deleting cache file {file_path}: {e}")
     def clear(
-        self, clear_all=False, before_datetime: Union[datetime, date, str, int, float, None] = None
-    ):
+        self,
+        clear_all: bool = False,
+        before_datetime: Union[datetime, date, str, int, float, None] = None,
+    ) -> None:
         """Deletes all cache files or those expiring before `before_datetime`.
         Args:
@@ -500,7 +512,7 @@ def cache_in_file(
     mode: str = "wb+",
     delete: bool = False,
     suffix: Optional[str] = None,
-):
+) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]:
     """Decorator to cache the output of a function into a temporary file.
     The decorator caches function output to a cache file based on its inputs as key to identify the
@@ -545,35 +557,35 @@ def cache_in_file(
        >>> result = expensive_computation(until_date = date.today())
     """
-    def decorator(func):
+    def decorator(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
         nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
         func_source_code = inspect.getsource(func)
-        def wrapper(*args, **kwargs):
+        def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType:
             nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
             # Convert args to a dictionary based on the function's signature
             args_names = func.__code__.co_varnames[: func.__code__.co_argcount]
             args_dict = dict(zip(args_names, args))
             # Search for caching parameters of function and remove
-            force_update = None
+            force_update: Optional[bool] = None
             for param in ["force_update", "until_datetime", "with_ttl", "until_date"]:
                 if param in kwargs:
                     if param == "force_update":
-                        force_update = kwargs[param]
+                        force_update = kwargs[param]  # type: ignore[assignment]
                         kwargs.pop("force_update")
                     if param == "until_datetime":
-                        until_datetime = kwargs[param]
+                        until_datetime = kwargs[param]  # type: ignore[assignment]
                         until_date = None
                         with_ttl = None
                     elif param == "with_ttl":
                         until_datetime = None
                         until_date = None
-                        with_ttl = kwargs[param]
+                        with_ttl = kwargs[param]  # type: ignore[assignment]
                     elif param == "until_date":
                         until_datetime = None
-                        until_date = kwargs[param]
+                        until_date = kwargs[param]  # type: ignore[assignment]
                         with_ttl = None
                     kwargs.pop("until_datetime", None)
                     kwargs.pop("until_date", None)
@@ -589,7 +601,7 @@ def cache_in_file(
             # Create key based on argument names, argument values, and function source code
             key = str(args_dict) + str(kwargs_clone) + str(func_source_code)
-            result = None
+            result: Optional[RetType | bytes] = None
             # Get cache file that is currently valid
             cache_file = CacheFileStore().get(key)
             if not force_update and cache_file is not None:
@@ -624,11 +636,11 @@ def cache_in_file(
                     if "b" in mode:
                         pickle.dump(result, cache_file)
                     else:
-                        cache_file.write(result)
+                        cache_file.write(result)  # type: ignore[call-overload]
                 except Exception as e:
                     logger.info(f"Write failed: {e}")
                     CacheFileStore().delete(key)
-            return result
+            return result  # type: ignore[return-value]
         return wrapper
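The Param/RetType pair introduced above is the standard ParamSpec recipe for typing decorators that preserve the wrapped signature. A minimal self-contained version of that pattern:

import functools
from typing import Callable, ParamSpec, TypeVar

Param = ParamSpec("Param")
RetType = TypeVar("RetType")


def log_calls(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
    # ParamSpec keeps the wrapped function's exact signature visible to mypy,
    # so callers of the decorated function still get full argument checking.
    @functools.wraps(func)
    def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType:
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)

    return wrapper


@log_calls
def add(a: int, b: int) -> int:
    return a + b


print(add(2, 3))  # mypy would flag add("2", 3) as an error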

View File

@@ -24,19 +24,39 @@ Example usage:
 import re
 from datetime import date, datetime, time, timedelta, timezone
-from typing import Optional, Union
+from typing import Annotated, Literal, Optional, Union, overload
 from zoneinfo import ZoneInfo
 from timezonefinder import TimezoneFinder
+@overload
+def to_datetime(
+    date_input: Union[datetime, date, str, int, float, None],
+    as_string: str | Literal[True],
+    to_timezone: Optional[Union[ZoneInfo, str]] = None,
+    to_naiv: Optional[bool] = None,
+    to_maxtime: Optional[bool] = None,
+) -> str: ...
+@overload
+def to_datetime(
+    date_input: Union[datetime, date, str, int, float, None],
+    as_string: Literal[False] | None = None,
+    to_timezone: Optional[Union[ZoneInfo, str]] = None,
+    to_naiv: Optional[bool] = None,
+    to_maxtime: Optional[bool] = None,
+) -> datetime: ...
 def to_datetime(
     date_input: Union[datetime, date, str, int, float, None],
     as_string: Optional[Union[str, bool]] = None,
-    to_timezone: Optional[Union[timezone, str]] = None,
+    to_timezone: Optional[Union[ZoneInfo, str]] = None,
     to_naiv: Optional[bool] = None,
     to_maxtime: Optional[bool] = None,
-):
+) -> str | datetime:
     """Converts a date input to a datetime object or a formatted string with timezone support.
     Args:
@@ -67,7 +87,9 @@ def to_datetime(
     Raises:
         ValueError: If the date input is not a valid type or format.
+        RuntimeError: If no local timezone information available.
     """
+    dt_object: Optional[datetime] = None
     if isinstance(date_input, datetime):
         dt_object = date_input
     elif isinstance(date_input, date):
@@ -104,7 +126,6 @@ def to_datetime(
                 dt_object = datetime.strptime(date_input, fmt)
                 break
             except ValueError as e:
-                dt_object = None
                 continue
         if dt_object is None:
             raise ValueError(f"Date string {date_input} does not match any known formats.")
@@ -120,11 +141,13 @@ def to_datetime(
     local_date = datetime.now().astimezone()
     local_tz_name = local_date.tzname()
     local_utc_offset = local_date.utcoffset()
+    if local_tz_name is None or local_utc_offset is None:
+        raise RuntimeError("Could not determine local time zone")
     local_timezone = timezone(local_utc_offset, local_tz_name)
     # Get target timezone
     if to_timezone:
-        if isinstance(to_timezone, timezone):
+        if isinstance(to_timezone, ZoneInfo):
             target_timezone = to_timezone
         elif isinstance(to_timezone, str):
             try:
@@ -168,7 +191,11 @@ def to_datetime(
     return dt_object
-def to_timedelta(input_value):
+def to_timedelta(
+    input_value: Union[
+        timedelta, str, int, float, tuple[int, int, int, int], Annotated[list[int], 4]
+    ],
+) -> timedelta:
     """Converts various input types into a timedelta object.
     Args:
@@ -238,7 +265,15 @@ def to_timedelta(input_value):
         raise ValueError(f"Unsupported input type: {type(input_value)}")
-def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None):
+@overload
+def to_timezone(lat: float, lon: float, as_string: Literal[True]) -> str: ...
+@overload
+def to_timezone(lat: float, lon: float, as_string: Literal[False] | None = None) -> ZoneInfo: ...
+def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None) -> str | ZoneInfo:
     """Determines the timezone for a given geographic location specified by latitude and longitude.
     By default, it returns a `ZoneInfo` object representing the timezone.
@@ -269,11 +304,13 @@ def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None):
     """
     # Initialize the static variable only once
     if not hasattr(to_timezone, "timezone_finder"):
-        to_timezone.timezone_finder = TimezoneFinder()  # static variable
+        # static variable
+        to_timezone.timezone_finder = TimezoneFinder()  # type: ignore[attr-defined]
     # Check and convert coordinates to timezone
+    tz_name: Optional[str] = None
     try:
-        tz_name = to_timezone.timezone_finder.timezone_at(lat=lat, lng=lon)
+        tz_name = to_timezone.timezone_finder.timezone_at(lat=lat, lng=lon)  # type: ignore[attr-defined]
         if not tz_name:
             raise ValueError(f"No timezone found for coordinates: latitude {lat}, longitude {lon}")
     except Exception as e:
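The @overload/Literal pairs above let mypy pick the return type from the as_string argument while a single implementation does the work. The same pattern in miniature (toy function, not the project's API):

from typing import Literal, Optional, overload


@overload
def fmt_value(value: float, as_string: Literal[True]) -> str: ...
@overload
def fmt_value(value: float, as_string: Literal[False] | None = None) -> float: ...
def fmt_value(value: float, as_string: Optional[bool] = None) -> str | float:
    # Single implementation; the overloads only narrow the static return type.
    return f"{value:.2f}" if as_string else value


number: float = fmt_value(3.14159)    # mypy sees float
text: str = fmt_value(3.14159, True)  # mypy sees str
print(number, text)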

View File

@@ -1,12 +1,13 @@
 import datetime
 import json
 import zoneinfo
+from typing import Any
 import numpy as np
 # currently unused
-def ist_dst_wechsel(tag: datetime.datetime, timezone="Europe/Berlin") -> bool:
+def ist_dst_wechsel(tag: datetime.datetime, timezone: str = "Europe/Berlin") -> bool:
     """Checks if Daylight Saving Time (DST) starts or ends on a given day."""
     tz = zoneinfo.ZoneInfo(timezone)
     # Get the current day and the next day
@@ -20,15 +21,25 @@ def ist_dst_wechsel(tag: datetime.datetime, timezone="Europe/Berlin") -> bool:
 class NumpyEncoder(json.JSONEncoder):
-    def default(self, obj):
+    @classmethod
+    def convert_numpy(cls, obj: Any) -> tuple[Any, bool]:
         if isinstance(obj, np.ndarray):
-            return obj.tolist()  # Convert NumPy arrays to lists
+            # Convert NumPy arrays to lists
+            return [
+                None if isinstance(x, (int, float)) and np.isnan(x) else x for x in obj.tolist()
+            ], True
         if isinstance(obj, np.generic):
-            return obj.item()  # Convert NumPy scalars to native Python types
+            return obj.item(), True  # Convert NumPy scalars to native Python types
+        return obj, False
+    def default(self, obj: Any) -> Any:
+        obj, converted = NumpyEncoder.convert_numpy(obj)
+        if converted:
+            return obj
         return super(NumpyEncoder, self).default(obj)
     @staticmethod
-    def dumps(data):
+    def dumps(data: Any) -> str:
         """Static method to serialize a Python object into a JSON string using NumpyEncoder.
         Args:
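A short usage sketch of a NumPy-aware JSON encoder of the same shape as above (reduced stand-alone class, not the project's NumpyEncoder):

import json
from typing import Any

import numpy as np


class NpEncoder(json.JSONEncoder):
    # Same idea as the encoder above, reduced to the essentials.
    def default(self, obj: Any) -> Any:
        if isinstance(obj, np.ndarray):
            return [None if isinstance(x, float) and np.isnan(x) else x for x in obj.tolist()]
        if isinstance(obj, np.generic):
            return obj.item()
        return super().default(obj)


print(json.dumps({"soc": np.array([55.0, np.nan]), "hours": np.int64(48)}, cls=NpEncoder))
# {"soc": [55.0, null], "hours": 48}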

View File

@@ -1,4 +1,6 @@
 # Set the backend for matplotlib to Agg
+from typing import Any, Optional
+
 import matplotlib
 import matplotlib.pyplot as plt
 import numpy as np

@@ -10,20 +12,20 @@ matplotlib.use("Agg")

 def visualisiere_ergebnisse(
-    gesamtlast,
-    pv_forecast,
-    strompreise,
-    ergebnisse,
-    ac,  # AC charging allowed
-    dc,  # DC charging allowed
-    discharge,  # Discharge allowed
-    temperature,
-    start_hour,
-    einspeiseverguetung_euro_pro_wh,
+    gesamtlast: list[float],
+    pv_forecast: list[float],
+    strompreise: list[float],
+    ergebnisse: dict[str, Any],
+    ac: np.ndarray,  # AC charging allowed
+    dc: np.ndarray,  # DC charging allowed
+    discharge: np.ndarray,  # Discharge allowed
+    temperature: Optional[list[float]],
+    start_hour: int,
+    einspeiseverguetung_euro_pro_wh: np.ndarray,
     config: AppConfig,
-    filename="visualization_results.pdf",
-    extra_data=None,
-):
+    filename: str = "visualization_results.pdf",
+    extra_data: Optional[dict[str, Any]] = None,
+) -> None:
     #####################
     # 24-hour visualization
     #####################

@@ -81,13 +83,14 @@ def visualisiere_ergebnisse(
     plt.grid(True)

     # Temperature forecast
-    plt.subplot(3, 2, 5)
-    plt.title("Temperature Forecast (°C)")
-    plt.plot(hours, temperature, label="Temperature (°C)", marker="x")
-    plt.xlabel("Hour of the Day")
-    plt.ylabel("°C")
-    plt.legend()
-    plt.grid(True)
+    if temperature is not None:
+        plt.subplot(3, 2, 5)
+        plt.title("Temperature Forecast (°C)")
+        plt.plot(hours, temperature, label="Temperature (°C)", marker="x")
+        plt.xlabel("Hour of the Day")
+        plt.ylabel("°C")
+        plt.legend()
+        plt.grid(True)

     pdf.savefig()  # Save the current figure state to the PDF
     plt.close()  # Close the current figure to free up memory
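
The temperature panel is now guarded because the parameter is Optional. A small self-contained sketch of the same pattern; the function name and filename here are illustrative, not the project's API:

    from typing import Optional

    import matplotlib

    matplotlib.use("Agg")  # non-interactive backend, as in the module above
    import matplotlib.pyplot as plt
    from matplotlib.backends.backend_pdf import PdfPages


    def plot_temperature(hours: list[int], temperature: Optional[list[float]],
                         filename: str = "demo_visualization.pdf") -> None:
        """Skip the temperature subplot when no data is given instead of crashing on None."""
        with PdfPages(filename) as pdf:
            plt.figure(figsize=(8, 4))
            if temperature is not None:
                plt.title("Temperature Forecast (°C)")
                plt.plot(hours, temperature, label="Temperature (°C)", marker="x")
                plt.xlabel("Hour of the Day")
                plt.ylabel("°C")
                plt.legend()
                plt.grid(True)
            pdf.savefig()
            plt.close()


    plot_temperature(list(range(24)), None)  # produces an (empty) page rather than failing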

View File

@@ -15,7 +15,7 @@ from akkudoktoreos.config import EOS_DIR, AppConfig, load_config
 def load_config_tmp(tmp_path: Path) -> AppConfig:
     """Creates an AppConfig from default.config.json with a tmp output directory."""
     config = load_config(tmp_path)
-    config.directories.output = tmp_path
+    config.directories.output = str(tmp_path)
     return config
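
The `str(tmp_path)` conversion is needed because the config model declares the output directory as a string. A hypothetical, trimmed-down model to illustrate what mypy enforces here:

    from pathlib import Path

    from pydantic import BaseModel


    class Directories(BaseModel):
        output: str  # hypothetical mirror of the real config field, declared as str


    def configure_output(tmp_path: Path) -> Directories:
        # mypy rejects Directories(output=tmp_path): Path is not str, so convert explicitly.
        return Directories(output=str(tmp_path))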

View File

@@ -299,7 +299,7 @@ def test_cache_in_file_decorator_forces_update(cache_store):
     cache_file.write(result2)

     # Call the decorated function again with force update (should get result from function)
-    result = my_function(until_date=until_date, force_update=True)
+    result = my_function(until_date=until_date, force_update=True)  # type: ignore[call-arg]
     assert result == result1

     # Assure result was written to the same cache file

@@ -319,7 +319,7 @@ def test_cache_in_file_handles_ttl(cache_store):
         return "New result"

     # Call the decorated function
-    result = my_function(with_ttl="1 second")
+    result = my_function(with_ttl="1 second")  # type: ignore[call-arg]

     # Overwrite cache file
     key = next(iter(cache_store._store))

@@ -330,14 +330,14 @@ def test_cache_in_file_handles_ttl(cache_store):
         cache_file.seek(0)  # Move to the start of the file
         assert cache_file.read() == "Modified result"

-    result = my_function(with_ttl="1 second")
+    result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
     assert result == "Modified result"

     # Wait one second to let the cache time out
     sleep(1)

     # Call again - cache should be timed out
-    result = my_function(with_ttl="1 second")
+    result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
     assert result == "New result"

@@ -349,7 +349,7 @@ def test_cache_in_file_handles_bytes_return(cache_store):
     # Define a function that returns bytes
     @cache_in_file()
-    def my_function(until_date=None):
+    def my_function(until_date=None) -> bytes:
         return b"Some binary data"

     # Call the decorated function

@@ -358,7 +358,14 @@ def test_cache_in_file_handles_bytes_return(cache_store):
     # Check if the binary data was written to the cache file
     key = next(iter(cache_store._store))
     cache_file = cache_store._store[key][0]
+    assert len(cache_store._store) == 1
     assert cache_file is not None
     cache_file.seek(0)
     result1 = pickle.load(cache_file)
     assert result1 == result
+
+    # Access cache
+    result = my_function(until_date=datetime.now() + timedelta(days=1))
+    assert len(cache_store._store) == 1
+    assert cache_store._store[key][0] is not None
+    assert result1 == result
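
The `# type: ignore[call-arg]` markers are needed because the cache decorator consumes extra keyword arguments (`force_update`, `with_ttl`) at call time that are not part of the wrapped function's declared signature. A rough sketch of the situation; this is not the real `cache_in_file` implementation:

    from typing import Any, Callable, ParamSpec, TypeVar

    P = ParamSpec("P")
    R = TypeVar("R")


    def cache_like(func: Callable[P, R]) -> Callable[P, R]:
        """Sketch only: signature-preserving decorator that consumes extra kwargs at runtime."""

        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            # A real cache decorator would act on these before calling through.
            kwargs.pop("force_update", None)
            kwargs.pop("with_ttl", None)
            return func(*args, **kwargs)

        return wrapper


    @cache_like
    def my_function(until_date: Any = None) -> str:
        return "New result"


    # mypy flags this call with [call-arg]: force_update is not in the declared signature.
    print(my_function(until_date=None, force_update=True))  # type: ignore[call-arg]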

View File

@@ -8,6 +8,7 @@ from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
 from akkudoktoreos.prediction.ems import (
     EnergieManagementSystem,
     EnergieManagementSystemParameters,
+    SimulationResult,
 )

 prediction_hours = 48

@@ -211,9 +212,9 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
             preis_euro_pro_wh_akku=preis_euro_pro_wh_akku,
             gesamtlast=gesamtlast,
         ),
+        wechselrichter=wechselrichter,
         eauto=eauto,
         home_appliance=home_appliance,
-        wechselrichter=wechselrichter,
     )

     return ems

@@ -255,26 +256,7 @@ def test_simulation(create_ems_instance):
     # Check that the result is a dictionary
     assert isinstance(result, dict), "Result should be a dictionary."
+    assert SimulationResult(**result) is not None
-
-    # Verify that the expected keys are present in the result
-    expected_keys = [
-        "Last_Wh_pro_Stunde",
-        "Netzeinspeisung_Wh_pro_Stunde",
-        "Netzbezug_Wh_pro_Stunde",
-        "Kosten_Euro_pro_Stunde",
-        "akku_soc_pro_stunde",
-        "Einnahmen_Euro_pro_Stunde",
-        "Gesamtbilanz_Euro",
-        "EAuto_SoC_pro_Stunde",
-        "Gesamteinnahmen_Euro",
-        "Gesamtkosten_Euro",
-        "Verluste_Pro_Stunde",
-        "Gesamt_Verluste",
-        "Home_appliance_wh_per_hour",
-    ]
-    for key in expected_keys:
-        assert key in result, f"The key '{key}' should be present in the result."

     # Check the length of the main arrays
     assert (

@@ -344,7 +326,7 @@ def test_simulation(create_ems_instance):
     assert (
         np.nansum(
             np.where(
-                np.equal(result["Home_appliance_wh_per_hour"], None),
+                result["Home_appliance_wh_per_hour"] is None,
                 np.nan,
                 np.array(result["Home_appliance_wh_per_hour"]),
            )
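
Replacing the hand-maintained `expected_keys` list with `SimulationResult(**result)` delegates the structural check to the pydantic model. A tiny illustration with a hypothetical model (the real `SimulationResult` lives in akkudoktoreos.prediction.ems and declares every key the simulation returns):

    from pydantic import BaseModel, ValidationError


    class MiniSimulationResult(BaseModel):
        # Hypothetical stand-in with a subset of the fields from the removed key list.
        Gesamtbilanz_Euro: float
        Gesamtkosten_Euro: float
        Gesamteinnahmen_Euro: float


    result = {"Gesamtbilanz_Euro": 1.23, "Gesamtkosten_Euro": 2.0, "Gesamteinnahmen_Euro": 0.77}

    # Missing or mistyped keys raise ValidationError, so one constructor call
    # replaces a whole loop of per-key assertions.
    try:
        assert MiniSimulationResult(**result) is not None
    except ValidationError as exc:
        print(exc)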

View File

@@ -44,13 +44,13 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
     )

     # Parameters based on previous example data
-    pv_prognose_wh = np.full(prediction_hours, 0)
+    pv_prognose_wh = [0.0] * prediction_hours
     pv_prognose_wh[10] = 5000.0
     pv_prognose_wh[11] = 5000.0

-    strompreis_euro_pro_wh = np.full(48, 0.001)
-    strompreis_euro_pro_wh[0:10] = 0.00001
-    strompreis_euro_pro_wh[11:15] = 0.00005
+    strompreis_euro_pro_wh = [0.001] * prediction_hours
+    strompreis_euro_pro_wh[0:10] = [0.00001] * 10
+    strompreis_euro_pro_wh[11:15] = [0.00005] * 4
     strompreis_euro_pro_wh[20] = 0.00001

     einspeiseverguetung_euro_pro_wh = [0.00007] * len(strompreis_euro_pro_wh)

@@ -116,9 +116,9 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
             preis_euro_pro_wh_akku=0,
             gesamtlast=gesamtlast,
         ),
+        wechselrichter=wechselrichter,
         eauto=eauto,
         home_appliance=home_appliance,
-        wechselrichter=wechselrichter,
     )

     ac = np.full(prediction_hours, 0)
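
One subtlety with the list-based fixture above: unlike a numpy array, a Python list does not broadcast a scalar into a slice, and a length mismatch silently resizes the list, which is why the replacement values are written out with matching lengths:

    prices = [0.001] * 48
    prices[0:10] = [0.00001] * 10   # matching length keeps the list at 48 entries
    prices[11:15] = [0.00005] * 4
    assert len(prices) == 48

    shrunk = [0.001] * 48
    shrunk[0:10] = [0.00001]        # no broadcasting: the slice collapses to one entry
    assert len(shrunk) == 39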

View File

@@ -54,7 +54,7 @@ def test_optimize(
     file = DIR_TESTDATA / fn_out
     with file.open("r") as f_out:
-        expected_output_data = json.load(f_out)
+        expected_result = OptimizeResponse(**json.load(f_out))

     opt_class = optimization_problem(tmp_config, fixed_seed=42)
     start_hour = 10

@@ -72,9 +72,7 @@ def test_optimize(
     # Assert that the output contains all expected entries.
     # This does not assert that the optimization always gives the same result!
     # Reproducibility and mathematical accuracy should be tested on the level of individual components.
-    compare_dict(ergebnis, expected_output_data)
+    compare_dict(ergebnis.model_dump(), expected_result.model_dump())

     # The function creates a visualization result PDF as a side-effect.
     visualisiere_ergebnisse_patch.assert_called_once()
-
-    OptimizeResponse(**ergebnis)
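
Comparing via `model_dump()` keeps the existing `compare_dict` helper usable now that both sides are pydantic models. A minimal sketch with a hypothetical model standing in for `OptimizeResponse`:

    from pydantic import BaseModel


    class MiniResponse(BaseModel):
        # Hypothetical stand-in; field names are illustrative.
        gesamtbilanz_euro: float
        gesamtkosten_euro: float


    expected = MiniResponse(gesamtbilanz_euro=1.0, gesamtkosten_euro=2.0)
    actual = MiniResponse(gesamtbilanz_euro=1.0, gesamtkosten_euro=2.0)

    # model_dump() returns plain dicts, so dict-based helpers and per-key diffs keep working.
    assert actual.model_dump() == expected.model_dump()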

View File

@@ -49,7 +49,7 @@ def test_config_merge(tmp_path: Path) -> None:
     with pytest.raises(ValueError):
         # custom configuration is broken but not updated.
-        load_config(tmp_path, tmp_path, False)
+        load_config(tmp_path, True, False)

     with config_file.open("r") as f_in:
         # custom configuration is not changed.
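
The second positional argument in the fixed call is a boolean flag; passing `tmp_path` happened to be truthy at runtime but is rejected once the signature is type-checked. A hypothetical signature, only to illustrate the kind of mistake mypy surfaces here (the real `load_config` parameter names may differ):

    from pathlib import Path


    def load_config_sketch(config_dir: Path, copy_default: bool, update_outdated: bool) -> None:
        """Illustrative signature only; parameter names are made up for this sketch."""


    # Runtime never complained: a Path is truthy, so the old call "worked" by accident.
    # mypy flags the Path-for-bool mismatch, and the test now passes an explicit True.
    load_config_sketch(Path("/tmp/eos"), True, False)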

View File

@@ -121,7 +121,7 @@ def test_update_ac_power_measurement(pv_forecast_instance, sample_forecast_start
     forecast_start = pv_forecast_instance.get_forecast_start()
     assert forecast_start == sample_forecast_start

-    updated = pv_forecast_instance.update_ac_power_measurement(forecast_start, 1000)
+    updated = pv_forecast_instance.update_ac_power_measurement(1000, forecast_start)
     assert updated is True
     forecast_data = pv_forecast_instance.get_forecast_data()
     assert forecast_data[0].ac_power_measurement == 1000

@@ -130,7 +130,7 @@ def test_update_ac_power_measurement(pv_forecast_instance, sample_forecast_start
 def test_update_ac_power_measurement_no_match(pv_forecast_instance):
     """Test updating AC power measurement where no date matches."""
     date_time = datetime(2023, 10, 2, 1, 0, 0)
-    updated = pv_forecast_instance.update_ac_power_measurement(date_time, 1000)
+    updated = pv_forecast_instance.update_ac_power_measurement(1000, date_time)
     assert not updated

@@ -265,7 +265,7 @@ def test_timezone_behaviour(
     # Test updating AC power measurement for a specific date.
     date_time = pv_forecast_instance.get_forecast_start()
     assert date_time == sample_forecast_start
-    updated = pv_forecast_instance.update_ac_power_measurement(date_time, 1000)
+    updated = pv_forecast_instance.update_ac_power_measurement(1000, date_time)
     assert updated is True
     forecast_data = pv_forecast_instance.get_forecast_data()
     assert forecast_data[0].ac_power_measurement == 1000
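
These calls were updated because the measurement value now comes before the timestamp. A hedged sketch (the class and method body are stand-ins, not the project's implementation) showing why keyword arguments make such reorderings harmless:

    from datetime import datetime
    from typing import Optional


    class ForecastSketch:
        """Hypothetical stand-in used only to illustrate the argument order."""

        def update_ac_power_measurement(
            self, ac_power_measurement: float, date_time: Optional[datetime] = None
        ) -> bool:
            # The real method searches the forecast records; this sketch only checks the inputs.
            return date_time is not None and ac_power_measurement >= 0


    forecast = ForecastSketch()

    # Positional calls are easy to swap; keyword arguments state the intent and let mypy
    # catch a float passed where a datetime is expected.
    assert forecast.update_ac_power_measurement(1000, datetime(2023, 10, 2, 1, 0, 0))
    assert forecast.update_ac_power_measurement(
        ac_power_measurement=1000, date_time=datetime(2023, 10, 2, 1, 0, 0)
    )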