mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-08-25 06:52:23 +00:00
Json configuration (#141)
* Add json config
* Adjust code to new config
---------
Co-authored-by: Chris <git@nootch.de>
@@ -3,12 +3,13 @@ from typing import Dict, List, Optional, Union

 import numpy as np

-from akkudoktoreos.config import prediction_hours
+from akkudoktoreos.config import EOSConfig


 class EnergieManagementSystem:
     def __init__(
         self,
+        config: EOSConfig,
         pv_prognose_wh: Optional[np.ndarray] = None,
         strompreis_euro_pro_wh: Optional[np.ndarray] = None,
         einspeiseverguetung_euro_pro_wh: Optional[np.ndarray] = None,
@@ -25,9 +26,9 @@ class EnergieManagementSystem:
         self.eauto = eauto
         self.haushaltsgeraet = haushaltsgeraet
         self.wechselrichter = wechselrichter
-        self.ac_charge_hours = np.full(prediction_hours, 0)
-        self.dc_charge_hours = np.full(prediction_hours, 1)
-        self.ev_charge_hours = np.full(prediction_hours, 0)
+        self.ac_charge_hours = np.full(config.prediction_hours, 0)
+        self.dc_charge_hours = np.full(config.prediction_hours, 1)
+        self.ev_charge_hours = np.full(config.prediction_hours, 0)

     def set_akku_discharge_hours(self, ds: List[int]) -> None:
         self.akku.set_discharge_per_hour(ds)
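The EMS no longer reads a module-level prediction_hours constant; the horizon now arrives through the injected EOSConfig. A minimal sketch of that injection, using the values from default.config.json added later in this commit (the snippet itself is illustrative, not part of the change):

import numpy as np

from akkudoktoreos.config import EOSConfig

eos_config = EOSConfig(
    prediction_hours=48,
    optimization_hours=24,
    penalty=10,
    available_charging_rates_in_percentage=[0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
    feed_in_tariff_eur_per_wh=48,
)

# The EMS sizes its per-hour arrays from the injected config,
# exactly as np.full(config.prediction_hours, ...) does above.
ac_charge_hours = np.full(eos_config.prediction_hours, 0)
print(ac_charge_hours.shape)  # (48,)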
@@ -8,25 +8,24 @@ from akkudoktoreos.class_akku import PVAkku
 from akkudoktoreos.class_ems import EnergieManagementSystem
 from akkudoktoreos.class_haushaltsgeraet import Haushaltsgeraet
 from akkudoktoreos.class_inverter import Wechselrichter
-from akkudoktoreos.config import possible_ev_charge_currents
+from akkudoktoreos.config import AppConfig
 from akkudoktoreos.visualize import visualisiere_ergebnisse


 class optimization_problem:
     def __init__(
         self,
-        prediction_hours: int = 48,
-        strafe: float = 10,
-        optimization_hours: int = 24,
+        config: AppConfig,
         verbose: bool = False,
         fixed_seed: Optional[int] = None,
     ):
         """Initialize the optimization problem with the required parameters."""
-        self.prediction_hours = prediction_hours
-        self.strafe = strafe
+        self._config = config
+        self.prediction_hours = config.eos.prediction_hours
+        self.strafe = config.eos.penalty
         self.opti_param = None
-        self.fixed_eauto_hours = prediction_hours - optimization_hours
-        self.possible_charge_values = possible_ev_charge_currents
+        self.fixed_eauto_hours = config.eos.prediction_hours - config.eos.optimization_hours
+        self.possible_charge_values = config.eos.available_charging_rates_in_percentage
         self.verbose = verbose
         self.fix_seed = fixed_seed
         self.optimize_ev = True
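The optimizer now takes a single AppConfig instead of separate prediction_hours, strafe, and optimization_hours arguments. A hedged sketch of constructing it through the new config loader (working directory and seed are arbitrary examples):

from pathlib import Path

from akkudoktoreos.class_optimize import optimization_problem
from akkudoktoreos.config import load_config

config = load_config(Path.cwd(), copy_default=False)  # falls back to default.config.json
opt = optimization_problem(config, verbose=True, fixed_seed=42)
print(opt.prediction_hours, opt.strafe)  # 48 10 with the default config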
@@ -210,7 +209,10 @@ class optimization_problem:

         if self.optimize_ev:
             self.toolbox.register(
-                "attr_ev_charge_index", random.randint, 0, len(possible_ev_charge_currents) - 1
+                "attr_ev_charge_index",
+                random.randint,
+                0,
+                len(self._config.eos.available_charging_rates_in_percentage) - 1,
             )
         self.toolbox.register("attr_int", random.randint, start_hour, 23)

@@ -236,7 +238,7 @@ class optimization_problem:
             "mutate_ev_charge_index",
             tools.mutUniformInt,
             low=0,
-            up=len(possible_ev_charge_currents) - 1,
+            up=len(self._config.eos.available_charging_rates_in_percentage) - 1,
             indpb=0.2,
         )
         # - Start hour mutation for household devices
@@ -271,7 +273,8 @@ class optimization_problem:

         if self.optimize_ev:
             eautocharge_hours_float = [
-                possible_ev_charge_currents[i] for i in eautocharge_hours_index
+                self._config.eos.available_charging_rates_in_percentage[i]
+                for i in eautocharge_hours_index
             ]
             ems.set_ev_charge_hours(eautocharge_hours_float)
         else:
@@ -420,6 +423,7 @@ class optimization_problem:
         # Initialize the inverter and energy management system
         wr = Wechselrichter(10000, akku)
         ems = EnergieManagementSystem(
+            config=self._config.eos,
             gesamtlast=parameter["gesamtlast"],
             pv_prognose_wh=parameter["pv_forecast"],
             strompreis_euro_pro_wh=parameter["strompreis_euro_pro_wh"],
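The EV genes are indices into the configured rate list, so decoding is a simple lookup. A small worked example with the default rates (purely illustrative):

available_charging_rates_in_percentage = [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0]

eautocharge_hours_index = [0, 6, 3]  # hypothetical genes for three hours
eautocharge_hours_float = [
    available_charging_rates_in_percentage[i] for i in eautocharge_hours_index
]
print(eautocharge_hours_float)  # [0.0, 1.0, 0.625]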
@@ -444,23 +448,24 @@ class optimization_problem:
         )
         if self.optimize_ev:
             eautocharge_hours_float = [
-                possible_ev_charge_currents[i] for i in eautocharge_hours_float
+                self._config.eos.available_charging_rates_in_percentage[i]
+                for i in eautocharge_hours_float
             ]

         ac_charge, dc_charge, discharge = self.decode_charge_discharge(discharge_hours_bin)
         # Visualize the results
         visualisiere_ergebnisse(
-            parameter["gesamtlast"],
-            parameter["pv_forecast"],
-            parameter["strompreis_euro_pro_wh"],
-            o,
-            ac_charge,
-            dc_charge,
-            discharge,
-            parameter["temperature_forecast"],
-            start_hour,
-            self.prediction_hours,
-            einspeiseverguetung_euro_pro_wh,
+            gesamtlast=parameter["gesamtlast"],
+            pv_forecast=parameter["pv_forecast"],
+            strompreise=parameter["strompreis_euro_pro_wh"],
+            ergebnisse=o,
+            ac=ac_charge,
+            dc=dc_charge,
+            discharge=discharge,
+            temperature=parameter["temperature_forecast"],
+            start_hour=start_hour,
+            einspeiseverguetung_euro_pro_wh=einspeiseverguetung_euro_pro_wh,
+            config=self._config,
             extra_data=extra_data,
         )
@@ -1,12 +1,14 @@
 import hashlib
 import json
 import os
 import zoneinfo
 from datetime import datetime, timedelta, timezone
+from pathlib import Path

 import numpy as np
 import requests

+from akkudoktoreos.config import AppConfig, SetupIncomplete


 def repeat_to_shape(array, target_shape):
     # Check if the array fits the target shape
@@ -23,53 +25,57 @@ def repeat_to_shape(array, target_shape):

 class HourlyElectricityPriceForecast:
     def __init__(
-        self, source, cache_dir="cache", charges=0.000228, prediction_hours=24, cache=True
+        self, source: str | Path, config: AppConfig, charges=0.000228, use_cache=True
     ):  # 228
-        self.cache_dir = cache_dir
-        self.cache = cache
-        os.makedirs(self.cache_dir, exist_ok=True)
-        self.cache_time_file = os.path.join(self.cache_dir, "cache_timestamp.txt")
+        self.cache_dir = config.working_dir / config.directories.cache
+        self.use_cache = use_cache
+        if not self.cache_dir.is_dir():
+            raise SetupIncomplete(f"Output path does not exist: {self.cache_dir}.")
+
+        self.cache_time_file = self.cache_dir / "cache_timestamp.txt"
         self.prices = self.load_data(source)
         self.charges = charges
-        self.prediction_hours = prediction_hours
+        self.prediction_hours = config.eos.prediction_hours

-    def load_data(self, source):
-        cache_filename = self.get_cache_filename(source)
-        if source.startswith("http"):
-            if os.path.exists(cache_filename) and not self.is_cache_expired() and self.cache:
+    def load_data(self, source: str | Path):
+        cache_file = self.get_cache_file(source)
+        if isinstance(source, str):
+            if cache_file.is_file() and not self.is_cache_expired() and self.use_cache:
                 print("Loading data from cache...")
-                with open(cache_filename, "r") as file:
+                with cache_file.open("r") as file:
                     json_data = json.load(file)
             else:
                 print("Loading data from the URL...")
                 response = requests.get(source)
                 if response.status_code == 200:
                     json_data = response.json()
-                    with open(cache_filename, "w") as file:
+                    with cache_file.open("w") as file:
                         json.dump(json_data, file)
                     self.update_cache_timestamp()
                 else:
                     raise Exception(f"Error fetching data: {response.status_code}")
-        else:
-            with open(source, "r") as file:
+        elif source.is_file():
+            with source.open("r") as file:
                 json_data = json.load(file)
+        else:
+            raise ValueError(f"Input is not a valid path: {source}")
         return json_data["values"]

-    def get_cache_filename(self, url):
+    def get_cache_file(self, url):
         hash_object = hashlib.sha256(url.encode())
         hex_dig = hash_object.hexdigest()
-        return os.path.join(self.cache_dir, f"cache_{hex_dig}.json")
+        return self.cache_dir / f"cache_{hex_dig}.json"

     def is_cache_expired(self):
-        if not os.path.exists(self.cache_time_file):
+        if not self.cache_time_file.is_file():
             return True
-        with open(self.cache_time_file, "r") as file:
+        with self.cache_time_file.open("r") as file:
             timestamp_str = file.read()
             last_cache_time = datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S")
         return datetime.now() - last_cache_time > timedelta(hours=1)

     def update_cache_timestamp(self):
-        with open(self.cache_time_file, "w") as file:
+        with self.cache_time_file.open("w") as file:
             file.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

     def get_price_for_date(self, date_str):
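The price forecast now takes the whole AppConfig: the cache directory is derived from config.working_dir and config.directories.cache, a string source is treated as a URL, and a Path source as a local JSON file. A sketch of typical use, assuming a config loaded with the helpers added in this commit (the URL dates are placeholders):

from pathlib import Path

from akkudoktoreos.class_strompreis import HourlyElectricityPriceForecast
from akkudoktoreos.config import load_config

config = load_config(Path.cwd(), copy_default=True)
config.run_setup()  # the cache directory must exist, otherwise SetupIncomplete is raised

forecast = HourlyElectricityPriceForecast(
    source="https://api.akkudoktor.net/prices?start=2024-10-01&end=2024-10-03",
    config=config,
    use_cache=True,
)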
@@ -1,30 +1,290 @@
+"""This module provides functionality to manage and handle configuration for the EOS system.
+
+The module including loading, merging, and validating JSON configuration files.
+It also provides utility functions for working directory setup and date handling.
+
+Key features:
+- Loading and merging configurations from default or custom JSON files
+- Validating configurations using Pydantic models
+- Managing directory setups for the application
+- Utility to get prediction start and end dates
+"""
+
+import json
+import os
+import shutil
 from datetime import datetime, timedelta
+from pathlib import Path
+from typing import Any, Optional

-output_dir = "output"
+from pydantic import BaseModel, ValidationError

-prediction_hours = 48
-optimization_hours = 24
-strafe = 10
-possible_ev_charge_currents = [
-    0.0,
-    6.0 / 16.0,
-    # 7.0 / 16.0,
-    8.0 / 16.0,
-    # 9.0 / 16.0,
-    10.0 / 16.0,
-    # 11.0 / 16.0,
-    12.0 / 16.0,
-    # 13.0 / 16.0,
-    14.0 / 16.0,
-    # 15.0 / 16.0,
-    1.0,
-]
+EOS_DIR = "EOS_DIR"
+ENCODING = "UTF-8"
+CONFIG_FILE_NAME = "EOS.config.json"
+DEFAULT_CONFIG_FILE = Path(__file__).parent.joinpath("default.config.json")


-def get_start_enddate(prediction_hours=48, startdate=None):
-    ############
-    # Parameter
-    ############
+class FolderConfig(BaseModel):
+    """Folder configuration for the EOS system.
+
+    Uses working_dir as root path.
+    The working directory can be either cwd or
+    a path or folder defined by the EOS_DIR environment variable.
+
+    Attributes:
+        output (str): Directory name for output files.
+        cache (str): Directory name for cache files.
+    """
+
+    output: str
+    cache: str
+
+
+class EOSConfig(BaseModel):
+    """EOS system-specific configuration.
+
+    Attributes:
+        prediction_hours (int): Number of hours for predictions.
+        optimization_hours (int): Number of hours for optimizations.
+        penalty (int): Penalty factor used in optimization.
+        available_charging_rates_in_percentage (list[float]): List of available charging rates as percentages.
+    """
+
+    prediction_hours: int
+    optimization_hours: int
+    penalty: int
+    available_charging_rates_in_percentage: list[float]
+    feed_in_tariff_eur_per_wh: int
+
+
+class BaseConfig(BaseModel):
+    """Base configuration for the EOS system.
+
+    Attributes:
+        directories (FolderConfig): Configuration for directory paths (output, cache).
+        eos (EOSConfig): Configuration for EOS-specific settings.
+    """
+
+    directories: FolderConfig
+    eos: EOSConfig
+
+
+class AppConfig(BaseConfig):
+    """Application-level configuration that extends the base configuration with a working directory.
+
+    Attributes:
+        working_dir (Path): The root directory for the application.
+    """
+
+    working_dir: Path
+
+    def run_setup(self) -> None:
+        """Runs setup for the application by ensuring that required directories exist.
+
+        If a directory does not exist, it is created.
+
+        Raises:
+            OSError: If directories cannot be created.
+        """
+        print("Checking directory settings and creating missing directories...")
+        for key, value in self.directories.model_dump().items():
+            if not isinstance(value, str):
+                continue
+            path = self.working_dir / value
+            print(f"'{key}': {path}")
+            os.makedirs(path, exist_ok=True)
+
+
+class SetupIncomplete(Exception):
+    """Exception class for errors related to incomplete setup of the EOS system."""
+
+
+def _load_json(path: Path) -> dict[str, Any]:
+    """Load a JSON file from a given path.
+
+    Args:
+        path (Path): Path to the JSON file.
+
+    Returns:
+        dict[str, Any]: Parsed JSON content.
+
+    Raises:
+        FileNotFoundError: If the JSON file does not exist.
+        json.JSONDecodeError: If the file cannot be parsed as valid JSON.
+    """
+    with path.open("r") as f_in:
+        return json.load(f_in)
+
+
+def _merge_json(default_data: dict[str, Any], custom_data: dict[str, Any]) -> dict[str, Any]:
+    """Recursively merge two dictionaries, using values from `custom_data` when available.
+
+    Args:
+        default_data (dict[str, Any]): The default configuration values.
+        custom_data (dict[str, Any]): The custom configuration values.
+
+    Returns:
+        dict[str, Any]: Merged configuration data.
+    """
+    merged_data = {}
+    for key, default_value in default_data.items():
+        if key in custom_data:
+            custom_value = custom_data[key]
+            if isinstance(default_value, dict) and isinstance(custom_value, dict):
+                merged_data[key] = _merge_json(default_value, custom_value)
+            elif type(default_value) is type(custom_value):
+                merged_data[key] = custom_value
+            else:
+                # use default value if types differ
+                merged_data[key] = default_value
+        else:
+            merged_data[key] = default_value
+    return merged_data
+
+
+def _config_update_available(merged_data: dict[str, Any], custom_data: dict[str, Any]) -> bool:
+    """Check if the configuration needs to be updated by comparing merged data and custom data.
+
+    Args:
+        merged_data (dict[str, Any]): The merged configuration data.
+        custom_data (dict[str, Any]): The custom configuration data.
+
+    Returns:
+        bool: True if there is a difference indicating that an update is needed, otherwise False.
+    """
+    if merged_data.keys() != custom_data.keys():
+        return True
+
+    for key in merged_data:
+        value1 = merged_data[key]
+        value2 = custom_data[key]
+
+        if isinstance(value1, dict) and isinstance(value2, dict):
+            if _config_update_available(value1, value2):
+                return True
+        elif value1 != value2:
+            return True
+    return False
+
+
+def get_config_file(path: Path, copy_default: bool) -> Path:
+    """Get the valid configuration file path. If the custom config is not found, it uses the default config.
+
+    Args:
+        path (Path): Path to the working directory.
+        copy_default (bool): If True, copy the default configuration if custom config is not found.
+
+    Returns:
+        Path: Path to the valid configuration file.
+    """
+    config = path.resolve() / CONFIG_FILE_NAME
+    if config.is_file():
+        print(f"Using configuration from: {config}")
+        return config
+
+    if not path.is_dir():
+        print(f"Path does not exist: {path}. Using default configuration...")
+        return DEFAULT_CONFIG_FILE
+
+    if not copy_default:
+        print("No custom configuration provided. Using default configuration...")
+        return DEFAULT_CONFIG_FILE
+
+    try:
+        return Path(shutil.copy2(DEFAULT_CONFIG_FILE, config))
+    except Exception as exc:
+        print(f"Could not copy default config: {exc}. Using default copy...")
+        return DEFAULT_CONFIG_FILE
+
+
+def _merge_and_update(custom_config: Path, update_outdated: bool = False) -> bool:
+    """Merge custom and default configurations, and optionally update the custom config if outdated.
+
+    Args:
+        custom_config (Path): Path to the custom configuration file.
+        update_outdated (bool): If True, update the custom config if it is outdated.
+
+    Returns:
+        bool: True if the custom config was updated, otherwise False.
+    """
+    if custom_config == DEFAULT_CONFIG_FILE:
+        return False
+    default_data = _load_json(DEFAULT_CONFIG_FILE)
+    custom_data = _load_json(custom_config)
+    merged_data = _merge_json(default_data, custom_data)
+
+    if not _config_update_available(merged_data, custom_data):
+        print(f"Custom config {custom_config} is up-to-date...")
+        return False
+    print(f"Custom config {custom_config} is outdated...")
+    if update_outdated:
+        with custom_config.open("w") as f_out:
+            json.dump(merged_data, f_out, indent=2)
+        return True
+    return False
+
+
+def load_config(
+    working_dir: Path, copy_default: bool = False, update_outdated: bool = True
+) -> AppConfig:
+    """Load the application configuration from the specified directory, merging with defaults if needed.
+
+    Args:
+        working_dir (Path): Path to the working directory.
+        copy_default (bool): Whether to copy the default configuration if custom config is missing.
+        update_outdated (bool): Whether to update outdated custom configuration.
+
+    Returns:
+        AppConfig: Loaded application configuration.
+
+    Raises:
+        ValueError: If the configuration is incomplete or not valid.
+    """
+    # make sure working_dir is always a full path
+    working_dir = working_dir.resolve()
+
+    config = get_config_file(working_dir, copy_default)
+    _merge_and_update(config, update_outdated)
+
+    with config.open("r", encoding=ENCODING) as f_in:
+        try:
+            base_config = BaseConfig.model_validate(json.load(f_in))
+            return AppConfig.model_validate(
+                {"working_dir": working_dir, **base_config.model_dump()}
+            )
+        except ValidationError as exc:
+            raise ValueError(f"Configuration {config} is incomplete or not valid: {exc}")
+
+
+def get_working_dir() -> Path:
+    """Get the working directory for the application, either from an environment variable or the current working directory.
+
+    Returns:
+        Path: The path to the working directory.
+    """
+    custom_dir = os.getenv(EOS_DIR)
+    if custom_dir is None:
+        working_dir = Path.cwd()
+        print(f"No custom directory provided. Setting working directory to: {working_dir}")
+    else:
+        working_dir = Path(custom_dir).resolve()
+        print(f"Custom directory provided. Setting working directory to: {working_dir}")
+    return working_dir
+
+
+def get_start_enddate(
+    prediction_hours: int, startdate: Optional[datetime] = None
+) -> tuple[str, str]:
+    """Calculate the start and end dates based on the given prediction hours and optional start date.
+
+    Args:
+        prediction_hours (int): Number of hours for predictions.
+        startdate (Optional[datetime]): Optional starting datetime.
+
+    Returns:
+        tuple[str, str]: The current date (start date) and end date in the format 'YYYY-MM-DD'.
+    """
     if startdate is None:
         date = (datetime.now().date() + timedelta(hours=prediction_hours)).strftime("%Y-%m-%d")
         date_now = datetime.now().strftime("%Y-%m-%d")
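Taken together, the new module gives a three-step start-up path: resolve the working directory, load and validate the merged configuration, then create any missing directories. A short sketch of that flow (the same calls the Flask server below uses):

from akkudoktoreos.config import get_working_dir, load_config

working_dir = get_working_dir()  # $EOS_DIR if set, else the current directory
config = load_config(working_dir, copy_default=True, update_outdated=True)
config.run_setup()  # creates the configured output and cache directories

print(config.working_dir)
print(config.eos.prediction_hours)  # 48 with the default config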
src/akkudoktoreos/default.config.json (new file, 15 lines)
@@ -0,0 +1,15 @@
+{
+    "directories": {
+        "output": "output",
+        "cache": "cache"
+    },
+    "eos": {
+        "prediction_hours": 48,
+        "optimization_hours": 24,
+        "penalty": 10,
+        "available_charging_rates_in_percentage": [
+            0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0
+        ],
+        "feed_in_tariff_eur_per_wh": 48
+    }
+}
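A custom EOS.config.json only needs the keys it overrides; _merge_json fills the rest from this default file and falls back to the default whenever a value has a different type. An illustrative call using the private helpers directly (load_config normally does this internally):

from akkudoktoreos.config import DEFAULT_CONFIG_FILE, _load_json, _merge_json

default_data = _load_json(DEFAULT_CONFIG_FILE)
custom_data = {"eos": {"prediction_hours": 72, "penalty": "not-an-int"}}  # hypothetical override

merged = _merge_json(default_data, custom_data)
print(merged["eos"]["prediction_hours"])  # 72: custom value, same type as the default
print(merged["eos"]["penalty"])           # 10: type mismatch, default wins
print(merged["directories"])              # {'output': 'output', 'cache': 'cache'}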
@@ -1,5 +1,4 @@
 import datetime
-import os

 # Set the backend for matplotlib to Agg
 import matplotlib
@@ -8,7 +7,7 @@ import numpy as np
 from matplotlib.backends.backend_pdf import PdfPages

 from akkudoktoreos.class_sommerzeit import ist_dst_wechsel
-from akkudoktoreos.config import output_dir
+from akkudoktoreos.config import AppConfig, SetupIncomplete

 matplotlib.use("Agg")

@@ -23,22 +22,24 @@ def visualisiere_ergebnisse(
     discharge,  # Discharge allowed
     temperature,
     start_hour,
-    prediction_hours,
     einspeiseverguetung_euro_pro_wh,
+    config: AppConfig,
     filename="visualization_results.pdf",
     extra_data=None,
 ):
     #####################
     # 24-hour visualization
     #####################
-    if not os.path.exists(output_dir):
-        os.makedirs(output_dir)
-    output_file = os.path.join(output_dir, filename)
+    output_dir = config.working_dir / config.directories.output
+    if not output_dir.is_dir():
+        raise SetupIncomplete(f"Output path does not exist: {output_dir}.")
+
+    output_file = output_dir.joinpath(filename)
     with PdfPages(output_file) as pdf:
         # Load and PV generation
         plt.figure(figsize=(14, 14))
         plt.subplot(3, 3, 1)
-        hours = np.arange(0, prediction_hours)
+        hours = np.arange(0, config.eos.prediction_hours)

         gesamtlast_array = np.array(gesamtlast)
         # Plot individual loads
@@ -101,9 +102,9 @@ def visualisiere_ergebnisse(
         plt.figure(figsize=(14, 10))

         if ist_dst_wechsel(datetime.datetime.now()):
-            hours = np.arange(start_hour, prediction_hours - 1)
+            hours = np.arange(start_hour, config.eos.prediction_hours - 1)
         else:
-            hours = np.arange(start_hour, prediction_hours)
+            hours = np.arange(start_hour, config.eos.prediction_hours)

         # Energy flow, grid feed-in, and grid consumption
         plt.subplot(3, 2, 1)
@@ -187,7 +188,7 @@ def visualisiere_ergebnisse(

         # Plot for AC, DC charging, and Discharge status using bar charts
         ax1 = plt.subplot(3, 2, 5)
-        hours = np.arange(0, prediction_hours)
+        hours = np.arange(0, config.eos.prediction_hours)
         # Plot AC charging as bars (relative values between 0 and 1)
         plt.bar(hours, ac, width=0.4, label="AC Charging (relative)", color="blue", alpha=0.6)

@@ -209,16 +210,16 @@ def visualisiere_ergebnisse(

         # Configure the plot
         ax1.legend(loc="upper left")
-        ax1.set_xlim(0, prediction_hours)
+        ax1.set_xlim(0, config.eos.prediction_hours)
         ax1.set_xlabel("Hour")
         ax1.set_ylabel("Relative Power (0-1) / Discharge (0 or 1)")
         ax1.set_title("AC/DC Charging and Discharge Overview")
         ax1.grid(True)

         if ist_dst_wechsel(datetime.datetime.now()):
-            hours = np.arange(start_hour, prediction_hours - 1)
+            hours = np.arange(start_hour, config.eos.prediction_hours - 1)
         else:
-            hours = np.arange(start_hour, prediction_hours)
+            hours = np.arange(start_hour, config.eos.prediction_hours)

         pdf.savefig()  # Save the current figure state to the PDF
         plt.close()  # Close the current figure to free up memory
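The PDF now lands under the configured output directory instead of the old hard-coded output_dir constant, and the function refuses to run when that directory is missing. A tiny sketch of resolving that path (assuming the default directory names):

from pathlib import Path

from akkudoktoreos.config import load_config

config = load_config(Path.cwd(), copy_default=True)
config.run_setup()  # output/ must exist, otherwise visualisiere_ergebnisse raises SetupIncomplete

pdf_path = config.working_dir / config.directories.output / "visualization_results.pdf"
print(pdf_path)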
@@ -20,20 +20,18 @@ from akkudoktoreos.class_optimize import optimization_problem
 from akkudoktoreos.class_pv_forecast import PVForecast
 from akkudoktoreos.class_strompreis import HourlyElectricityPriceForecast
 from akkudoktoreos.config import (
+    SetupIncomplete,
     get_start_enddate,
-    optimization_hours,
-    output_dir,
-    prediction_hours,
+    get_working_dir,
+    load_config,
 )

 app = Flask(__name__)

-opt_class = optimization_problem(
-    prediction_hours=prediction_hours,
-    strafe=10,
-    optimization_hours=optimization_hours,
-    verbose=True,
-)
+working_dir = get_working_dir()
+# copy config to working directory. Make this a CLI option later
+config = load_config(working_dir, True)
+opt_class = optimization_problem(config)


 def isfloat(num: Any) -> TypeGuard[float]:
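The server now builds one AppConfig at import time and hands it to the optimizer. Its working directory can be redirected through the EOS_DIR environment variable, as this small sketch illustrates (the path is only an example):

import os

from akkudoktoreos.config import get_working_dir

os.environ["EOS_DIR"] = "/var/lib/eos"  # hypothetical state directory
print(get_working_dir())  # /var/lib/eos (resolved); without EOS_DIR, the current directory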
@@ -58,14 +56,11 @@ def isfloat(num: Any) -> TypeGuard[float]:
 @app.route("/strompreis", methods=["GET"])
 def flask_strompreis():
     # Get the current date and the end date based on prediction hours
-    date_now, date = get_start_enddate(prediction_hours, startdate=datetime.now().date())
-    filepath = os.path.join(
-        r"test_data", r"strompreise_akkudokAPI.json"
-    )  # Adjust the path to the JSON file
+    date_now, date = get_start_enddate(config.eos.prediction_hours, startdate=datetime.now().date())
     price_forecast = HourlyElectricityPriceForecast(
         source=f"https://api.akkudoktor.net/prices?start={date_now}&end={date}",
-        prediction_hours=prediction_hours,
-        cache=False,
+        config=config.eos.prediction_hours,
+        use_cache=False,
     )
     specific_date_prices = price_forecast.get_price_for_daterange(
         date_now, date
@@ -141,7 +136,7 @@ def flask_gesamtlast_simple():
         request.args.get("year_energy")
     )  # Get annual energy value from query parameters
     date_now, date = get_start_enddate(
-        prediction_hours, startdate=datetime.now().date()
+        config.eos.prediction_hours, startdate=datetime.now().date()
     )  # Get the current date and prediction end date

     ###############
@@ -159,7 +154,9 @@ def flask_gesamtlast_simple():
         0
     ]  # Get expected household load for the date range

-    gesamtlast = Gesamtlast(prediction_hours=prediction_hours)  # Create Gesamtlast instance
+    gesamtlast = Gesamtlast(
+        prediction_hours=config.eos.prediction_hours
+    )  # Create Gesamtlast instance
     gesamtlast.hinzufuegen(
         "Haushalt", leistung_haushalt
     )  # Add household load to total load calculation
@@ -181,13 +178,15 @@ def flask_pvprognose():
     # Retrieve URL and AC power measurement from query parameters
     url = request.args.get("url")
     ac_power_measurement = request.args.get("ac_power_measurement")
-    date_now, date = get_start_enddate(prediction_hours, startdate=datetime.now().date())
+    date_now, date = get_start_enddate(
+        config.eos.prediction_hours, startdate=datetime.now().date()
+    )

     ###############
     # PV Forecast
     ###############
     PVforecast = PVForecast(
-        prediction_hours=prediction_hours, url=url
+        prediction_hours=config.eos.prediction_hours, url=url
     )  # Instantiate PVForecast with given parameters
     if isfloat(ac_power_measurement):  # Check if the AC power measurement is a valid float
         PVforecast.update_ac_power_measurement(
@@ -255,9 +254,10 @@ def flask_optimize():
 @app.route("/visualization_results.pdf")
 def get_pdf():
     # Endpoint to serve the generated PDF with visualization results
-    return send_from_directory(
-        os.path.abspath(output_dir), "visualization_results.pdf"
-    )  # Adjust the directory if needed
+    output_path = config.working_dir / config.directories.output
+    if not output_path.is_dir():
+        raise SetupIncomplete(f"Output path does not exist: {output_path}.")
+    return send_from_directory(output_path, "visualization_results.pdf")


 @app.route("/site-map")
@@ -293,6 +293,8 @@ def root():

 if __name__ == "__main__":
     try:
+        config.run_setup()
+
         # Set host and port from environment variables or defaults
         host = os.getenv("FLASK_RUN_HOST", "0.0.0.0")
         port = os.getenv("FLASK_RUN_PORT", 8503)
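For completeness, a hedged client-side sketch against a locally running instance; host and port follow the FLASK_RUN_HOST/FLASK_RUN_PORT defaults above, and the exact response formats are not shown in this diff:

import requests

base = "http://localhost:8503"

print(requests.get(f"{base}/strompreis").status_code)
print(requests.get(f"{base}/site-map").status_code)

with open("visualization_results.pdf", "wb") as f:
    f.write(requests.get(f"{base}/visualization_results.pdf").content)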