Mirror of https://github.com/Akkudoktor-EOS/EOS.git (synced 2025-04-19 08:55:15 +00:00)
Improve Configuration and Prediction Usability (#220)
* Update utilities in utils submodule.
* Add base configuration modules.
* Add server base configuration modules.
* Add devices base configuration modules.
* Add optimization base configuration modules.
* Add utils base configuration modules.
* Add prediction abstract and base classes plus tests.
* Add PV forecast to prediction submodule. The PV forecast modules are adapted from the class_pvforecast module and replace it.
* Add weather forecast to prediction submodule. The modules provide classes and methods to retrieve, manage, and process weather forecast data from various sources. Included are structured representations of weather data and utilities for fetching forecasts for specific locations and time ranges. BrightSky and ClearOutside are currently supported.
* Add electricity price forecast to prediction submodule.
* Adapt fastapi server to base config and add fasthtml server.
* Add ems to core submodule.
* Adapt genetic to config.
* Adapt visualize to config.
* Adapt common test fixtures to config.
* Add load forecast to prediction submodule.
* Add core abstract and base classes.
* Adapt single test optimization to config.
* Adapt devices to config.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in: parent a5e637ab4c, commit aa334d0b61
File diff suppressed because it is too large
@@ -51,9 +51,16 @@ select = [
     # Google convention via `convention = "google"`, below.
 ]
 ignore = [
-    # On top of `Pyflakes (F)` to prevent errors for existing sources. Should be removed!!!
+    # Prevent errors due to ruff false positives
+    # ------------------------------------------
+    # On top of `Pyflakes (F)` to allow numpydantic Shape forward annotation
+    "F722", # forward-annotation-syntax-error: forward annotations that include invalid syntax.
+
+    # Prevent errors for existing sources. Should be removed!!!
+    # ---------------------------------------------------------
+    # On top of `Pyflakes (F)`
     "F841", # unused-variable: Local variable {name} is assigned to but never used
-    # On top of `pydocstyle (D)` to prevent errors for existing sources. Should be removed!!!
+    # On top of `pydocstyle (D)`
     "D100", # undocumented-public-module: Missing docstring in public module
     "D101", # undocumented-public-class: Missing docstring in public class
     "D102", # undocumented-public-method: Missing docstring in public method
@@ -1,11 +1,15 @@
 numpy==2.2.0
+numpydantic==1.6.4
 matplotlib==3.9.2
 fastapi[standard]==0.115.5
+python-fasthtml==0.9.1
 uvicorn==0.32.1
-pydantic==2.10.3
 scikit-learn==1.6.0
 timezonefinder==6.5.7
 deap==1.4.1
 requests==2.32.3
 pandas==2.2.3
+pendulum==3.0.0
+platformdirs==4.3.6
+pvlib==0.11.1
 pydantic==2.10.3
@@ -1,299 +1,240 @@
#!/usr/bin/env python3

import argparse
import cProfile
import pstats
import sys
import time

import numpy as np

from akkudoktoreos.config.config import get_config
from akkudoktoreos.optimization.genetic import (
    OptimizationParameters,
    optimization_problem,
)


def prepare_optimization_parameters() -> OptimizationParameters:
    """Prepare and return optimization parameters with predefined data.

    Returns:
        OptimizationParameters: Configured optimization parameters
    """
    # PV Forecast (in W)
    pv_forecast = np.zeros(48)
    pv_forecast[12] = 5000

    # Temperature Forecast (in degree C)
    temperature_forecast = [
        18.3, 17.8, 16.9, 16.2, 15.6, 15.1, 14.6, 14.2, 14.3, 14.8, 15.7, 16.7,
        17.4, 18.0, 18.6, 19.2, 19.1, 18.7, 18.5, 17.7, 16.2, 14.6, 13.6, 13.0,
        12.6, 12.2, 11.7, 11.6, 11.3, 11.0, 10.7, 10.2, 11.4, 14.4, 16.4, 18.3,
        19.5, 20.7, 21.9, 22.7, 23.1, 23.1, 22.8, 21.8, 20.2, 19.1, 18.0, 17.4,
    ]

    # Electricity Price (in Euro per Wh)
    strompreis_euro_pro_wh = np.full(48, 0.001)
    strompreis_euro_pro_wh[0:10] = 0.00001
    strompreis_euro_pro_wh[11:15] = 0.00005
    strompreis_euro_pro_wh[20] = 0.00001

    # Overall System Load (in W)
    gesamtlast = [
        676.71, 876.19, 527.13, 468.88, 531.38, 517.95, 483.15, 472.28,
        1011.68, 995.00, 1053.07, 1063.91, 1320.56, 1132.03, 1163.67, 1176.82,
        1216.22, 1103.78, 1129.12, 1178.71, 1050.98, 988.56, 912.38, 704.61,
        516.37, 868.05, 694.34, 608.79, 556.31, 488.89, 506.91, 804.89,
        1141.98, 1056.97, 992.46, 1155.99, 827.01, 1257.98, 1232.67, 871.26,
        860.88, 1158.03, 1222.72, 1221.04, 949.99, 987.01, 733.99, 592.97,
    ]

    # Start Solution (binary)
    start_solution = None

    # Define parameters for the optimization problem
    return OptimizationParameters(
        **{
            "ems": {
                "preis_euro_pro_wh_akku": 0e-05,
                "einspeiseverguetung_euro_pro_wh": 7e-05,
                "gesamtlast": gesamtlast,
                "pv_prognose_wh": pv_forecast,
                "strompreis_euro_pro_wh": strompreis_euro_pro_wh,
            },
            "pv_akku": {
                "kapazitaet_wh": 26400,
                "start_soc_prozent": 15,
                "min_soc_prozent": 15,
            },
            "eauto": {
                "min_soc_prozent": 50,
                "kapazitaet_wh": 60000,
                "lade_effizienz": 0.95,
                "max_ladeleistung_w": 11040,
                "start_soc_prozent": 5,
            },
            "temperature_forecast": temperature_forecast,
            "start_solution": start_solution,
        }
    )


def run_optimization(start_hour: int = 0, verbose: bool = False) -> dict:
    """Run the optimization problem.

    Args:
        start_hour (int, optional): Starting hour for optimization. Defaults to 0.
        verbose (bool, optional): Whether to print verbose output. Defaults to False.

    Returns:
        dict: Optimization result as a dictionary
    """
    # Initialize the optimization problem using the default configuration
    config_eos = get_config()
    config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
    opt_class = optimization_problem(verbose=verbose, fixed_seed=42)

    # Prepare parameters
    parameters = prepare_optimization_parameters()

    # Perform the optimisation based on the provided parameters and start hour
    result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour)

    return result.model_dump()


def main():
    """Main function to run the optimization script with optional profiling."""
    parser = argparse.ArgumentParser(description="Run Energy Optimization Simulation")
    parser.add_argument("--profile", action="store_true", help="Enable performance profiling")
    parser.add_argument(
        "--verbose", action="store_true", help="Enable verbose output during optimization"
    )
    parser.add_argument(
        "--start-hour", type=int, default=0, help="Starting hour for optimization (default: 0)"
    )

    args = parser.parse_args()

    if args.profile:
        # Run with profiling
        profiler = cProfile.Profile()
        try:
            result = profiler.runcall(
                run_optimization, start_hour=args.start_hour, verbose=args.verbose
            )
            # Print profiling statistics
            stats = pstats.Stats(profiler)
            stats.strip_dirs().sort_stats("cumulative").print_stats(200)
            # Print result
            print("\nOptimization Result:")
            print(result)

        except Exception as e:
            print(f"Error during optimization: {e}", file=sys.stderr)
            sys.exit(1)
    else:
        # Run without profiling
        try:
            start_time = time.time()
            result = run_optimization(start_hour=args.start_hour, verbose=args.verbose)
            end_time = time.time()
            elapsed_time = end_time - start_time
            print(f"\nElapsed time: {elapsed_time:.4f} seconds.")
            print("\nOptimization Result:")
            print(result)

        except Exception as e:
            print(f"Error during optimization: {e}", file=sys.stderr)
            sys.exit(1)


if __name__ == "__main__":
    main()
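The rewritten script can also be driven programmatically; a minimal sketch, assuming the file above is importable as `single_test_optimization` (the file name is not shown on this page, so treat the module name as hypothetical):

```python
# Minimal sketch; `single_test_optimization` is a hypothetical module name.
from single_test_optimization import run_optimization

# Equivalent to `--start-hour 0 --verbose` on the command line.
result = run_optimization(start_hour=0, verbose=True)
print(result)
```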
@@ -1,292 +0,0 @@
"""This module provides functionality to manage and handle configuration for the EOS system.

The module includes loading, merging, and validating JSON configuration files.
It also provides utility functions for working directory setup and date handling.

Key features:
- Loading and merging configurations from default or custom JSON files
- Validating configurations using Pydantic models
- Managing directory setups for the application
- Utility to get prediction start and end dates
"""

import json
import os
import shutil
from datetime import date, datetime, timedelta
from pathlib import Path
from typing import Any, Optional

from pydantic import BaseModel, ValidationError

EOS_DIR = "EOS_DIR"
ENCODING = "UTF-8"
CONFIG_FILE_NAME = "EOS.config.json"
DEFAULT_CONFIG_FILE = Path(__file__).parent.joinpath("default.config.json")


class FolderConfig(BaseModel):
    """Folder configuration for the EOS system.

    Uses working_dir as root path. The working directory can be either cwd or
    a path or folder defined by the EOS_DIR environment variable.

    Attributes:
        output (str): Directory name for output files.
        cache (str): Directory name for cache files.
    """

    output: str
    cache: str


class EOSConfig(BaseModel):
    """EOS system-specific configuration.

    Attributes:
        prediction_hours (int): Number of hours for predictions.
        optimization_hours (int): Number of hours for optimizations.
        penalty (int): Penalty factor used in optimization.
        available_charging_rates_in_percentage (list[float]): List of available charging rates as percentages.
    """

    prediction_hours: int
    optimization_hours: int
    penalty: int
    available_charging_rates_in_percentage: list[float]
    feed_in_tariff_eur_per_wh: int


class BaseConfig(BaseModel):
    """Base configuration for the EOS system.

    Attributes:
        directories (FolderConfig): Configuration for directory paths (output, cache).
        eos (EOSConfig): Configuration for EOS-specific settings.
    """

    directories: FolderConfig
    eos: EOSConfig


class AppConfig(BaseConfig):
    """Application-level configuration that extends the base configuration with a working directory.

    Attributes:
        working_dir (Path): The root directory for the application.
    """

    working_dir: Path

    def run_setup(self) -> None:
        """Runs setup for the application by ensuring that required directories exist.

        If a directory does not exist, it is created.

        Raises:
            OSError: If directories cannot be created.
        """
        print("Checking directory settings and creating missing directories...")
        for key, value in self.directories.model_dump().items():
            if not isinstance(value, str):
                continue
            path = self.working_dir / value
            print(f"'{key}': {path}")
            os.makedirs(path, exist_ok=True)


class SetupIncomplete(Exception):
    """Exception class for errors related to incomplete setup of the EOS system."""


def _load_json(path: Path) -> dict[str, Any]:
    """Load a JSON file from a given path.

    Args:
        path (Path): Path to the JSON file.

    Returns:
        dict[str, Any]: Parsed JSON content.

    Raises:
        FileNotFoundError: If the JSON file does not exist.
        json.JSONDecodeError: If the file cannot be parsed as valid JSON.
    """
    with path.open("r") as f_in:
        return json.load(f_in)


def _merge_json(default_data: dict[str, Any], custom_data: dict[str, Any]) -> dict[str, Any]:
    """Recursively merge two dictionaries, using values from `custom_data` when available.

    Args:
        default_data (dict[str, Any]): The default configuration values.
        custom_data (dict[str, Any]): The custom configuration values.

    Returns:
        dict[str, Any]: Merged configuration data.
    """
    merged_data = {}
    for key, default_value in default_data.items():
        if key in custom_data:
            custom_value = custom_data[key]
            if isinstance(default_value, dict) and isinstance(custom_value, dict):
                merged_data[key] = _merge_json(default_value, custom_value)
            elif type(default_value) is type(custom_value):
                merged_data[key] = custom_value
            else:
                # use default value if types differ
                merged_data[key] = default_value
        else:
            merged_data[key] = default_value
    return merged_data


def _config_update_available(merged_data: dict[str, Any], custom_data: dict[str, Any]) -> bool:
    """Check if the configuration needs to be updated by comparing merged data and custom data.

    Args:
        merged_data (dict[str, Any]): The merged configuration data.
        custom_data (dict[str, Any]): The custom configuration data.

    Returns:
        bool: True if there is a difference indicating that an update is needed, otherwise False.
    """
    if merged_data.keys() != custom_data.keys():
        return True

    for key in merged_data:
        value1 = merged_data[key]
        value2 = custom_data[key]

        if isinstance(value1, dict) and isinstance(value2, dict):
            if _config_update_available(value1, value2):
                return True
        elif value1 != value2:
            return True
    return False


def get_config_file(path: Path, copy_default: bool) -> Path:
    """Get the valid configuration file path. If the custom config is not found, it uses the default config.

    Args:
        path (Path): Path to the working directory.
        copy_default (bool): If True, copy the default configuration if custom config is not found.

    Returns:
        Path: Path to the valid configuration file.
    """
    config = path.resolve() / CONFIG_FILE_NAME
    if config.is_file():
        print(f"Using configuration from: {config}")
        return config

    if not path.is_dir():
        print(f"Path does not exist: {path}. Using default configuration...")
        return DEFAULT_CONFIG_FILE

    if not copy_default:
        print("No custom configuration provided. Using default configuration...")
        return DEFAULT_CONFIG_FILE

    try:
        return Path(shutil.copy2(DEFAULT_CONFIG_FILE, config))
    except Exception as exc:
        print(f"Could not copy default config: {exc}. Using default copy...")
        return DEFAULT_CONFIG_FILE


def _merge_and_update(custom_config: Path, update_outdated: bool = False) -> bool:
    """Merge custom and default configurations, and optionally update the custom config if outdated.

    Args:
        custom_config (Path): Path to the custom configuration file.
        update_outdated (bool): If True, update the custom config if it is outdated.

    Returns:
        bool: True if the custom config was updated, otherwise False.
    """
    if custom_config == DEFAULT_CONFIG_FILE:
        return False
    default_data = _load_json(DEFAULT_CONFIG_FILE)
    custom_data = _load_json(custom_config)
    merged_data = _merge_json(default_data, custom_data)

    if not _config_update_available(merged_data, custom_data):
        print(f"Custom config {custom_config} is up-to-date...")
        return False
    print(f"Custom config {custom_config} is outdated...")
    if update_outdated:
        with custom_config.open("w") as f_out:
            json.dump(merged_data, f_out, indent=2)
        return True
    return False


def load_config(
    working_dir: Path, copy_default: bool = False, update_outdated: bool = True
) -> AppConfig:
    """Load the application configuration from the specified directory, merging with defaults if needed.

    Args:
        working_dir (Path): Path to the working directory.
        copy_default (bool): Whether to copy the default configuration if custom config is missing.
        update_outdated (bool): Whether to update outdated custom configuration.

    Returns:
        AppConfig: Loaded application configuration.

    Raises:
        ValueError: If the configuration is incomplete or not valid.
    """
    # make sure working_dir is always a full path
    working_dir = working_dir.resolve()

    config = get_config_file(working_dir, copy_default)
    _merge_and_update(config, update_outdated)

    with config.open("r", encoding=ENCODING) as f_in:
        try:
            base_config = BaseConfig.model_validate(json.load(f_in))
            return AppConfig.model_validate(
                {"working_dir": working_dir, **base_config.model_dump()}
            )
        except ValidationError as exc:
            raise ValueError(f"Configuration {config} is incomplete or not valid: {exc}")


def get_working_dir() -> Path:
    """Get the working directory for the application, either from an environment variable or the current working directory.

    Returns:
        Path: The path to the working directory.
    """
    custom_dir = os.getenv(EOS_DIR)
    if custom_dir is None:
        working_dir = Path.cwd()
        print(f"No custom directory provided. Setting working directory to: {working_dir}")
    else:
        working_dir = Path(custom_dir).resolve()
        print(f"Custom directory provided. Setting working directory to: {working_dir}")
    return working_dir


def get_start_enddate(prediction_hours: int, startdate: Optional[date] = None) -> tuple[str, str]:
    """Calculate the start and end dates based on the given prediction hours and optional start date.

    Args:
        prediction_hours (int): Number of hours for predictions.
        startdate (Optional[date]): Optional starting date.

    Returns:
        tuple[str, str]: The current date (start date) and end date in the format 'YYYY-MM-DD'.
    """
    if startdate is None:
        date = (datetime.now().date() + timedelta(hours=prediction_hours)).strftime("%Y-%m-%d")
        date_now = datetime.now().strftime("%Y-%m-%d")
    else:
        date = (startdate + timedelta(hours=prediction_hours)).strftime("%Y-%m-%d")
        date_now = startdate.strftime("%Y-%m-%d")
    return date_now, date
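For reference, the module deleted above was consumed as in the old test script earlier on this page; a minimal sketch of the superseded API:

```python
# Sketch of the superseded API (as used by the old test script shown earlier).
from akkudoktoreos.config import get_working_dir, load_config

working_dir = get_working_dir()     # EOS_DIR env var or cwd
config = load_config(working_dir)   # returns a validated AppConfig
print(config.eos.prediction_hours)  # nested access via the `eos` sub-model
```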
0 src/akkudoktoreos/config/__init__.py Normal file
453 src/akkudoktoreos/config/config.py Normal file
@@ -0,0 +1,453 @@
"""This module provides functionality to manage and handle configuration for the EOS.

The module includes loading, merging, and validating JSON configuration files.
It also provides utility functions for working directory setup and date handling.

Key features:
- Loading and merging configurations from default or custom JSON files
- Validating configurations using Pydantic models
- Managing directory setups for the application
"""

import os
import shutil
from pathlib import Path
from typing import Any, ClassVar, Optional

import platformdirs
from pydantic import Field, ValidationError, computed_field

# settings
from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.devices.devices import DevicesCommonSettings
from akkudoktoreos.optimization.optimization import OptimizationCommonSettings
from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
from akkudoktoreos.prediction.load import LoadCommonSettings
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
from akkudoktoreos.prediction.loadimport import LoadImportCommonSettings
from akkudoktoreos.prediction.prediction import PredictionCommonSettings
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
from akkudoktoreos.prediction.pvforecastimport import PVForecastImportCommonSettings
from akkudoktoreos.prediction.weather import WeatherCommonSettings
from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
from akkudoktoreos.server.server import ServerCommonSettings
from akkudoktoreos.utils.logutil import get_logger
from akkudoktoreos.utils.utils import UtilsCommonSettings

logger = get_logger(__name__)


class ConfigCommonSettings(SettingsBaseModel):
    """Settings for common configuration."""

    data_folder_path: Optional[Path] = Field(
        default=None, description="Path to EOS data directory."
    )

    data_output_subpath: Optional[Path] = Field(
        "output", description="Sub-path for the EOS output data directory."
    )

    data_cache_subpath: Optional[Path] = Field(
        "cache", description="Sub-path for the EOS cache data directory."
    )

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def data_output_path(self) -> Optional[Path]:
        """Compute data_output_path based on data_folder_path."""
        if self.data_output_subpath is None:
            return self.data_folder_path
        if self.data_folder_path and self.data_output_subpath:
            return self.data_folder_path.joinpath(self.data_output_subpath)
        return None

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def data_cache_path(self) -> Optional[Path]:
        """Compute data_cache_path based on data_folder_path."""
        if self.data_cache_subpath is None:
            return self.data_folder_path
        if self.data_folder_path and self.data_cache_subpath:
            return self.data_folder_path.joinpath(self.data_cache_subpath)
        return None


class SettingsEOS(
    ConfigCommonSettings,
    DevicesCommonSettings,
    OptimizationCommonSettings,
    PredictionCommonSettings,
    ElecPriceCommonSettings,
    ElecPriceImportCommonSettings,
    LoadCommonSettings,
    LoadAkkudoktorCommonSettings,
    LoadImportCommonSettings,
    PVForecastCommonSettings,
    PVForecastImportCommonSettings,
    WeatherCommonSettings,
    WeatherImportCommonSettings,
    ServerCommonSettings,
    UtilsCommonSettings,
):
    """Settings for all EOS."""

    pass


class ConfigEOS(SingletonMixin, SettingsEOS):
    """Singleton configuration handler for the EOS application.

    ConfigEOS extends `SettingsEOS` with support for default configuration paths and automatic
    initialization.

    `ConfigEOS` ensures that only one instance of the class is created throughout the application,
    allowing consistent access to EOS configuration settings. This singleton instance loads
    configuration data from a predefined set of directories or creates a default configuration if
    none is found.

    Initialization Process:
      - Upon instantiation, the singleton instance attempts to load a configuration file in this order:
        1. The directory specified by the `EOS_DIR` environment variable.
        2. A platform specific default directory for EOS.
        3. The current working directory.
      - The first available configuration file found in these directories is loaded.
      - If no configuration file is found, a default configuration file is created in the platform
        specific default directory, and default settings are loaded into it.

    Attributes from the loaded configuration are accessible directly as instance attributes of
    `ConfigEOS`, providing a centralized, shared configuration object for EOS.

    Singleton Behavior:
      - This class uses the `SingletonMixin` to ensure that all requests for `ConfigEOS` return
        the same instance, which contains the most up-to-date configuration. Modifying the
        configuration in one part of the application reflects across all references to this class.

    Attributes:
        _settings (ClassVar[SettingsEOS]): Holds application-wide settings.
        _file_settings (ClassVar[SettingsEOS]): Stores configuration loaded from file.
        config_folder_path (Optional[Path]): Path to the configuration directory.
        config_file_path (Optional[Path]): Path to the configuration file.

    Raises:
        FileNotFoundError: If no configuration file is found, and creating a default configuration fails.

    Example:
        To initialize and access configuration attributes (only one instance is created):
        ```python
        config_eos = ConfigEOS()  # Always returns the same instance
        print(config_eos.prediction_hours)  # Access a setting from the loaded configuration
        ```

    """

    APP_NAME: ClassVar[str] = "net.akkudoktor.eos"  # reverse order
    APP_AUTHOR: ClassVar[str] = "akkudoktor"
    EOS_DIR: ClassVar[str] = "EOS_DIR"
    ENCODING: ClassVar[str] = "UTF-8"
    CONFIG_FILE_NAME: ClassVar[str] = "EOS.config.json"

    _settings: ClassVar[Optional[SettingsEOS]] = None
    _file_settings: ClassVar[Optional[SettingsEOS]] = None

    config_folder_path: Optional[Path] = Field(
        None, description="Path to EOS configuration directory."
    )

    config_file_path: Optional[Path] = Field(
        default=None, description="Path to EOS configuration file."
    )

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def config_default_file_path(self) -> Path:
        """Compute the default config file path."""
        return Path(__file__).parent.parent.joinpath("data/default.config.json")

    def __init__(self) -> None:
        """Initializes the singleton ConfigEOS instance.

        Configuration data is loaded from a configuration file or a default one is created if none
        exists.
        """
        super().__init__()
        self.from_config_file()
        self.update()

    @property
    def settings(self) -> Optional[SettingsEOS]:
        """Returns global settings for EOS.

        Settings generally provide configuration for EOS and are typically set only once.

        Returns:
            SettingsEOS: The settings for EOS or None.
        """
        return ConfigEOS._settings

    @classmethod
    def _merge_and_update_settings(cls, settings: SettingsEOS) -> None:
        """Merge new and available settings.

        Args:
            settings (SettingsEOS): The new settings to apply.
        """
        for key in SettingsEOS.model_fields:
            if value := getattr(settings, key, None):
                setattr(cls._settings, key, value)

    def merge_settings(self, settings: SettingsEOS, force: Optional[bool] = None) -> None:
        """Merges the provided settings into the global settings for EOS, with optional overwrite.

        Args:
            settings (SettingsEOS): The settings to apply globally.
            force (Optional[bool]): If True, overwrites the existing settings completely.
                If False, the new settings are merged to the existing ones with priority for
                the new ones.

        Raises:
            ValueError: If settings are already set and `force` is not True, or
                if `settings` is not a `SettingsEOS` instance.
        """
        if not isinstance(settings, SettingsEOS):
            raise ValueError(f"Settings must be an instance of SettingsEOS: '{settings}'.")

        if ConfigEOS._settings is None or force:
            ConfigEOS._settings = settings
        else:
            self._merge_and_update_settings(settings)

        # Update configuration after merging
        self.update()

    def merge_settings_from_dict(self, data: dict) -> None:
        """Merges the provided dictionary data into the current instance.

        Creates a new settings instance with all optional fields reset to None,
        then applies the dictionary data through validation, and finally merges
        the validated settings into the current instance.

        Args:
            data (dict): Dictionary containing field values to merge into the
                current settings instance.

        Raises:
            ValidationError: If the data contains invalid values for the defined fields.

        Example:
            >>> config = get_config()
            >>> new_data = {"prediction_hours": 24, "server_fastapi_port": 8000}
            >>> config.merge_settings_from_dict(new_data)
        """
        # Create new settings instance with reset optional fields and merged data
        settings = SettingsEOS.from_dict_with_reset(data)
        self.merge_settings(settings)

    def reset_settings(self) -> None:
        """Reset all available settings.

        This function basically deletes the settings provided before.
        """
        ConfigEOS._settings = None

    def _update_data_folder_path(self) -> None:
        """Updates path to the data directory."""
        # From Settings
        if self.settings and (data_dir := self.settings.data_folder_path):
            try:
                data_dir.mkdir(parents=True, exist_ok=True)
                self.data_folder_path = data_dir
                return
            except Exception:
                pass
        # From EOS_DIR env
        env_dir = os.getenv(self.EOS_DIR)
        if env_dir is not None:
            try:
                data_dir = Path(env_dir).resolve()
                data_dir.mkdir(parents=True, exist_ok=True)
                self.data_folder_path = data_dir
                return
            except Exception:
                pass
        # From configuration file
        if self._file_settings and (data_dir := self._file_settings.data_folder_path):
            try:
                data_dir.mkdir(parents=True, exist_ok=True)
                self.data_folder_path = data_dir
                return
            except Exception:
                pass
        # From platform specific default path
        try:
            data_dir = Path(platformdirs.user_data_dir(self.APP_NAME, self.APP_AUTHOR))
            if data_dir is not None:
                data_dir.mkdir(parents=True, exist_ok=True)
                self.data_folder_path = data_dir
                return
        except Exception:
            pass
        # Current working directory
        data_dir = Path.cwd()
        self.data_folder_path = data_dir

    def _config_folder_path(self) -> Optional[Path]:
        """Finds the first directory containing a valid configuration file.

        Returns:
            Path: The path to the configuration directory, or None if not found.
        """
        config_dirs = []
        config_dir = None
        env_dir = os.getenv(self.EOS_DIR)
        logger.debug(f"Environment '{self.EOS_DIR}': '{env_dir}'")
        if env_dir is not None:
            config_dirs.append(Path(env_dir).resolve())
        config_dirs.append(Path(platformdirs.user_config_dir(self.APP_NAME)))
        config_dirs.append(Path.cwd())
        for cdir in config_dirs:
            cfile = cdir.joinpath(self.CONFIG_FILE_NAME)
            if cfile.exists():
                logger.debug(f"Found config file: '{cfile}'")
                config_dir = cdir
                break
        return config_dir

    def _config_file_path(self) -> Path:
        """Finds the path to the configuration file.

        Returns:
            Path: The path to the configuration file. May not exist.
        """
        config_file = None
        config_dir = self._config_folder_path()
        if config_dir is None:
            # There is currently no configuration file - create it in default path
            env_dir = os.getenv(self.EOS_DIR)
            if env_dir is not None:
                config_dir = Path(env_dir).resolve()
            else:
                config_dir = Path(platformdirs.user_config_dir(self.APP_NAME))
            config_file = config_dir.joinpath(self.CONFIG_FILE_NAME)
        else:
            config_file = config_dir.joinpath(self.CONFIG_FILE_NAME)
        return config_file

    def from_config_file(self) -> None:
        """Loads the configuration file settings for EOS.

        Raises:
            ValueError: If the configuration file is invalid or incomplete.
        """
        config_file = self._config_file_path()
        config_dir = config_file.parent
        if not config_file.exists():
            config_dir.mkdir(parents=True, exist_ok=True)
            try:
                shutil.copy2(self.config_default_file_path, config_file)
            except Exception as exc:
                logger.warning(f"Could not copy default config: {exc}. Using default copy...")
                config_file = self.config_default_file_path
                config_dir = config_file.parent

        with config_file.open("r", encoding=self.ENCODING) as f_in:
            try:
                json_str = f_in.read()
                ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
            except ValidationError as exc:
                raise ValueError(f"Configuration '{config_file}' is incomplete or not valid: {exc}")

        self.update()
        # Everything worked, remember the values
        self.config_folder_path = config_dir
        self.config_file_path = config_file

    def to_config_file(self) -> None:
        """Saves the current configuration to the configuration file.

        Also updates the configuration file settings.

        Raises:
            ValueError: If the configuration file path is not specified or can not be written to.
        """
        if not self.config_file_path:
            raise ValueError("Configuration file path unknown.")
        with self.config_file_path.open("w", encoding=self.ENCODING) as f_out:
            try:
                json_str = super().to_json()
                # Write to file
                f_out.write(json_str)
                # Also remember as actual settings
                ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
            except ValidationError as exc:
                raise ValueError(f"Could not update '{self.config_file_path}': {exc}")

    def _config_value(self, key: str) -> Any:
        """Retrieves the configuration value for a specific key, following a priority order.

        Values are fetched in the following order:
            1. Settings.
            2. Environment variables.
            3. EOS configuration file.
            4. Current configuration.
            5. Field default constants.

        Args:
            key (str): The configuration key to retrieve.

        Returns:
            Any: The configuration value, or None if not found.
        """
        # Settings
        if ConfigEOS._settings:
            if (value := getattr(self.settings, key, None)) is not None:
                return value

        # Environment variables
        if (value := os.getenv(key)) is not None:
            try:
                return float(value)
            except ValueError:
                return value

        # EOS configuration file.
        if self._file_settings:
            if (value := getattr(self._file_settings, key, None)) is not None:
                return value

        # Current configuration - key is valid as called by update().
        if (value := getattr(self, key, None)) is not None:
            return value

        # Field default constants
        if (value := ConfigEOS.model_fields[key].default) is not None:
            return value

        logger.debug(f"Value for configuration key '{key}' not found or is {value}")
        return None

    def update(self) -> None:
        """Updates all configuration fields.

        This method updates all configuration fields using the following order for value retrieval:
            1. Settings.
            2. Environment variables.
            3. EOS configuration file.
            4. Current configuration.
            5. Field default constants.

        The first non None value in priority order is taken.
        """
        self._update_data_folder_path()
        for key in self.model_fields:
            setattr(self, key, self._config_value(key))


def get_config() -> ConfigEOS:
    """Gets the EOS configuration data."""
    return ConfigEOS()
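Taken together with `get_config()`, the class above gives the rest of EOS a single shared configuration object; a minimal usage sketch based only on the methods defined above:

```python
# Minimal sketch of the new configuration API defined above.
from akkudoktoreos.config.config import get_config

config_eos = get_config()  # always the same ConfigEOS singleton instance
config_eos.merge_settings_from_dict(
    {"prediction_hours": 48, "optimization_hours": 24}
)
print(config_eos.prediction_hours)  # settings take priority over file values
config_eos.to_config_file()         # persist merged settings to EOS.config.json
```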
24 src/akkudoktoreos/config/configabc.py Normal file
@@ -0,0 +1,24 @@
"""Abstract and base classes for configuration."""

from akkudoktoreos.core.pydantic import PydanticBaseModel


class SettingsBaseModel(PydanticBaseModel):
    """Base model class for all settings configurations.

    Note:
        Settings property names shall be disjunctive to all existing settings' property names.
    """

    def reset_to_defaults(self) -> None:
        """Resets the fields to their default values."""
        for field_name, field_info in self.model_fields.items():
            if field_info.default_factory is not None:  # Handle fields with default_factory
                default_value = field_info.default_factory()
            else:
                default_value = field_info.default
            try:
                setattr(self, field_name, default_value)
            except (AttributeError, TypeError):
                # Skip fields that are read-only or dynamically computed
                pass
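A minimal sketch of `reset_to_defaults()` in action, using a hypothetical settings model (`DemoSettings` is not part of the commit):

```python
# Hypothetical demo model; only SettingsBaseModel comes from the commit above.
from typing import Optional

from akkudoktoreos.config.configabc import SettingsBaseModel


class DemoSettings(SettingsBaseModel):
    demo_value: Optional[int] = None


settings = DemoSettings(demo_value=42)
settings.reset_to_defaults()
assert settings.demo_value is None  # field restored to its declared default
```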
275 src/akkudoktoreos/core/coreabc.py Normal file
@@ -0,0 +1,275 @@
"""Abstract and base classes for EOS core.

This module provides foundational classes for handling configuration and prediction
functionality in EOS. It includes base classes that provide convenient access to global
configuration and prediction instances through properties.

Classes:
    - ConfigMixin: Mixin class for managing and accessing global configuration.
    - PredictionMixin: Mixin class for managing and accessing global prediction data.
    - SingletonMixin: Mixin class to create singletons.
"""

import threading
from typing import Any, ClassVar, Dict, Optional, Type

from pendulum import DateTime
from pydantic import computed_field

from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)

config_eos: Any = None
prediction_eos: Any = None
devices_eos: Any = None
ems_eos: Any = None


class ConfigMixin:
    """Mixin class for managing EOS configuration data.

    This class serves as a foundational component for EOS-related classes requiring access
    to the global EOS configuration. It provides a `config` property that dynamically retrieves
    the configuration instance, ensuring up-to-date access to configuration settings.

    Usage:
        Subclass this base class to gain access to the `config` attribute, which retrieves the
        global configuration instance lazily to avoid import-time circular dependencies.

    Attributes:
        config (ConfigEOS): Property to access the global EOS configuration.

    Example:
        ```python
        class MyEOSClass(ConfigMixin):
            def my_method(self):
                if self.config.myconfigval:
        ```
    """

    @property
    def config(self) -> Any:
        """Convenience method/attribute to retrieve the EOS configuration data.

        Returns:
            ConfigEOS: The configuration.
        """
        # avoid circular dependency at import time
        global config_eos
        if config_eos is None:
            from akkudoktoreos.config.config import get_config

            config_eos = get_config()

        return config_eos


class PredictionMixin:
    """Mixin class for managing EOS prediction data.

    This class serves as a foundational component for EOS-related classes requiring access
    to global prediction data. It provides a `prediction` property that dynamically retrieves
    the prediction instance, ensuring up-to-date access to prediction results.

    Usage:
        Subclass this base class to gain access to the `prediction` attribute, which retrieves the
        global prediction instance lazily to avoid import-time circular dependencies.

    Attributes:
        prediction (Prediction): Property to access the global EOS prediction data.

    Example:
        ```python
        class MyOptimizationClass(PredictionMixin):
            def analyze_myprediction(self):
                prediction_data = self.prediction.mypredictionresult
                # Perform analysis
        ```
    """

    @property
    def prediction(self) -> Any:
        """Convenience method/attribute to retrieve the EOS prediction data.

        Returns:
            Prediction: The prediction.
        """
        # avoid circular dependency at import time
        global prediction_eos
        if prediction_eos is None:
            from akkudoktoreos.prediction.prediction import get_prediction

            prediction_eos = get_prediction()

        return prediction_eos


class DevicesMixin:
    """Mixin class for managing EOS devices simulation data.

    This class serves as a foundational component for EOS-related classes requiring access
    to global devices simulation data. It provides a `devices` property that dynamically retrieves
    the devices instance, ensuring up-to-date access to devices simulation results.

    Usage:
        Subclass this base class to gain access to the `devices` attribute, which retrieves the
        global devices instance lazily to avoid import-time circular dependencies.

    Attributes:
        devices (Devices): Property to access the global EOS devices simulation data.

    Example:
        ```python
        class MyOptimizationClass(DevicesMixin):
            def analyze_mydevicesimulation(self):
                device_simulation_data = self.devices.mydevicesresult
                # Perform analysis
        ```
    """

    @property
    def devices(self) -> Any:
        """Convenience method/attribute to retrieve the EOS devices simulation data.

        Returns:
            Devices: The devices simulation.
        """
        # avoid circular dependency at import time
        global devices_eos
        if devices_eos is None:
            from akkudoktoreos.devices.devices import get_devices

            devices_eos = get_devices()

        return devices_eos


class EnergyManagementSystemMixin:
    """Mixin class for managing the EOS energy management system.

    This class serves as a foundational component for EOS-related classes requiring access
    to the global energy management system. It provides an `ems` property that dynamically
    retrieves the energy management system instance, ensuring up-to-date access to energy
    management system control.

    Usage:
        Subclass this base class to gain access to the `ems` attribute, which retrieves the
        global EnergyManagementSystem instance lazily to avoid import-time circular dependencies.

    Attributes:
        ems (EnergyManagementSystem): Property to access the global EOS energy management system.

    Example:
        ```python
        class MyOptimizationClass(EnergyManagementSystemMixin):
            def analyze_myprediction(self):
                ems_data = self.ems.the_ems_method()
                # Perform analysis
        ```
    """

    @property
    def ems(self) -> Any:
        """Convenience method/attribute to retrieve the EOS energy management system.

        Returns:
            EnergyManagementSystem: The energy management system.
        """
        # avoid circular dependency at import time
        global ems_eos
        if ems_eos is None:
            from akkudoktoreos.core.ems import get_ems

            ems_eos = get_ems()

        return ems_eos


class StartMixin(EnergyManagementSystemMixin):
    """A mixin to manage the start datetime for energy management.

    Provides property:
        - `start_datetime`: The starting datetime of the current or latest energy management.
    """

    # Computed field for start_datetime
    @computed_field  # type: ignore[prop-decorator]
    @property
    def start_datetime(self) -> Optional[DateTime]:
        """Returns the start datetime of the current or latest energy management.

        Returns:
            DateTime: The starting datetime of the current or latest energy management, or None.
        """
        return self.ems.start_datetime


class SingletonMixin:
    """A thread-safe singleton mixin class.

    Ensures that only one instance of the derived class is created, even when accessed from
    multiple threads. This mixin is intended to be combined with other classes, such as Pydantic
    models, to make them singletons.

    Attributes:
        _instances (Dict[Type, Any]): A dictionary holding instances of each singleton class.
        _lock (threading.Lock): A lock to synchronize access to singleton instance creation.

    Usage:
        - Inherit from `SingletonMixin` alongside other classes to make them singletons.
        - Avoid using `__init__` to reinitialize the singleton instance after it has been created.

    Example:
        class MySingletonModel(SingletonMixin, PydanticBaseModel):
            name: str

        instance1 = MySingletonModel(name="Instance 1")
        instance2 = MySingletonModel(name="Instance 2")

        assert instance1 is instance2  # True
        print(instance1.name)  # Output: "Instance 1"
    """

    _lock: ClassVar[threading.Lock] = threading.Lock()
    _instances: ClassVar[Dict[Type, Any]] = {}

    def __new__(cls: Type["SingletonMixin"], *args: Any, **kwargs: Any) -> "SingletonMixin":
        """Creates or returns the singleton instance of the class.

        Ensures thread-safe instance creation by locking during the first instantiation.

        Args:
            *args: Positional arguments for instance creation (ignored if instance exists).
            **kwargs: Keyword arguments for instance creation (ignored if instance exists).

        Returns:
            SingletonMixin: The singleton instance of the derived class.
        """
        if cls not in cls._instances:
            with cls._lock:
                if cls not in cls._instances:
                    instance = super().__new__(cls)
                    cls._instances[cls] = instance
        return cls._instances[cls]

    @classmethod
    def reset_instance(cls) -> None:
        """Resets the singleton instance, forcing it to be recreated on next access."""
        with cls._lock:
            if cls in cls._instances:
                del cls._instances[cls]
|
||||||
|
logger.debug(f"{cls.__name__} singleton instance has been reset.")
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
"""Initializes the singleton instance if it has not been initialized previously.
|
||||||
|
|
||||||
|
Further calls to `__init__` are ignored for the singleton instance.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
*args: Positional arguments for initialization.
|
||||||
|
**kwargs: Keyword arguments for initialization.
|
||||||
|
"""
|
||||||
|
if not hasattr(self, "_initialized"):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._initialized = True
|
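The mixins above all share one pattern: a module-level singleton is resolved lazily on first property access, so importing `coreabc` never pulls in the heavy prediction, devices, or EMS modules. Below is a minimal, self-contained sketch of that pattern; `MyService`, `get_service`, and `Consumer` are hypothetical names used for illustration only, not part of EOS.

```python
from typing import Any, Optional

# Module-level singleton slot, mirroring prediction_eos / devices_eos / ems_eos.
service_instance: Optional["MyService"] = None


class MyService:
    def result(self) -> str:
        return "result"


def get_service() -> MyService:
    """Create the singleton on first use, then always return the same object."""
    global service_instance
    if service_instance is None:
        service_instance = MyService()
    return service_instance


class ServiceMixin:
    @property
    def service(self) -> Any:
        # Lookup deferred to first access, mirroring how the mixins above
        # avoid import-time circular dependencies.
        return get_service()


class Consumer(ServiceMixin):
    def analyze(self) -> str:
        return self.service.result()


if __name__ == "__main__":
    assert Consumer().service is Consumer().service  # same global instance
```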
1307  src/akkudoktoreos/core/dataabc.py  (Normal file)
File diff suppressed because it is too large
@@ -1,18 +1,24 @@
-from datetime import datetime
-from typing import Any, Dict, Optional, Union
+from typing import Any, ClassVar, Dict, Optional, Union

 import numpy as np
-from pydantic import BaseModel, Field, field_validator, model_validator
+from numpydantic import NDArray, Shape
+from pendulum import DateTime
+from pydantic import ConfigDict, Field, computed_field, field_validator, model_validator
 from typing_extensions import Self

-from akkudoktoreos.config import EOSConfig
+from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin, SingletonMixin
+from akkudoktoreos.core.pydantic import PydanticBaseModel
 from akkudoktoreos.devices.battery import PVAkku
 from akkudoktoreos.devices.generic import HomeAppliance
 from akkudoktoreos.devices.inverter import Wechselrichter
+from akkudoktoreos.utils.datetimeutil import to_datetime
+from akkudoktoreos.utils.logutil import get_logger
 from akkudoktoreos.utils.utils import NumpyEncoder

+logger = get_logger(__name__)
+

-class EnergieManagementSystemParameters(BaseModel):
+class EnergieManagementSystemParameters(PydanticBaseModel):
     pv_prognose_wh: list[float] = Field(
         description="An array of floats representing the forecasted photovoltaic output in watts for different time intervals."
     )
@@ -22,7 +28,9 @@ class EnergieManagementSystemParameters(BaseModel):
     einspeiseverguetung_euro_pro_wh: list[float] | float = Field(
         description="A float or array of floats representing the feed-in compensation in euros per watt-hour."
     )
-    preis_euro_pro_wh_akku: float
+    preis_euro_pro_wh_akku: float = Field(
+        description="A float representing the cost of battery energy per watt-hour."
+    )
     gesamtlast: list[float] = Field(
         description="An array of floats representing the total load (consumption) in watts for different time intervals."
     )
@@ -42,7 +50,7 @@ class EnergieManagementSystemParameters(BaseModel):
         return self


-class SimulationResult(BaseModel):
+class SimulationResult(PydanticBaseModel):
     """This object contains the results of the simulation and provides insights into various parameters over the entire forecast period."""

     Last_Wh_pro_Stunde: list[Optional[float]] = Field(description="TBD")
@@ -95,16 +103,75 @@ class SimulationResult(BaseModel):
         return NumpyEncoder.convert_numpy(field)[0]


-class EnergieManagementSystem:
-    def __init__(
+class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBaseModel):
+    # Disable validation on assignment to speed up simulation runs.
+    model_config = ConfigDict(
+        validate_assignment=False,
+    )
+
+    # Start datetime.
+    _start_datetime: ClassVar[Optional[DateTime]] = None
+
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def start_datetime(self) -> DateTime:
+        """The starting datetime of the current or latest energy management."""
+        if EnergieManagementSystem._start_datetime is None:
+            EnergieManagementSystem.set_start_datetime()
+        return EnergieManagementSystem._start_datetime
+
+    @classmethod
+    def set_start_datetime(cls, start_datetime: Optional[DateTime] = None) -> DateTime:
+        if start_datetime is None:
+            start_datetime = to_datetime()
+        cls._start_datetime = start_datetime.set(minute=0, second=0, microsecond=0)
+        return cls._start_datetime
+
+    # -------------------------
+    # TODO: Take from prediction
+    # -------------------------
+
+    gesamtlast: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="An array of floats representing the total load (consumption) in watts for different time intervals.",
+    )
+    pv_prognose_wh: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="An array of floats representing the forecasted photovoltaic output in watts for different time intervals.",
+    )
+    strompreis_euro_pro_wh: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="An array of floats representing the electricity price in euros per watt-hour for different time intervals.",
+    )
+    einspeiseverguetung_euro_pro_wh_arr: Optional[NDArray[Shape["*"], float]] = Field(
+        default=None,
+        description="An array of floats representing the feed-in compensation in euros per watt-hour.",
+    )
+
+    # -------------------------
+    # TODO: Move to devices
+    # -------------------------
+
+    akku: Optional[PVAkku] = Field(default=None, description="TBD.")
+    eauto: Optional[PVAkku] = Field(default=None, description="TBD.")
+    home_appliance: Optional[HomeAppliance] = Field(default=None, description="TBD.")
+    wechselrichter: Optional[Wechselrichter] = Field(default=None, description="TBD.")
+
+    # -------------------------
+    # TODO: Move to devices
+    # -------------------------
+
+    ac_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
+    dc_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
+    ev_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
+
+    def set_parameters(
         self,
-        config: EOSConfig,
         parameters: EnergieManagementSystemParameters,
-        wechselrichter: Wechselrichter,
         eauto: Optional[PVAkku] = None,
         home_appliance: Optional[HomeAppliance] = None,
-    ):
-        self.akku = wechselrichter.akku
+        wechselrichter: Optional[Wechselrichter] = None,
+    ) -> None:
         self.gesamtlast = np.array(parameters.gesamtlast, float)
         self.pv_prognose_wh = np.array(parameters.pv_prognose_wh, float)
         self.strompreis_euro_pro_wh = np.array(parameters.strompreis_euro_pro_wh, float)
@@ -113,15 +180,20 @@ class EnergieManagementSystem:
             if isinstance(parameters.einspeiseverguetung_euro_pro_wh, list)
             else np.full(len(self.gesamtlast), parameters.einspeiseverguetung_euro_pro_wh, float)
         )
+        if wechselrichter is not None:
+            self.akku = wechselrichter.akku
+        else:
+            self.akku = None
         self.eauto = eauto
         self.home_appliance = home_appliance
         self.wechselrichter = wechselrichter
-        self.ac_charge_hours = np.full(config.prediction_hours, 0)
-        self.dc_charge_hours = np.full(config.prediction_hours, 1)
-        self.ev_charge_hours = np.full(config.prediction_hours, 0)
+        self.ac_charge_hours = np.full(self.config.prediction_hours, 0.0)
+        self.dc_charge_hours = np.full(self.config.prediction_hours, 1.0)
+        self.ev_charge_hours = np.full(self.config.prediction_hours, 0.0)

     def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
-        self.akku.set_discharge_per_hour(ds)
+        if self.akku is not None:
+            self.akku.set_discharge_per_hour(ds)

     def set_akku_ac_charge_hours(self, ds: np.ndarray) -> None:
         self.ac_charge_hours = ds
@@ -132,17 +204,69 @@ class EnergieManagementSystem:
     def set_ev_charge_hours(self, ds: np.ndarray) -> None:
         self.ev_charge_hours = ds

-    def set_home_appliance_start(self, start_hour: int, global_start_hour: int = 0) -> None:
-        assert self.home_appliance is not None
-        self.home_appliance.set_starting_time(start_hour, global_start_hour=global_start_hour)
+    def set_home_appliance_start(self, ds: int, global_start_hour: int = 0) -> None:
+        if self.home_appliance is not None:
+            self.home_appliance.set_starting_time(ds, global_start_hour=global_start_hour)

     def reset(self) -> None:
         if self.eauto:
             self.eauto.reset()
-        self.akku.reset()
+        if self.akku:
+            self.akku.reset()
+
+    def run(
+        self,
+        start_hour: Optional[int] = None,
+        force_enable: Optional[bool] = False,
+        force_update: Optional[bool] = False,
+    ) -> None:
+        """Run energy management.
+
+        Sets `start_datetime` to the current hour, updates the configuration and the
+        prediction, and starts the simulation at the current hour.
+
+        Args:
+            start_hour (int, optional): Hour to take as start time for the energy management.
+                Defaults to now.
+            force_enable (bool, optional): If True, forces an update even if disabled. This is
+                mostly relevant to prediction providers.
+            force_update (bool, optional): If True, forces an update of the data even if it is
+                still cached.
+        """
+        self.set_start_hour(start_hour=start_hour)
+        self.config.update()
+
+        # Check for run definitions
+        if self.start_datetime is None:
+            error_msg = "Start datetime unknown."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+        if self.config.prediction_hours is None:
+            error_msg = "Prediction hours unknown."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+        if self.config.optimisation_hours is None:
+            error_msg = "Optimisation hours unknown."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
+        self.prediction.update_data(force_enable=force_enable, force_update=force_update)
+        # TODO: Create optimisation problem that calls into devices.update_data() for simulations.
+
+    def set_start_hour(self, start_hour: Optional[int] = None) -> None:
+        """Sets the start datetime to the given hour.
+
+        Args:
+            start_hour (int, optional): Hour to take as start time for the energy management.
+                Defaults to now.
+        """
+        if start_hour is None:
+            self.set_start_datetime()
+        else:
+            start_datetime = to_datetime().set(hour=start_hour, minute=0, second=0, microsecond=0)
+            self.set_start_datetime(start_datetime)

     def simuliere_ab_jetzt(self) -> dict[str, Any]:
-        jetzt = datetime.now()
+        jetzt = to_datetime().now()
         start_stunde = jetzt.hour
         return self.simuliere(start_stunde)
@@ -152,10 +276,35 @@ class EnergieManagementSystem:
         akku_soc_pro_stunde begin of the hour, initial hour state!
         last_wh_pro_stunde integral of last hour (end state)
         """
+        # Check for simulation integrity
+        if (
+            self.gesamtlast is None
+            or self.pv_prognose_wh is None
+            or self.strompreis_euro_pro_wh is None
+            or self.ev_charge_hours is None
+            or self.ac_charge_hours is None
+            or self.dc_charge_hours is None
+            or self.einspeiseverguetung_euro_pro_wh_arr is None
+        ):
+            error_msg = (
+                f"Mandatory data missing - "
+                f"Load Curve: {self.gesamtlast}, "
+                f"PV Forecast: {self.pv_prognose_wh}, "
+                f"Electricity Price: {self.strompreis_euro_pro_wh}, "
+                f"EV Charge Hours: {self.ev_charge_hours}, "
+                f"AC Charge Hours: {self.ac_charge_hours}, "
+                f"DC Charge Hours: {self.dc_charge_hours}, "
+                f"Feed-in tariff: {self.einspeiseverguetung_euro_pro_wh_arr}"
+            )
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
         lastkurve_wh = self.gesamtlast
-        assert (
-            len(lastkurve_wh) == len(self.pv_prognose_wh) == len(self.strompreis_euro_pro_wh)
-        ), f"Array sizes do not match: Load Curve = {len(lastkurve_wh)}, PV Forecast = {len(self.pv_prognose_wh)}, Electricity Price = {len(self.strompreis_euro_pro_wh)}"
+        if not (len(lastkurve_wh) == len(self.pv_prognose_wh) == len(self.strompreis_euro_pro_wh)):
+            error_msg = f"Array sizes do not match: Load Curve = {len(lastkurve_wh)}, PV Forecast = {len(self.pv_prognose_wh)}, Electricity Price = {len(self.strompreis_euro_pro_wh)}"
+            logger.error(error_msg)
+            raise ValueError(error_msg)

         # Optimized total hours calculation
         ende = len(lastkurve_wh)
@@ -173,7 +322,8 @@ class EnergieManagementSystem:
         home_appliance_wh_per_hour = np.full((total_hours), np.nan)

         # Set initial state
-        akku_soc_pro_stunde[0] = self.akku.ladezustand_in_prozent()
+        if self.akku:
+            akku_soc_pro_stunde[0] = self.akku.ladezustand_in_prozent()
         if self.eauto:
             eauto_soc_pro_stunde[0] = self.eauto.ladezustand_in_prozent()
|
|||||||
# Accumulate loads and PV generation
|
# Accumulate loads and PV generation
|
||||||
verbrauch = self.gesamtlast[stunde]
|
verbrauch = self.gesamtlast[stunde]
|
||||||
verluste_wh_pro_stunde[stunde_since_now] = 0.0
|
verluste_wh_pro_stunde[stunde_since_now] = 0.0
|
||||||
|
|
||||||
|
# Home appliances
|
||||||
if self.home_appliance:
|
if self.home_appliance:
|
||||||
ha_load = self.home_appliance.get_load_for_hour(stunde)
|
ha_load = self.home_appliance.get_load_for_hour(stunde)
|
||||||
verbrauch += ha_load
|
verbrauch += ha_load
|
||||||
home_appliance_wh_per_hour[stunde_since_now] = ha_load
|
home_appliance_wh_per_hour[stunde_since_now] = ha_load
|
||||||
|
|
||||||
# E-Auto handling
|
# E-Auto handling
|
||||||
if self.eauto and self.ev_charge_hours[stunde] > 0:
|
|
||||||
geladene_menge_eauto, verluste_eauto = self.eauto.energie_laden(
|
|
||||||
None, stunde, relative_power=self.ev_charge_hours[stunde]
|
|
||||||
)
|
|
||||||
verbrauch += geladene_menge_eauto
|
|
||||||
verluste_wh_pro_stunde[stunde_since_now] += verluste_eauto
|
|
||||||
|
|
||||||
if self.eauto:
|
if self.eauto:
|
||||||
|
if self.ev_charge_hours[stunde] > 0:
|
||||||
|
geladene_menge_eauto, verluste_eauto = self.eauto.energie_laden(
|
||||||
|
None, stunde, relative_power=self.ev_charge_hours[stunde]
|
||||||
|
)
|
||||||
|
verbrauch += geladene_menge_eauto
|
||||||
|
verluste_wh_pro_stunde[stunde_since_now] += verluste_eauto
|
||||||
eauto_soc_pro_stunde[stunde_since_now] = self.eauto.ladezustand_in_prozent()
|
eauto_soc_pro_stunde[stunde_since_now] = self.eauto.ladezustand_in_prozent()
|
||||||
|
|
||||||
# Process inverter logic
|
# Process inverter logic
|
||||||
erzeugung = self.pv_prognose_wh[stunde]
|
netzeinspeisung, netzbezug, verluste, eigenverbrauch = (0.0, 0.0, 0.0, 0.0)
|
||||||
self.akku.set_charge_allowed_for_hour(self.dc_charge_hours[stunde], stunde)
|
if self.akku:
|
||||||
netzeinspeisung, netzbezug, verluste, eigenverbrauch = (
|
self.akku.set_charge_allowed_for_hour(self.dc_charge_hours[stunde], stunde)
|
||||||
self.wechselrichter.energie_verarbeiten(erzeugung, verbrauch, stunde)
|
if self.wechselrichter:
|
||||||
)
|
erzeugung = self.pv_prognose_wh[stunde]
|
||||||
|
netzeinspeisung, netzbezug, verluste, eigenverbrauch = (
|
||||||
|
self.wechselrichter.energie_verarbeiten(erzeugung, verbrauch, stunde)
|
||||||
|
)
|
||||||
|
|
||||||
# AC PV Battery Charge
|
# AC PV Battery Charge
|
||||||
if self.ac_charge_hours[stunde] > 0.0:
|
if self.akku and self.ac_charge_hours[stunde] > 0.0:
|
||||||
self.akku.set_charge_allowed_for_hour(1, stunde)
|
self.akku.set_charge_allowed_for_hour(1, stunde)
|
||||||
geladene_menge, verluste_wh = self.akku.energie_laden(
|
geladene_menge, verluste_wh = self.akku.energie_laden(
|
||||||
None, stunde, relative_power=self.ac_charge_hours[stunde]
|
None, stunde, relative_power=self.ac_charge_hours[stunde]
|
||||||
@@ -232,7 +387,10 @@ class EnergieManagementSystem:
             )

             # Akku SOC tracking
-            akku_soc_pro_stunde[stunde_since_now] = self.akku.ladezustand_in_prozent()
+            if self.akku:
+                akku_soc_pro_stunde[stunde_since_now] = self.akku.ladezustand_in_prozent()
+            else:
+                akku_soc_pro_stunde[stunde_since_now] = 0.0

         # Total cost and return
         gesamtkosten_euro = np.nansum(kosten_euro_pro_stunde) - np.nansum(einnahmen_euro_pro_stunde)
@@ -255,3 +413,12 @@ class EnergieManagementSystem:
         }

         return out
+
+
+# Initialize the Energy Management System, it is a singleton.
+ems = EnergieManagementSystem()
+
+
+def get_ems() -> EnergieManagementSystem:
+    """Gets the EOS Energy Management System."""
+    return ems
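For reference, a short usage sketch of the new singleton accessor; it assumes an initialised EOS configuration and at least one enabled prediction provider, and uses only `get_ems`, `run`, and `start_datetime` from the diff above.

```python
from akkudoktoreos.core.ems import get_ems

# Sketch only: assumes the EOS configuration and prediction providers are set up.
ems = get_ems()

# run() aligns start_datetime to the given hour, refreshes the configuration and
# the predictions, and raises ValueError if prediction_hours or
# optimisation_hours are still unset.
ems.run(start_hour=12, force_update=True)

print(ems.start_datetime)  # start of the current energy management run
```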
226  src/akkudoktoreos/core/pydantic.py  (Normal file)
@@ -0,0 +1,226 @@
"""Module for managing and serializing Pydantic-based models with custom support.

This module introduces the `PydanticBaseModel` class, which extends Pydantic’s `BaseModel` to
facilitate custom serialization and deserialization for `pendulum.DateTime` objects. The main
features include automatic handling of `pendulum.DateTime` fields, custom serialization to ISO
8601 format, and utility methods for converting model instances to and from dictionary and JSON
formats.

Key Classes:
    - PendulumDateTime: A custom type adapter that provides serialization and deserialization
      functionality for `pendulum.DateTime` objects, converting them to ISO 8601 strings and back.
    - PydanticBaseModel: A base model class for handling prediction records or configuration data
      with automatic Pendulum DateTime handling and additional methods for JSON and dictionary
      conversion.

Classes:
    PendulumDateTime(TypeAdapter[pendulum.DateTime]): Type adapter for `pendulum.DateTime` fields
        with ISO 8601 serialization. Includes:
        - serialize: Converts `pendulum.DateTime` instances to ISO 8601 string.
        - deserialize: Converts ISO 8601 strings to `pendulum.DateTime` instances.
        - is_iso8601: Validates if a string matches the ISO 8601 date format.

    PydanticBaseModel(BaseModel): Extends `pydantic.BaseModel` to handle `pendulum.DateTime`
        fields and adds convenience methods for dictionary and JSON serialization. Key methods:
        - model_dump: Dumps the model, converting `pendulum.DateTime` fields to ISO 8601.
        - model_construct: Constructs a model instance with automatic deserialization of
          `pendulum.DateTime` fields from ISO 8601.
        - to_dict: Serializes the model instance to a dictionary.
        - from_dict: Constructs a model instance from a dictionary.
        - to_json: Converts the model instance to a JSON string.
        - from_json: Creates a model instance from a JSON string.

Usage Example:
    # Define custom settings in a model using PydanticBaseModel
    class PredictionCommonSettings(PydanticBaseModel):
        prediction_start: pendulum.DateTime = Field(...)

    # Serialize a model instance to a dictionary or JSON
    config = PredictionCommonSettings(prediction_start=pendulum.now())
    config_dict = config.to_dict()
    config_json = config.to_json()

    # Deserialize from dictionary or JSON
    new_config = PredictionCommonSettings.from_dict(config_dict)
    restored_config = PredictionCommonSettings.from_json(config_json)

Dependencies:
    - `pendulum`: Required for handling timezone-aware datetime fields.
    - `pydantic`: Required for model and validation functionality.

Notes:
    - This module enables custom handling of Pendulum DateTime fields within Pydantic models,
      which is particularly useful for applications requiring consistent ISO 8601 datetime
      formatting and robust timezone-aware datetime support.
"""

import json
import re
from typing import Any, Type

import pendulum
from pydantic import BaseModel, ConfigDict, TypeAdapter


# Custom type adapter for Pendulum DateTime fields
class PendulumDateTime(TypeAdapter[pendulum.DateTime]):
    @classmethod
    def serialize(cls, value: Any) -> str:
        """Convert pendulum.DateTime to ISO 8601 string."""
        if isinstance(value, pendulum.DateTime):
            return value.to_iso8601_string()
        raise ValueError(f"Expected pendulum.DateTime, got {type(value)}")

    @classmethod
    def deserialize(cls, value: Any) -> pendulum.DateTime:
        """Convert ISO 8601 string to pendulum.DateTime."""
        if isinstance(value, str) and cls.is_iso8601(value):
            try:
                return pendulum.parse(value)
            except pendulum.parsing.exceptions.ParserError as e:
                raise ValueError(f"Invalid date format: {value}") from e
        elif isinstance(value, pendulum.DateTime):
            return value
        raise ValueError(f"Expected ISO 8601 string or pendulum.DateTime, got {type(value)}")

    @staticmethod
    def is_iso8601(value: str) -> bool:
        """Check if the string is a valid ISO 8601 date string."""
        iso8601_pattern = (
            r"^(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(?:\.\d{1,3})?(?:Z|[+-]\d{2}:\d{2})?)$"
        )
        return bool(re.match(iso8601_pattern, value))


class PydanticBaseModel(BaseModel):
    """Base model class with automatic serialization and deserialization of `pendulum.DateTime` fields.

    This model serializes pendulum.DateTime objects to ISO 8601 strings and
    deserializes ISO 8601 strings to pendulum.DateTime objects.
    """

    # Enable custom serialization globally in config
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        use_enum_values=True,
        validate_assignment=True,
    )

    # Override Pydantic’s serialization for all DateTime fields
    def model_dump(self, *args: Any, **kwargs: Any) -> dict:
        """Custom dump method to handle serialization for DateTime fields."""
        result = super().model_dump(*args, **kwargs)
        for key, value in result.items():
            if isinstance(value, pendulum.DateTime):
                result[key] = PendulumDateTime.serialize(value)
        return result

    @classmethod
    def model_construct(cls, data: dict) -> "PydanticBaseModel":
        """Custom constructor to handle deserialization for DateTime fields."""
        for key, value in data.items():
            if isinstance(value, str) and PendulumDateTime.is_iso8601(value):
                data[key] = PendulumDateTime.deserialize(value)
        # BaseModel.model_construct takes the field values as keyword arguments.
        return super().model_construct(**data)

    def reset_optional(self) -> "PydanticBaseModel":
        """Resets all optional fields in the model to None.

        Iterates through all model fields and sets any optional (non-required)
        fields to None. The modification is done in-place on the current instance.

        Returns:
            PydanticBaseModel: The current instance with all optional fields
                reset to None.

        Example:
            >>> settings = PydanticBaseModel(name="test", optional_field="value")
            >>> settings.reset_optional()
            >>> assert settings.optional_field is None
        """
        for field_name, field in self.model_fields.items():
            # FieldInfo.is_required() reports whether the field must be set.
            if not field.is_required():  # Check if field is optional
                setattr(self, field_name, None)
        return self

    def to_dict(self) -> dict:
        """Convert this PydanticBaseModel instance to a dictionary representation.

        Returns:
            dict: A dictionary where the keys are the field names of the PydanticBaseModel,
                and the values are the corresponding field values.
        """
        return self.model_dump()

    @classmethod
    def from_dict(cls: Type["PydanticBaseModel"], data: dict) -> "PydanticBaseModel":
        """Create a PydanticBaseModel instance from a dictionary.

        Args:
            data (dict): A dictionary containing data to initialize the PydanticBaseModel.
                Keys should match the field names defined in the model.

        Returns:
            PydanticBaseModel: An instance of the PydanticBaseModel populated with the data.

        Notes:
            Works with derived classes by ensuring the `cls` argument is used to instantiate the object.
        """
        return cls.model_validate(data)

    @classmethod
    def from_dict_with_reset(cls, data: dict | None = None) -> "PydanticBaseModel":
        """Creates a new instance with reset optional fields, then updates from dict.

        First creates an instance with default values, resets all optional fields
        to None, then updates the instance with the provided dictionary data if any.

        Args:
            data (dict | None): Dictionary containing field values to initialize
                the instance with. Defaults to None.

        Returns:
            PydanticBaseModel: A new instance with all optional fields initially
                reset to None and then updated with provided data.

        Example:
            >>> data = {"name": "test", "optional_field": "value"}
            >>> settings = PydanticBaseModel.from_dict_with_reset(data)
            >>> # All non-specified optional fields will be None
        """
        # Create instance with model defaults
        instance = cls()

        # Reset all optional fields to None
        instance.reset_optional()

        # Update with provided data if any
        if data:
            # Use model_validate to ensure proper type conversion and validation
            updated_instance = instance.model_validate({**instance.model_dump(), **data})
            return updated_instance

        return instance

    def to_json(self) -> str:
        """Convert the PydanticBaseModel instance to a JSON string.

        Returns:
            str: The JSON representation of the instance.
        """
        return self.model_dump_json()

    @classmethod
    def from_json(cls: Type["PydanticBaseModel"], json_str: str) -> "PydanticBaseModel":
        """Create an instance of the PydanticBaseModel class or its subclass from a JSON string.

        Args:
            json_str (str): JSON string to parse and convert into a PydanticBaseModel instance.

        Returns:
            PydanticBaseModel: A new instance of the class, populated with data from the JSON string.

        Notes:
            Works with derived classes by ensuring the `cls` argument is used to instantiate the object.
        """
        data = json.loads(json_str)
        return cls.model_validate(data)
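A brief round-trip sketch of the DateTime handling above; `ExampleSettings` is a hypothetical model for illustration, and a datetime without fractional seconds is used so that the serialized string matches the `is_iso8601` pattern.

```python
import pendulum
from pydantic import Field

from akkudoktoreos.core.pydantic import PendulumDateTime, PydanticBaseModel


class ExampleSettings(PydanticBaseModel):  # hypothetical model for illustration
    start: pendulum.DateTime = Field(description="Start of the run.")


settings = ExampleSettings(start=pendulum.datetime(2025, 1, 1, 12, 0, 0))

data = settings.to_dict()  # model_dump() renders `start` as an ISO 8601 string
assert isinstance(data["start"], str)

# The type adapter converts the ISO 8601 string back into a pendulum.DateTime.
restored = PendulumDateTime.deserialize(data["start"])
assert isinstance(restored, pendulum.DateTime)
```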
122  src/akkudoktoreos/data/default.config.json  (Normal file)
@@ -0,0 +1,122 @@
{
    "config_file_path": null,
    "config_folder_path": null,
    "data_cache_path": null,
    "data_cache_subpath": null,
    "data_folder_path": null,
    "data_output_path": null,
    "data_output_subpath": null,
    "elecprice_provider": null,
    "elecpriceimport_file_path": null,
    "latitude": null,
    "load0_import_file_path": null,
    "load0_name": null,
    "load0_provider": null,
    "load1_import_file_path": null,
    "load1_name": null,
    "load1_provider": null,
    "load2_import_file_path": null,
    "load2_name": null,
    "load2_provider": null,
    "load3_import_file_path": null,
    "load3_name": null,
    "load3_provider": null,
    "load4_import_file_path": null,
    "load4_name": null,
    "load4_provider": null,
    "loadakkudoktor_year_energy": null,
    "longitude": null,
    "optimization_ev_available_charge_rates_percent": [],
    "optimization_hours": 24,
    "optimization_penalty": null,
    "prediction_historic_hours": 48,
    "prediction_hours": 48,
    "pvforecast0_albedo": null,
    "pvforecast0_inverter_model": null,
    "pvforecast0_inverter_paco": null,
    "pvforecast0_loss": null,
    "pvforecast0_module_model": null,
    "pvforecast0_modules_per_string": null,
    "pvforecast0_mountingplace": "free",
    "pvforecast0_optimal_surface_tilt": false,
    "pvforecast0_optimalangles": false,
    "pvforecast0_peakpower": null,
    "pvforecast0_pvtechchoice": "crystSi",
    "pvforecast0_strings_per_inverter": null,
    "pvforecast0_surface_azimuth": 180,
    "pvforecast0_surface_tilt": 0,
    "pvforecast0_trackingtype": 0,
    "pvforecast0_userhorizon": null,
    "pvforecast1_albedo": null,
    "pvforecast1_inverter_model": null,
    "pvforecast1_inverter_paco": null,
    "pvforecast1_loss": 0,
    "pvforecast1_module_model": null,
    "pvforecast1_modules_per_string": null,
    "pvforecast1_mountingplace": "free",
    "pvforecast1_optimal_surface_tilt": false,
    "pvforecast1_optimalangles": false,
    "pvforecast1_peakpower": null,
    "pvforecast1_pvtechchoice": "crystSi",
    "pvforecast1_strings_per_inverter": null,
    "pvforecast1_surface_azimuth": 180,
    "pvforecast1_surface_tilt": 0,
    "pvforecast1_trackingtype": 0,
    "pvforecast1_userhorizon": null,
    "pvforecast2_albedo": null,
    "pvforecast2_inverter_model": null,
    "pvforecast2_inverter_paco": null,
    "pvforecast2_loss": 0,
    "pvforecast2_module_model": null,
    "pvforecast2_modules_per_string": null,
    "pvforecast2_mountingplace": "free",
    "pvforecast2_optimal_surface_tilt": false,
    "pvforecast2_optimalangles": false,
    "pvforecast2_peakpower": null,
    "pvforecast2_pvtechchoice": "crystSi",
    "pvforecast2_strings_per_inverter": null,
    "pvforecast2_surface_azimuth": 180,
    "pvforecast2_surface_tilt": 0,
    "pvforecast2_trackingtype": 0,
    "pvforecast2_userhorizon": null,
    "pvforecast3_albedo": null,
    "pvforecast3_inverter_model": null,
    "pvforecast3_inverter_paco": null,
    "pvforecast3_loss": 0,
    "pvforecast3_module_model": null,
    "pvforecast3_modules_per_string": null,
    "pvforecast3_mountingplace": "free",
    "pvforecast3_optimal_surface_tilt": false,
    "pvforecast3_optimalangles": false,
    "pvforecast3_peakpower": null,
    "pvforecast3_pvtechchoice": "crystSi",
    "pvforecast3_strings_per_inverter": null,
    "pvforecast3_surface_azimuth": 180,
    "pvforecast3_surface_tilt": 0,
    "pvforecast3_trackingtype": 0,
    "pvforecast3_userhorizon": null,
    "pvforecast4_albedo": null,
    "pvforecast4_inverter_model": null,
    "pvforecast4_inverter_paco": null,
    "pvforecast4_loss": 0,
    "pvforecast4_module_model": null,
    "pvforecast4_modules_per_string": null,
    "pvforecast4_mountingplace": "free",
    "pvforecast4_optimal_surface_tilt": false,
    "pvforecast4_optimalangles": false,
    "pvforecast4_peakpower": null,
    "pvforecast4_pvtechchoice": "crystSi",
    "pvforecast4_strings_per_inverter": null,
    "pvforecast4_surface_azimuth": 180,
    "pvforecast4_surface_tilt": 0,
    "pvforecast4_trackingtype": 0,
    "pvforecast4_userhorizon": null,
    "pvforecast_provider": null,
    "pvforecastimport_file_path": null,
    "server_fastapi_host": "0.0.0.0",
    "server_fastapi_port": 8503,
    "server_fasthtml_host": "0.0.0.0",
    "server_fasthtml_port": 8504,
    "weather_provider": null,
    "weatherimport_file_path": null
}
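A small sketch of reading these shipped defaults; resolving the file relative to the installed `akkudoktoreos` package is an assumption here, not necessarily how EOS locates it internally.

```python
import json
from pathlib import Path

import akkudoktoreos

# Assumption: the defaults live at <package>/data/default.config.json,
# matching the path shown in this diff.
default_config = Path(akkudoktoreos.__file__).parent / "data" / "default.config.json"
with default_config.open() as fp:
    defaults = json.load(fp)

assert defaults["prediction_hours"] == 48
assert defaults["optimization_hours"] == 24
```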
@@ -1,15 +0,0 @@
-{
-    "directories": {
-        "output": "output",
-        "cache": "cache"
-    },
-    "eos": {
-        "prediction_hours": 48,
-        "optimization_hours": 24,
-        "penalty": 10,
-        "available_charging_rates_in_percentage": [
-            0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0
-        ],
-        "feed_in_tariff_eur_per_wh": 48
-    }
-}
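The removed nested file corresponds to flat keys in the new `default.config.json`. Below is a hedged migration sketch; the `penalty` to `optimization_penalty` and charge-rate list renames are inferred from this diff, and `feed_in_tariff_eur_per_wh` has no obvious flat counterpart in the new defaults, so it is dropped here.

```python
from typing import Any, Dict


def migrate_old_config(old: Dict[str, Any]) -> Dict[str, Any]:
    """Map the removed nested config layout onto the new flat keys (inferred mapping)."""
    eos = old.get("eos", {})
    return {
        "prediction_hours": eos.get("prediction_hours"),
        "optimization_hours": eos.get("optimization_hours"),
        "optimization_penalty": eos.get("penalty"),
        "optimization_ev_available_charge_rates_percent": eos.get(
            "available_charging_rates_in_percentage", []
        ),
    }


old = {
    "directories": {"output": "output", "cache": "cache"},
    "eos": {
        "prediction_hours": 48,
        "optimization_hours": 24,
        "penalty": 10,
        "available_charging_rates_in_percentage": [0.0, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],
    },
}
print(migrate_old_config(old))
```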
@@ -3,8 +3,12 @@ from typing import Any, Optional
 import numpy as np
 from pydantic import BaseModel, Field, field_validator

+from akkudoktoreos.devices.devicesabc import DeviceBase
+from akkudoktoreos.utils.logutil import get_logger
 from akkudoktoreos.utils.utils import NumpyEncoder

+logger = get_logger(__name__)
+

 def max_ladeleistung_w_field(default: Optional[float] = None) -> Optional[float]:
     return Field(
@@ -83,31 +87,86 @@ class EAutoResult(BaseModel):
         return NumpyEncoder.convert_numpy(field)[0]


-class PVAkku:
-    def __init__(self, parameters: BaseAkkuParameters, hours: int = 24):
-        # Battery capacity in Wh
-        self.kapazitaet_wh = parameters.kapazitaet_wh
-        # Initial state of charge in Wh
-        self.start_soc_prozent = parameters.start_soc_prozent
-        self.soc_wh = (parameters.start_soc_prozent / 100) * parameters.kapazitaet_wh
-        self.hours = hours
+class PVAkku(DeviceBase):
+    def __init__(
+        self,
+        parameters: Optional[BaseAkkuParameters] = None,
+        hours: Optional[int] = 24,
+        provider_id: Optional[str] = None,
+    ):
+        # Configuration initialisation
+        self.provider_id = provider_id
+        self.prefix = "<invalid>"
+        if self.provider_id == "GenericBattery":
+            self.prefix = "battery"
+        elif self.provider_id == "GenericBEV":
+            self.prefix = "bev"
+        # Parameter initialisation
+        self.parameters = parameters
+        if hours is None:
+            self.hours = self.total_hours
+        else:
+            self.hours = hours
+
+        self.initialised = False
+        # Run setup if parameters are given, otherwise setup() has to be called
+        # later when the config is initialised.
+        if self.parameters is not None:
+            self.setup()
+
+    def setup(self) -> None:
+        if self.initialised:
+            return
+        if self.provider_id is not None:
+            # Setup by configuration
+            # Battery capacity in Wh
+            self.kapazitaet_wh = getattr(self.config, f"{self.prefix}_capacity")
+            # Initial state of charge in Wh
+            self.start_soc_prozent = getattr(self.config, f"{self.prefix}_soc_start")
+            self.hours = self.total_hours
+            # Charge and discharge efficiency
+            self.lade_effizienz = getattr(self.config, f"{self.prefix}_charge_efficiency")
+            self.entlade_effizienz = getattr(self.config, f"{self.prefix}_discharge_efficiency")
+            self.max_ladeleistung_w = getattr(self.config, f"{self.prefix}_charge_power_max")
+            # Only assign for storage battery
+            if self.provider_id == "GenericBattery":
+                self.min_soc_prozent = getattr(self.config, f"{self.prefix}_soc_min")
+            else:
+                self.min_soc_prozent = 0
+            self.max_soc_prozent = getattr(self.config, f"{self.prefix}_soc_max")
+        elif self.parameters is not None:
+            # Setup by parameters
+            # Battery capacity in Wh
+            self.kapazitaet_wh = self.parameters.kapazitaet_wh
+            # Initial state of charge in Wh
+            self.start_soc_prozent = self.parameters.start_soc_prozent
+            # Charge and discharge efficiency
+            self.lade_effizienz = self.parameters.lade_effizienz
+            self.entlade_effizienz = self.parameters.entlade_effizienz
+            self.max_ladeleistung_w = self.parameters.max_ladeleistung_w
+            # Only assign for storage battery
+            self.min_soc_prozent = (
+                self.parameters.min_soc_prozent
+                if isinstance(self.parameters, PVAkkuParameters)
+                else 0
+            )
+            self.max_soc_prozent = self.parameters.max_soc_prozent
+        else:
+            error_msg = "Parameters and provider ID missing. Can't instantiate."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
+        # init
+        if self.max_ladeleistung_w is None:
+            self.max_ladeleistung_w = self.kapazitaet_wh
         self.discharge_array = np.full(self.hours, 1)
         self.charge_array = np.full(self.hours, 1)
-        # Charge and discharge efficiency
-        self.lade_effizienz = parameters.lade_effizienz
-        self.entlade_effizienz = parameters.entlade_effizienz
-        self.max_ladeleistung_w = (
-            parameters.max_ladeleistung_w if parameters.max_ladeleistung_w else self.kapazitaet_wh
-        )
-        # Only assign for storage battery
-        self.min_soc_prozent = (
-            parameters.min_soc_prozent if isinstance(parameters, PVAkkuParameters) else 0
-        )
-        self.max_soc_prozent = parameters.max_soc_prozent
-        # Calculate min and max SoC in Wh
+        # Calculate start, min and max SoC in Wh
+        self.soc_wh = (self.start_soc_prozent / 100) * self.kapazitaet_wh
         self.min_soc_wh = (self.min_soc_prozent / 100) * self.kapazitaet_wh
         self.max_soc_wh = (self.max_soc_prozent / 100) * self.kapazitaet_wh

+        self.initialised = True
+
     def to_dict(self) -> dict[str, Any]:
         return {
             "kapazitaet_wh": self.kapazitaet_wh,
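A sketch of the two `PVAkku` setup paths introduced above; the `PVAkkuParameters` field values are assumptions based on the attribute names in this diff, and the config-backed path only works once the global EOS configuration provides the `battery_*` settings.

```python
from akkudoktoreos.devices.battery import PVAkku, PVAkkuParameters

# Path 1: explicit parameters; setup() runs immediately in __init__.
# Field names are taken from the diff; required/optional status is assumed.
akku = PVAkku(
    parameters=PVAkkuParameters(kapazitaet_wh=10000, start_soc_prozent=50),
    hours=48,
)

# Path 2: configuration-backed; setup() is deferred until the global config
# (battery_capacity, battery_soc_start, ...) has been initialised.
akku_from_config = PVAkku(provider_id="GenericBattery", hours=None)
akku_from_config.setup()  # reads battery_* settings via self.config
```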
310  src/akkudoktoreos/devices/devices.py  (Normal file)
@@ -0,0 +1,310 @@
from typing import Any, ClassVar, Dict, Optional, Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import Field, computed_field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.coreabc import SingletonMixin
from akkudoktoreos.devices.battery import PVAkku
from akkudoktoreos.devices.devicesabc import DevicesBase
from akkudoktoreos.devices.generic import HomeAppliance
from akkudoktoreos.devices.inverter import Wechselrichter
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class DevicesCommonSettings(SettingsBaseModel):
    """Base configuration for devices simulation settings."""

    # Battery
    # -------
    battery_provider: Optional[str] = Field(
        default=None, description="Id of Battery simulation provider."
    )
    battery_capacity: Optional[int] = Field(default=None, description="Battery capacity [Wh].")
    battery_soc_start: Optional[int] = Field(
        default=None, description="Battery initial state of charge [%]."
    )
    battery_soc_min: Optional[int] = Field(
        default=None, description="Battery minimum state of charge [%]."
    )
    battery_soc_max: Optional[int] = Field(
        default=None, description="Battery maximum state of charge [%]."
    )
    battery_charge_efficiency: Optional[float] = Field(
        default=None, description="Battery charging efficiency [%]."
    )
    battery_discharge_efficiency: Optional[float] = Field(
        default=None, description="Battery discharging efficiency [%]."
    )
    battery_charge_power_max: Optional[int] = Field(
        default=None, description="Battery maximum charge power [W]."
    )

    # Battery Electric Vehicle
    # ------------------------
    bev_provider: Optional[str] = Field(
        default=None, description="Id of Battery Electric Vehicle simulation provider."
    )
    bev_capacity: Optional[int] = Field(
        default=None, description="Battery Electric Vehicle capacity [Wh]."
    )
    bev_soc_start: Optional[int] = Field(
        default=None, description="Battery Electric Vehicle initial state of charge [%]."
    )
    bev_soc_max: Optional[int] = Field(
        default=None, description="Battery Electric Vehicle maximum state of charge [%]."
    )
    bev_charge_efficiency: Optional[float] = Field(
        default=None, description="Battery Electric Vehicle charging efficiency [%]."
    )
    bev_discharge_efficiency: Optional[float] = Field(
        default=None, description="Battery Electric Vehicle discharging efficiency [%]."
    )
    bev_charge_power_max: Optional[int] = Field(
        default=None, description="Battery Electric Vehicle maximum charge power [W]."
    )

    # Home Appliance - Dish Washer
    # ----------------------------
    dishwasher_provider: Optional[str] = Field(
        default=None, description="Id of Dish Washer simulation provider."
    )
    dishwasher_consumption: Optional[int] = Field(
        default=None, description="Dish Washer energy consumption [Wh]."
    )
    dishwasher_duration: Optional[int] = Field(
        default=None, description="Dish Washer usage duration [h]."
    )

    # PV Inverter
    # -----------
    inverter_provider: Optional[str] = Field(
        default=None, description="Id of PV Inverter simulation provider."
    )
    inverter_power_max: Optional[float] = Field(
        default=None, description="Inverter maximum power [W]."
    )


class Devices(SingletonMixin, DevicesBase):
    # Results of the devices simulation and
    # insights into various parameters over the entire forecast period.
    # -----------------------------------------------------------------
    last_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None, description="The load in watt-hours per hour."
    )
    eauto_soc_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None, description="The state of charge of the EV for each hour."
    )
    einnahmen_euro_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None,
        description="The revenue from grid feed-in or other sources in euros per hour.",
    )
    home_appliance_wh_per_hour: Optional[NDArray[Shape["*"], float]] = Field(
        default=None,
        description="The energy consumption of a household appliance in watt-hours per hour.",
    )
    kosten_euro_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None, description="The costs in euros per hour."
    )
    netzbezug_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None, description="The grid energy drawn in watt-hours per hour."
    )
    netzeinspeisung_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None, description="The energy fed into the grid in watt-hours per hour."
    )
    verluste_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None, description="The losses in watt-hours per hour."
    )
    akku_soc_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
        default=None,
        description="The state of charge of the battery (not the EV) in percentage per hour.",
    )

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def total_balance_euro(self) -> float:
        """The total balance of revenues minus costs in euros."""
        return self.total_revenues_euro - self.total_costs_euro

    @computed_field  # type: ignore[prop-decorator]
    @property
    def total_revenues_euro(self) -> float:
        """The total revenues in euros."""
        if self.einnahmen_euro_pro_stunde is None:
            return 0
        return np.nansum(self.einnahmen_euro_pro_stunde)

    @computed_field  # type: ignore[prop-decorator]
    @property
    def total_costs_euro(self) -> float:
        """The total costs in euros."""
        if self.kosten_euro_pro_stunde is None:
            return 0
        return np.nansum(self.kosten_euro_pro_stunde)

    @computed_field  # type: ignore[prop-decorator]
    @property
    def total_losses_wh(self) -> float:
        """The total losses in watt-hours over the entire period."""
        if self.verluste_wh_pro_stunde is None:
            return 0
        return np.nansum(self.verluste_wh_pro_stunde)

    # Devices
    # TODO: Make devices class a container of device simulation providers.
    # Device simulations to be used are then enabled in the configuration.
    akku: ClassVar[PVAkku] = PVAkku(provider_id="GenericBattery")
    eauto: ClassVar[PVAkku] = PVAkku(provider_id="GenericBEV")
    home_appliance: ClassVar[HomeAppliance] = HomeAppliance(provider_id="GenericDishWasher")
    wechselrichter: ClassVar[Wechselrichter] = Wechselrichter(
        akku=akku, provider_id="GenericInverter"
    )

    def update_data(self) -> None:
        """Update device simulation data."""
        # Assure devices are set up
        self.akku.setup()
        self.eauto.setup()
        self.home_appliance.setup()
        self.wechselrichter.setup()

        # Pre-allocate arrays for the results, optimized for speed
        self.last_wh_pro_stunde = np.full((self.total_hours), np.nan)
        self.netzeinspeisung_wh_pro_stunde = np.full((self.total_hours), np.nan)
        self.netzbezug_wh_pro_stunde = np.full((self.total_hours), np.nan)
        self.kosten_euro_pro_stunde = np.full((self.total_hours), np.nan)
        self.einnahmen_euro_pro_stunde = np.full((self.total_hours), np.nan)
        self.akku_soc_pro_stunde = np.full((self.total_hours), np.nan)
        self.eauto_soc_pro_stunde = np.full((self.total_hours), np.nan)
        self.verluste_wh_pro_stunde = np.full((self.total_hours), np.nan)
        self.home_appliance_wh_per_hour = np.full((self.total_hours), np.nan)

        # Set initial state
        simulation_step = to_duration("1 hour")
        if self.akku:
            self.akku_soc_pro_stunde[0] = self.akku.ladezustand_in_prozent()
        if self.eauto:
            self.eauto_soc_pro_stunde[0] = self.eauto.ladezustand_in_prozent()

        # Get predictions for full device simulation time range
        # gesamtlast[stunde]
        load_total_mean = self.prediction.key_to_array(
            "load_total_mean",
            start_datetime=self.start_datetime,
            end_datetime=self.end_datetime,
            interval=simulation_step,
        )
        # pv_prognose_wh[stunde]
        pvforecast_ac_power = self.prediction.key_to_array(
            "pvforecast_ac_power",
            start_datetime=self.start_datetime,
            end_datetime=self.end_datetime,
            interval=simulation_step,
        )
        # strompreis_euro_pro_wh[stunde]
        elecprice_marketprice = self.prediction.key_to_array(
            "elecprice_marketprice",
            start_datetime=self.start_datetime,
            end_datetime=self.end_datetime,
            interval=simulation_step,
        )
        # einspeiseverguetung_euro_pro_wh_arr[stunde]
        # TODO: Create prediction for einspeiseverguetung_euro_pro_wh_arr
        einspeiseverguetung_euro_pro_wh_arr = np.full((self.total_hours), 0.078)

        for stunde_since_now in range(0, self.total_hours):
            stunde = self.start_datetime.hour + stunde_since_now

            # Accumulate loads and PV generation
            verbrauch = load_total_mean[stunde_since_now]
            self.verluste_wh_pro_stunde[stunde_since_now] = 0.0

            # Home appliances
            if self.home_appliance:
                ha_load = self.home_appliance.get_load_for_hour(stunde)
                verbrauch += ha_load
                self.home_appliance_wh_per_hour[stunde_since_now] = ha_load

            # E-Auto handling
            if self.eauto:
                if self.ev_charge_hours[stunde] > 0:
                    geladene_menge_eauto, verluste_eauto = self.eauto.energie_laden(
                        None, stunde, relative_power=self.ev_charge_hours[stunde]
                    )
                    verbrauch += geladene_menge_eauto
                    self.verluste_wh_pro_stunde[stunde_since_now] += verluste_eauto
                self.eauto_soc_pro_stunde[stunde_since_now] = self.eauto.ladezustand_in_prozent()

            # Process inverter logic
            netzeinspeisung, netzbezug, verluste, eigenverbrauch = (0.0, 0.0, 0.0, 0.0)
            if self.akku:
                self.akku.set_charge_allowed_for_hour(self.dc_charge_hours[stunde], stunde)
            if self.wechselrichter:
                erzeugung = pvforecast_ac_power[stunde]
                netzeinspeisung, netzbezug, verluste, eigenverbrauch = (
                    self.wechselrichter.energie_verarbeiten(erzeugung, verbrauch, stunde)
                )

            # AC PV Battery Charge
            if self.akku and self.ac_charge_hours[stunde] > 0.0:
                self.akku.set_charge_allowed_for_hour(1, stunde)
                geladene_menge, verluste_wh = self.akku.energie_laden(
                    None, stunde, relative_power=self.ac_charge_hours[stunde]
                )
                # print(stunde, " ", geladene_menge, " ", self.ac_charge_hours[stunde], " ", self.akku.ladezustand_in_prozent())
                verbrauch += geladene_menge
                netzbezug += geladene_menge
                self.verluste_wh_pro_stunde[stunde_since_now] += verluste_wh

            self.netzeinspeisung_wh_pro_stunde[stunde_since_now] = netzeinspeisung
            self.netzbezug_wh_pro_stunde[stunde_since_now] = netzbezug
            self.verluste_wh_pro_stunde[stunde_since_now] += verluste
            self.last_wh_pro_stunde[stunde_since_now] = verbrauch

            # Financial calculations
            self.kosten_euro_pro_stunde[stunde_since_now] = (
                netzbezug * self.strompreis_euro_pro_wh[stunde]
            )
            self.einnahmen_euro_pro_stunde[stunde_since_now] = (
                netzeinspeisung * self.einspeiseverguetung_euro_pro_wh_arr[stunde]
            )

            # Akku SOC tracking
            if self.akku:
                self.akku_soc_pro_stunde[stunde_since_now] = self.akku.ladezustand_in_prozent()
            else:
                self.akku_soc_pro_stunde[stunde_since_now] = 0.0

    def report_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Provides devices simulation output as a dictionary."""
|
||||||
|
out: Dict[str, Optional[Union[np.ndarray, float]]] = {
|
||||||
|
"Last_Wh_pro_Stunde": self.last_wh_pro_stunde,
|
||||||
|
"Netzeinspeisung_Wh_pro_Stunde": self.netzeinspeisung_wh_pro_stunde,
|
||||||
|
"Netzbezug_Wh_pro_Stunde": self.netzbezug_wh_pro_stunde,
|
||||||
|
"Kosten_Euro_pro_Stunde": self.kosten_euro_pro_stunde,
|
||||||
|
"akku_soc_pro_stunde": self.akku_soc_pro_stunde,
|
||||||
|
"Einnahmen_Euro_pro_Stunde": self.einnahmen_euro_pro_stunde,
|
||||||
|
"Gesamtbilanz_Euro": self.total_balance_euro,
|
||||||
|
"EAuto_SoC_pro_Stunde": self.eauto_soc_pro_stunde,
|
||||||
|
"Gesamteinnahmen_Euro": self.total_revenues_euro,
|
||||||
|
"Gesamtkosten_Euro": self.total_costs_euro,
|
||||||
|
"Verluste_Pro_Stunde": self.verluste_wh_pro_stunde,
|
||||||
|
"Gesamt_Verluste": self.total_losses_wh,
|
||||||
|
"Home_appliance_wh_per_hour": self.home_appliance_wh_per_hour,
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
# Initialize the Devices simulation, it is a singleton.
|
||||||
|
devices = Devices()
|
||||||
|
|
||||||
|
|
||||||
|
def get_devices() -> Devices:
|
||||||
|
"""Gets the EOS Devices simulation."""
|
||||||
|
return devices
|
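Note: a minimal usage sketch of the singleton above, assuming an initialised EOS configuration and populated predictions; the report keys are the ones emitted by report_dict().

devices = get_devices()
devices.update_data()           # run the hourly device simulation
report = devices.report_dict()  # numpy arrays plus float totals
balance = report["Gesamtbilanz_Euro"]  # total balance = revenues - costs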
src/akkudoktoreos/devices/devicesabc.py (new file, 100 lines)
@ -0,0 +1,100 @@
"""Abstract and base classes for devices."""

from typing import Optional

from pendulum import DateTime
from pydantic import ConfigDict, computed_field

from akkudoktoreos.core.coreabc import (
    ConfigMixin,
    EnergyManagementSystemMixin,
    PredictionMixin,
)
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
    """A mixin to manage start and end datetimes for devices data.

    The starting datetime for devices data generation is provided by the energy management
    system. Device data cannot be computed if this value is `None`.
    """

    # Computed fields for end_datetime and total_hours
    @computed_field  # type: ignore[prop-decorator]
    @property
    def end_datetime(self) -> Optional[DateTime]:
        """Compute the end datetime based on the `start_datetime` and `prediction_hours`.

        Adjusts the calculated end time if DST transitions occur within the prediction window.

        Returns:
            Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
        """
        if self.ems.start_datetime and self.config.prediction_hours:
            end_datetime = self.ems.start_datetime + to_duration(
                f"{self.config.prediction_hours} hours"
            )
            dst_change = end_datetime.offset_hours - self.ems.start_datetime.offset_hours
            logger.debug(
                f"Pre: {self.ems.start_datetime}..{end_datetime}: DST change: {dst_change}"
            )
            if dst_change < 0:
                end_datetime = end_datetime + to_duration(f"{abs(int(dst_change))} hours")
            elif dst_change > 0:
                end_datetime = end_datetime - to_duration(f"{abs(int(dst_change))} hours")
            logger.debug(
                f"Pst: {self.ems.start_datetime}..{end_datetime}: DST change: {dst_change}"
            )
            return end_datetime
        return None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def total_hours(self) -> Optional[int]:
        """Compute the hours from `start_datetime` to `end_datetime`.

        Returns:
            Optional[int]: The duration in hours, or `None` if either datetime is unavailable.
        """
        end_dt = self.end_datetime
        if end_dt is None:
            return None
        duration = end_dt - self.ems.start_datetime
        return int(duration.total_hours())


class DeviceBase(DevicesStartEndMixin, PredictionMixin):
    """Base class for device simulations.

    Enables access to EOS configuration data (attribute `config`) and EOS prediction data
    (attribute `prediction`).

    Note:
        Validation on assignment of the Pydantic model is disabled to speed up simulation runs.
    """

    # Disable validation on assignment to speed up simulation runs.
    model_config = ConfigDict(
        validate_assignment=False,
    )


class DevicesBase(DevicesStartEndMixin, PredictionMixin, PydanticBaseModel):
    """Base class for handling device data.

    Enables access to EOS configuration data (attribute `config`) and EOS prediction data
    (attribute `prediction`).

    Note:
        Validation on assignment of the Pydantic model is disabled to speed up simulation runs.
    """

    # Disable validation on assignment to speed up simulation runs.
    model_config = ConfigDict(
        validate_assignment=False,
    )
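Note: a standalone sketch of the DST compensation performed by end_datetime above, assuming pendulum and the Europe/Berlin timezone (values illustrative). Crossing the spring-forward gap makes the naive end land one wall-clock hour later, so one hour is subtracted, mirroring the dst_change branch.

import pendulum

start = pendulum.datetime(2024, 3, 30, 12, tz="Europe/Berlin")  # CET, UTC+1
end = start.add(hours=48)                           # lands in CEST, UTC+2
dst_change = end.offset_hours - start.offset_hours  # 2 - 1 = 1
if dst_change > 0:
    end = end.subtract(hours=int(abs(dst_change)))  # back to 12:00 local time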
src/akkudoktoreos/devices/generic.py
@ -1,6 +1,13 @@
+from typing import Optional
+
 import numpy as np
 from pydantic import BaseModel, Field

+from akkudoktoreos.devices.devicesabc import DeviceBase
+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)
+

 class HomeApplianceParameters(BaseModel):
     consumption_wh: int = Field(
@ -13,21 +20,57 @@ class HomeApplianceParameters(BaseModel):
     )


-class HomeAppliance:
-    def __init__(self, parameters: HomeApplianceParameters, hours: int = 24):
-        self.hours = hours  # Total duration for which the planning is done
-        self.consumption_wh = (
-            parameters.consumption_wh
-        )  # Total energy consumption of the device in kWh
-        self.duration_h = parameters.duration_h  # Duration of use in hours
+class HomeAppliance(DeviceBase):
+    def __init__(
+        self,
+        parameters: Optional[HomeApplianceParameters] = None,
+        hours: Optional[int] = 24,
+        provider_id: Optional[str] = None,
+    ):
+        # Configuration initialisation
+        self.provider_id = provider_id
+        self.prefix = "<invalid>"
+        if self.provider_id == "GenericDishWasher":
+            self.prefix = "dishwasher"
+        # Parameter initialisation
+        self.parameters = parameters
+        if hours is None:
+            self.hours = self.total_hours
+        else:
+            self.hours = hours
+
+        self.initialised = False
+        # Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
+        if self.parameters is not None:
+            self.setup()
+
+    def setup(self) -> None:
+        if self.initialised:
+            return
+        if self.provider_id is not None:
+            # Setup by configuration
+            self.hours = self.total_hours
+            self.consumption_wh = getattr(self.config, f"{self.prefix}_consumption")
+            self.duration_h = getattr(self.config, f"{self.prefix}_duration")
+        elif self.parameters is not None:
+            # Setup by parameters
+            self.consumption_wh = (
+                self.parameters.consumption_wh
+            )  # Total energy consumption of the device in Wh
+            self.duration_h = self.parameters.duration_h  # Duration of use in hours
+        else:
+            error_msg = "Parameters and provider ID missing. Can't instantiate."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
         self.load_curve = np.zeros(self.hours)  # Initialize the load curve with zeros
+        self.initialised = True

     def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
         """Sets the start time of the device and generates the corresponding load curve.

         :param start_hour: The hour at which the device should start.
         """
-        self.reset()
+        self.reset_load_curve()
         # Check if the duration of use is within the available time frame
         if start_hour + self.duration_h > self.hours:
             raise ValueError("The duration of use exceeds the available time frame.")
@ -40,7 +83,7 @@ class HomeAppliance:
         # Set the power for the duration of use in the load curve array
         self.load_curve[start_hour : start_hour + self.duration_h] = power_per_hour

-    def reset(self) -> None:
+    def reset_load_curve(self) -> None:
         """Resets the load curve."""
         self.load_curve = np.zeros(self.hours)
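Note: after this change the dishwasher simulation can be built two ways (a sketch; the config attribute names dishwasher_consumption and dishwasher_duration are inferred from the getattr() calls above, and the parameter values are illustrative).

from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters

# 1. Explicit parameters; setup() runs immediately:
dishwasher = HomeAppliance(
    parameters=HomeApplianceParameters(consumption_wh=2000, duration_h=3),
    hours=48,
)

# 2. Configuration-driven; setup() must be called once the config is initialised:
dishwasher = HomeAppliance(provider_id="GenericDishWasher")
dishwasher.setup()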
src/akkudoktoreos/devices/inverter.py
@ -1,19 +1,61 @@
+from typing import Optional
+
 from pydantic import BaseModel, Field

 from akkudoktoreos.devices.battery import PVAkku
+from akkudoktoreos.devices.devicesabc import DeviceBase
+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)


 class WechselrichterParameters(BaseModel):
     max_leistung_wh: float = Field(default=10000, gt=0)


-class Wechselrichter:
-    def __init__(self, parameters: WechselrichterParameters, akku: PVAkku):
-        self.max_leistung_wh = (
-            parameters.max_leistung_wh  # Maximum power that the inverter can handle
-        )
+class Wechselrichter(DeviceBase):
+    def __init__(
+        self,
+        parameters: Optional[WechselrichterParameters] = None,
+        akku: Optional[PVAkku] = None,
+        provider_id: Optional[str] = None,
+    ):
+        # Configuration initialisation
+        self.provider_id = provider_id
+        self.prefix = "<invalid>"
+        if self.provider_id == "GenericInverter":
+            self.prefix = "inverter"
+        # Parameter initialisation
+        self.parameters = parameters
+        if akku is None:
+            # For the moment raise exception
+            # TODO: Make akku configurable by config
+            error_msg = "Battery for PV inverter is mandatory."
+            logger.error(error_msg)
+            raise NotImplementedError(error_msg)
         self.akku = akku  # Connection to a battery object
+
+        self.initialised = False
+        # Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
+        if self.parameters is not None:
+            self.setup()
+
+    def setup(self) -> None:
+        if self.initialised:
+            return
+        if self.provider_id is not None:
+            # Setup by configuration
+            self.max_leistung_wh = getattr(self.config, f"{self.prefix}_power_max")
+        elif self.parameters is not None:
+            # Setup by parameters
+            self.max_leistung_wh = (
+                self.parameters.max_leistung_wh  # Maximum power that the inverter can handle
+            )
+        else:
+            error_msg = "Parameters and provider ID missing. Can't instantiate."
+            logger.error(error_msg)
+            raise ValueError(error_msg)

     def energie_verarbeiten(
         self, erzeugung: float, verbrauch: float, hour: int
     ) -> tuple[float, float, float, float]:
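Note: a construction sketch for the refactored inverter; the battery stays mandatory, as the NotImplementedError above enforces (parameter values illustrative).

from akkudoktoreos.devices.battery import PVAkku
from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters

akku = PVAkku(provider_id="GenericBattery")
wr = Wechselrichter(
    parameters=WechselrichterParameters(max_leistung_wh=10000),
    akku=akku,
)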
src/akkudoktoreos/optimization/genetic.py
@ -6,7 +6,12 @@ from deap import algorithms, base, creator, tools
 from pydantic import BaseModel, Field, field_validator, model_validator
 from typing_extensions import Self

-from akkudoktoreos.config import AppConfig
+from akkudoktoreos.core.coreabc import (
+    ConfigMixin,
+    DevicesMixin,
+    EnergyManagementSystemMixin,
+)
+from akkudoktoreos.core.ems import EnergieManagementSystemParameters, SimulationResult
 from akkudoktoreos.devices.battery import (
     EAutoParameters,
     EAutoResult,
@ -15,11 +20,6 @@ from akkudoktoreos.devices.battery import (
 )
 from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
 from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
-from akkudoktoreos.prediction.ems import (
-    EnergieManagementSystem,
-    EnergieManagementSystemParameters,
-    SimulationResult,
-)
 from akkudoktoreos.utils.utils import NumpyEncoder
 from akkudoktoreos.visualize import visualisiere_ergebnisse

@ -97,20 +97,16 @@ class OptimizeResponse(BaseModel):
         return field


-class optimization_problem:
+class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixin):
     def __init__(
         self,
-        config: AppConfig,
         verbose: bool = False,
         fixed_seed: Optional[int] = None,
     ):
         """Initialize the optimization problem with the required parameters."""
-        self._config = config
-        self.prediction_hours = config.eos.prediction_hours
-        self.strafe = config.eos.penalty
         self.opti_param: dict[str, Any] = {}
-        self.fixed_eauto_hours = config.eos.prediction_hours - config.eos.optimization_hours
-        self.possible_charge_values = config.eos.available_charging_rates_in_percentage
+        self.fixed_eauto_hours = self.config.prediction_hours - self.config.optimization_hours
+        self.possible_charge_values = self.config.optimization_ev_available_charge_rates_percent
         self.verbose = verbose
         self.fix_seed = fixed_seed
         self.optimize_ev = True
@ -121,7 +117,7 @@ class optimization_problem:
         random.seed(fixed_seed)

     def decode_charge_discharge(
-        self, discharge_hours_bin: list[int]
+        self, discharge_hours_bin: list[float]
     ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
         """Decode the input array `discharge_hours_bin` into three separate arrays for AC charging, DC charging, and discharge.

@ -182,7 +178,7 @@ class optimization_problem:
         """
         # Step 1: Mutate the charge/discharge states (idle, discharge, AC charge, DC charge)
         # Extract the relevant part of the individual for prediction hours, which represents the charge/discharge behavior.
-        charge_discharge_part = individual[: self.prediction_hours]
+        charge_discharge_part = individual[: self.config.prediction_hours]

         # Apply the mutation to the charge/discharge part
         (charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)
@ -200,23 +196,27 @@ class optimization_problem:
         # applying additional constraints or penalties, or keeping track of charging limits.

         # Reassign the mutated values back to the individual
-        individual[: self.prediction_hours] = charge_discharge_mutated
+        individual[: self.config.prediction_hours] = charge_discharge_mutated

         # Step 2: Mutate EV charging schedule if enabled
         if self.optimize_ev:
             # Extract the relevant part for EV charging schedule
-            ev_charge_part = individual[self.prediction_hours : self.prediction_hours * 2]
+            ev_charge_part = individual[
+                self.config.prediction_hours : self.config.prediction_hours * 2
+            ]

             # Apply mutation on the EV charging schedule
             (ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)

             # Ensure the EV does not charge during fixed hours (set those hours to 0)
-            ev_charge_part_mutated[self.prediction_hours - self.fixed_eauto_hours :] = [
+            ev_charge_part_mutated[self.config.prediction_hours - self.fixed_eauto_hours :] = [
                 0
             ] * self.fixed_eauto_hours

             # Reassign the mutated EV charging part back to the individual
-            individual[self.prediction_hours : self.prediction_hours * 2] = ev_charge_part_mutated
+            individual[self.config.prediction_hours : self.config.prediction_hours * 2] = (
+                ev_charge_part_mutated
+            )

         # Step 3: Mutate appliance start times if household appliances are part of the optimization
         if self.opti_param["home_appliance"] > 0:
@ -235,13 +235,13 @@ class optimization_problem:
     def create_individual(self) -> list[int]:
         # Start with discharge states for the individual
         individual_components = [
-            self.toolbox.attr_discharge_state() for _ in range(self.prediction_hours)
+            self.toolbox.attr_discharge_state() for _ in range(self.config.prediction_hours)
         ]

         # Add EV charge index values if optimize_ev is True
         if self.optimize_ev:
             individual_components += [
-                self.toolbox.attr_ev_charge_index() for _ in range(self.prediction_hours)
+                self.toolbox.attr_ev_charge_index() for _ in range(self.config.prediction_hours)
             ]

         # Add the start time of the household appliance if it's being optimized
@ -251,8 +251,8 @@ class optimization_problem:
         return creator.Individual(individual_components)

     def split_individual(
-        self, individual: list[int]
-    ) -> tuple[list[int], Optional[list[int]], Optional[int]]:
+        self, individual: list[float]
+    ) -> tuple[list[float], Optional[list[float]], Optional[int]]:
         """Split the individual solution into its components.

         Components:
@ -260,9 +260,9 @@ class optimization_problem:
         2. Electric vehicle charge hours (float),
         3. Dishwasher start time (integer if applicable).
         """
-        discharge_hours_bin = individual[: self.prediction_hours]
+        discharge_hours_bin = individual[: self.config.prediction_hours]
         eautocharge_hours_index = (
-            individual[self.prediction_hours : self.prediction_hours * 2]
+            individual[self.config.prediction_hours : self.config.prediction_hours * 2]
             if self.optimize_ev
             else None
         )
@ -299,7 +299,7 @@ class optimization_problem:
             "attr_ev_charge_index",
             random.randint,
             0,
-            len(self._config.eos.available_charging_rates_in_percentage) - 1,
+            len(self.config.optimization_ev_available_charge_rates_percent) - 1,
         )
         self.toolbox.register("attr_int", random.randint, start_hour, 23)

@ -325,7 +325,7 @@ class optimization_problem:
             "mutate_ev_charge_index",
             tools.mutUniformInt,
             low=0,
-            up=len(self._config.eos.available_charging_rates_in_percentage) - 1,
+            up=len(self.config.optimization_ev_available_charge_rates_percent) - 1,
             indpb=0.2,
         )
         # - Start hour mutation for household devices
@ -336,49 +336,51 @@ class optimization_problem:

         self.toolbox.register("select", tools.selTournament, tournsize=3)

-    def evaluate_inner(
-        self, individual: list[int], ems: EnergieManagementSystem, start_hour: int
-    ) -> dict[str, Any]:
+    def evaluate_inner(self, individual: list[float]) -> dict[str, Any]:
         """Simulates the energy management system (EMS) using the provided individual solution.

         This is an internal function.
         """
-        ems.reset()
+        self.ems.reset()
         discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
             individual
         )
-        if washingstart_int is not None:
-            ems.set_home_appliance_start(washingstart_int, global_start_hour=start_hour)
+        if self.opti_param.get("home_appliance", 0) > 0:
+            self.ems.set_home_appliance_start(
+                washingstart_int, global_start_hour=self.ems.start_datetime.hour
+            )

         ac, dc, discharge = self.decode_charge_discharge(discharge_hours_bin)

-        ems.set_akku_discharge_hours(discharge)
+        self.ems.set_akku_discharge_hours(discharge)
         # Set DC charge hours only if DC optimization is enabled
         if self.optimize_dc_charge:
-            ems.set_akku_dc_charge_hours(dc)
-        ems.set_akku_ac_charge_hours(ac)
+            self.ems.set_akku_dc_charge_hours(dc)
+        self.ems.set_akku_ac_charge_hours(ac)

         if eautocharge_hours_index is not None:
-            eautocharge_hours_float = [
-                self._config.eos.available_charging_rates_in_percentage[i]
-                for i in eautocharge_hours_index
-            ]
-            ems.set_ev_charge_hours(np.array(eautocharge_hours_float))
+            eautocharge_hours_float = np.array(
+                [
+                    self.config.optimization_ev_available_charge_rates_percent[i]
+                    for i in eautocharge_hours_index
+                ],
+                float,
+            )
+            self.ems.set_ev_charge_hours(eautocharge_hours_float)
         else:
-            ems.set_ev_charge_hours(np.full(self.prediction_hours, 0))
-        return ems.simuliere(start_hour)
+            self.ems.set_ev_charge_hours(np.full(self.config.prediction_hours, 0.0))
+        return self.ems.simuliere(self.ems.start_datetime.hour)

     def evaluate(
         self,
-        individual: list[int],
-        ems: EnergieManagementSystem,
+        individual: list[float],
         parameters: OptimizationParameters,
         start_hour: int,
         worst_case: bool,
     ) -> Tuple[float]:
         """Evaluate the fitness of an individual solution based on the simulation results."""
         try:
-            o = self.evaluate_inner(individual, ems, start_hour)
+            o = self.evaluate_inner(individual)
         except Exception as e:
             return (100000.0,)  # Return a high penalty in case of an exception

@ -388,26 +390,28 @@ class optimization_problem:

         # Small Penalty for not discharging
         gesamtbilanz += sum(
-            0.01 for i in range(self.prediction_hours) if discharge_hours_bin[i] == 0.0
+            0.01 for i in range(self.config.prediction_hours) if discharge_hours_bin[i] == 0.0
         )

         # Penalty for not meeting the minimum SOC (State of Charge) requirement
         # if parameters.eauto_min_soc_prozent - ems.eauto.ladezustand_in_prozent() <= 0.0 and self.optimize_ev:
         # gesamtbilanz += sum(
-        # self.strafe for ladeleistung in eautocharge_hours_index if ladeleistung != 0.0
+        # self.config.optimization_penalty for ladeleistung in eautocharge_hours_float if ladeleistung != 0.0
         # )

         individual.extra_data = (  # type: ignore[attr-defined]
             o["Gesamtbilanz_Euro"],
             o["Gesamt_Verluste"],
-            parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()
-            if parameters.eauto and ems.eauto
+            parameters.eauto.min_soc_prozent - self.ems.eauto.ladezustand_in_prozent()
+            if parameters.eauto and self.ems.eauto
             else 0,
         )

         # Adjust total balance with battery value and penalties for unmet SOC

-        restwert_akku = ems.akku.aktueller_energieinhalt() * parameters.ems.preis_euro_pro_wh_akku
+        restwert_akku = (
+            self.ems.akku.aktueller_energieinhalt() * parameters.ems.preis_euro_pro_wh_akku
+        )
         # print(ems.akku.aktueller_energieinhalt()," * ", parameters.ems.preis_euro_pro_wh_akku , " ", restwert_akku, " ", gesamtbilanz)
         gesamtbilanz += -restwert_akku
         # print(gesamtbilanz)
@ -415,11 +419,11 @@ class optimization_problem:
         gesamtbilanz += max(
             0,
             (
-                parameters.eauto.min_soc_prozent - ems.eauto.ladezustand_in_prozent()
-                if parameters.eauto and ems.eauto
+                parameters.eauto.min_soc_prozent - self.ems.eauto.ladezustand_in_prozent()
+                if parameters.eauto and self.ems.eauto
                 else 0
             )
-            * self.strafe,
+            * self.config.optimization_penalty,
         )

         return (gesamtbilanz,)
@ -468,29 +472,32 @@ class optimization_problem:
     def optimierung_ems(
         self,
         parameters: OptimizationParameters,
-        start_hour: int,
+        start_hour: Optional[int] = None,
         worst_case: bool = False,
         ngen: int = 600,
     ) -> OptimizeResponse:
         """Perform EMS (Energy Management System) optimization and visualize results."""
+        if start_hour is None:
+            start_hour = self.ems.start_datetime.hour
+
         einspeiseverguetung_euro_pro_wh = np.full(
-            self.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
+            self.config.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
         )

         # Initialize PV and EV batteries
         akku = PVAkku(
             parameters.pv_akku,
-            hours=self.prediction_hours,
+            hours=self.config.prediction_hours,
         )
-        akku.set_charge_per_hour(np.full(self.prediction_hours, 1))
+        akku.set_charge_per_hour(np.full(self.config.prediction_hours, 1))

         eauto: Optional[PVAkku] = None
         if parameters.eauto:
             eauto = PVAkku(
                 parameters.eauto,
-                hours=self.prediction_hours,
+                hours=self.config.prediction_hours,
             )
-            eauto.set_charge_per_hour(np.full(self.prediction_hours, 1))
+            eauto.set_charge_per_hour(np.full(self.config.prediction_hours, 1))
             self.optimize_ev = (
                 parameters.eauto.min_soc_prozent - parameters.eauto.start_soc_prozent >= 0
             )
@ -501,7 +508,7 @@ class optimization_problem:
         dishwasher = (
             HomeAppliance(
                 parameters=parameters.dishwasher,
-                hours=self.prediction_hours,
+                hours=self.config.prediction_hours,
             )
             if parameters.dishwasher is not None
             else None
@ -509,30 +516,30 @@ class optimization_problem:

         # Initialize the inverter and energy management system
         wr = Wechselrichter(parameters.wechselrichter, akku)
-        ems = EnergieManagementSystem(
-            self._config.eos,
+        self.ems.set_parameters(
             parameters.ems,
             wechselrichter=wr,
             eauto=eauto,
             home_appliance=dishwasher,
         )
+        self.ems.set_start_hour(start_hour)

         # Setup the DEAP environment and optimization process
         self.setup_deap_environment({"home_appliance": 1 if dishwasher else 0}, start_hour)
         self.toolbox.register(
             "evaluate",
-            lambda ind: self.evaluate(ind, ems, parameters, start_hour, worst_case),
+            lambda ind: self.evaluate(ind, parameters, start_hour, worst_case),
         )
         start_solution, extra_data = self.optimize(parameters.start_solution, ngen=ngen)

         # Perform final evaluation on the best solution
-        o = self.evaluate_inner(start_solution, ems, start_hour)
+        o = self.evaluate_inner(start_solution)
         discharge_hours_bin, eautocharge_hours_index, washingstart_int = self.split_individual(
             start_solution
         )
         eautocharge_hours_float = (
             [
-                self._config.eos.available_charging_rates_in_percentage[i]
+                self.config.optimization_ev_available_charge_rates_percent[i]
                 for i in eautocharge_hours_index
             ]
             if eautocharge_hours_index is not None
@ -552,7 +559,6 @@ class optimization_problem:
             parameters.temperature_forecast,
             start_hour,
             einspeiseverguetung_euro_pro_wh,
-            config=self._config,
             extra_data=extra_data,
         )

@ -563,7 +569,7 @@ class optimization_problem:
             "discharge_allowed": discharge,
             "eautocharge_hours_float": eautocharge_hours_float,
             "result": SimulationResult(**o),
-            "eauto_obj": ems.eauto,
+            "eauto_obj": self.ems.eauto,
             "start_solution": start_solution,
             "washingstart": washingstart_int,
         }
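Note: the slicing in split_individual() implies the following GA genome layout (a sketch; prediction_hours comes from the config, and the trailing gene exists only when a home appliance is optimized).

prediction_hours = 48  # illustrative
individual = [0] * prediction_hours + [3] * prediction_hours + [14]
discharge_part = individual[:prediction_hours]                 # charge/discharge states
ev_part = individual[prediction_hours : prediction_hours * 2]  # EV charge rate indices
washing_start = individual[-1]                                 # appliance start hour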
src/akkudoktoreos/optimization/optimization.py (new file, 42 lines)
@ -0,0 +1,42 @@
from typing import List, Optional

from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class OptimizationCommonSettings(SettingsBaseModel):
    """Base configuration for optimization settings.

    Attributes:
        optimization_hours (int): Number of hours into the future for optimizations.
        optimization_penalty (int): Penalty factor used in optimization.
        optimization_ev_available_charge_rates_percent (List[float]): Charge rates
            available for the EV in percent of maximum charge.
    """

    optimization_hours: Optional[int] = Field(
        default=24, ge=0, description="Number of hours into the future for optimizations."
    )

    optimization_penalty: Optional[int] = Field(
        default=10, description="Penalty factor used in optimization."
    )

    optimization_ev_available_charge_rates_percent: Optional[List[float]] = Field(
        default=[
            0.0,
            6.0 / 16.0,
            # 7.0 / 16.0,
            8.0 / 16.0,
            # 9.0 / 16.0,
            10.0 / 16.0,
            # 11.0 / 16.0,
            12.0 / 16.0,
            # 13.0 / 16.0,
            14.0 / 16.0,
            # 15.0 / 16.0,
            1.0,
        ],
        description="Charge rates available for the EV in percent of maximum charge.",
    )
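Note: the fractional defaults are sixteenths, which reads as current steps of a 16 A charging circuit (an assumption; the field itself only defines fractions of maximum charge). A sketch of how the optimizer turns a gene index into charge power:

rates = [0.0, 6 / 16, 8 / 16, 10 / 16, 12 / 16, 14 / 16, 1.0]  # the defaults above
max_charge_w = 11000   # illustrative EV charger maximum
gene_index = 2         # chosen by the GA via attr_ev_charge_index / mutate_ev_charge_index
power_w = rates[gene_index] * max_charge_w  # 0.5 * 11000 = 5500 W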
src/akkudoktoreos/optimization/optimizationabc.py (new file, 25 lines)
@ -0,0 +1,25 @@
"""Abstract and base classes for optimization."""

from pydantic import ConfigDict

from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin
from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class OptimizationBase(ConfigMixin, PredictionMixin, PydanticBaseModel):
    """Base class for handling optimization data.

    Enables access to EOS configuration data (attribute `config`) and EOS prediction data
    (attribute `prediction`).

    Note:
        Validation on assignment of the Pydantic model is disabled to speed up optimization runs.
    """

    # Disable validation on assignment to speed up optimization runs.
    model_config = ConfigDict(
        validate_assignment=False,
    )
|
11
src/akkudoktoreos/prediction/elecprice.py
Normal file
11
src/akkudoktoreos/prediction/elecprice.py
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class ElecPriceCommonSettings(SettingsBaseModel):
|
||||||
|
elecprice_provider: Optional[str] = Field(
|
||||||
|
"ElecPriceAkkudoktor", description="Electicity price provider id of provider to be used."
|
||||||
|
)
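Note: provider selection is now plain settings data (a sketch; "ElecPriceImport" is the provider id defined further below).

settings = ElecPriceCommonSettings(elecprice_provider="ElecPriceImport")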
|
62
src/akkudoktoreos/prediction/elecpriceabc.py
Normal file
62
src/akkudoktoreos/prediction/elecpriceabc.py
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
"""Abstract and base classes for electricity price predictions.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- Ensure appropriate API keys or configurations are set up if required by external data sources.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import abstractmethod
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
|
||||||
|
from akkudoktoreos.utils.logutil import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ElecPriceDataRecord(PredictionRecord):
|
||||||
|
"""Represents a electricity price data record containing various price attributes at a specific datetime.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
date_time (Optional[AwareDatetime]): The datetime of the record.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
elecprice_marketprice: Optional[float] = Field(
|
||||||
|
None, description="Electricity market price (€/KWh)"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ElecPriceProvider(PredictionProvider):
|
||||||
|
"""Abstract base class for electricity price providers.
|
||||||
|
|
||||||
|
WeatherProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||||
|
|
||||||
|
Configuration variables:
|
||||||
|
electricity price_provider (str): Prediction provider for electricity price.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
|
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
|
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||||
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||||
|
calculated based on `start_datetime` and `prediction_hours`.
|
||||||
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||||
|
based on `start_datetime` and `prediction_historic_hours`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# overload
|
||||||
|
records: List[ElecPriceDataRecord] = Field(
|
||||||
|
default_factory=list, description="List of ElecPriceDataRecord records"
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@abstractmethod
|
||||||
|
def provider_id(cls) -> str:
|
||||||
|
return "ElecPriceProvider"
|
||||||
|
|
||||||
|
def enabled(self) -> bool:
|
||||||
|
return self.provider_id() == self.config.elecprice_provider
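Note: a sketch of the resulting provider contract with a hypothetical subclass; a concrete provider only reports its id, and enabled() compares that id against the configured elecprice_provider (a real provider would also implement _update_data()).

class ElecPriceFixed(ElecPriceProvider):  # hypothetical provider
    @classmethod
    def provider_id(cls) -> str:
        return "ElecPriceFixed"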
|
164
src/akkudoktoreos/prediction/elecpriceakkudoktor.py
Normal file
164
src/akkudoktoreos/prediction/elecpriceakkudoktor.py
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
"""Retrieves and processes electricity price forecast data from Akkudoktor.
|
||||||
|
|
||||||
|
This module provides classes and mappings to manage electricity price data obtained from the
|
||||||
|
Akkudoktor API, including support for various electricity price attributes such as temperature,
|
||||||
|
humidity, cloud cover, and solar irradiance. The data is mapped to the `ElecPriceDataRecord`
|
||||||
|
format, enabling consistent access to forecasted and historical electricity price attributes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, List, Optional, Union
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from pydantic import ValidationError
|
||||||
|
|
||||||
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
|
from akkudoktoreos.prediction.elecpriceabc import ElecPriceDataRecord, ElecPriceProvider
|
||||||
|
from akkudoktoreos.utils.cacheutil import cache_in_file
|
||||||
|
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
|
||||||
|
from akkudoktoreos.utils.logutil import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AkkudoktorElecPriceMeta(PydanticBaseModel):
|
||||||
|
start_timestamp: int
|
||||||
|
end_timestamp: int
|
||||||
|
start: str
|
||||||
|
end: str
|
||||||
|
|
||||||
|
|
||||||
|
class AkkudoktorElecPriceValue(PydanticBaseModel):
|
||||||
|
start_timestamp: int
|
||||||
|
end_timestamp: int
|
||||||
|
start: str
|
||||||
|
end: str
|
||||||
|
marketprice: float
|
||||||
|
unit: str
|
||||||
|
marketpriceEurocentPerKWh: float
|
||||||
|
|
||||||
|
|
||||||
|
class AkkudoktorElecPrice(PydanticBaseModel):
|
||||||
|
meta: AkkudoktorElecPriceMeta
|
||||||
|
values: List[AkkudoktorElecPriceValue]
|
||||||
|
|
||||||
|
|
||||||
|
class ElecPriceAkkudoktor(ElecPriceProvider):
|
||||||
|
"""Fetch and process electricity price forecast data from Akkudoktor.
|
||||||
|
|
||||||
|
ElecPriceAkkudoktor is a singleton-based class that retrieves electricity price forecast data
|
||||||
|
from the Akkudoktor API and maps it to `ElecPriceDataRecord` fields, applying
|
||||||
|
any necessary scaling or unit corrections. It manages the forecast over a range
|
||||||
|
of hours into the future and retains historical data.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
||||||
|
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
||||||
|
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||||
|
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
||||||
|
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
||||||
|
|
||||||
|
Methods:
|
||||||
|
provider_id(): Returns a unique identifier for the provider.
|
||||||
|
_request_forecast(): Fetches the forecast from the Akkudoktor API.
|
||||||
|
_update_data(): Processes and updates forecast data from Akkudoktor in ElecPriceDataRecord format.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def provider_id(cls) -> str:
|
||||||
|
"""Return the unique identifier for the Akkudoktor provider."""
|
||||||
|
return "Akkudoktor"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _validate_data(cls, json_str: Union[bytes, Any]) -> AkkudoktorElecPrice:
|
||||||
|
"""Validate Akkudoktor Electricity Price forecast data."""
|
||||||
|
try:
|
||||||
|
akkudoktor_data = AkkudoktorElecPrice.model_validate_json(json_str)
|
||||||
|
except ValidationError as e:
|
||||||
|
error_msg = ""
|
||||||
|
for error in e.errors():
|
||||||
|
field = " -> ".join(str(x) for x in error["loc"])
|
||||||
|
message = error["msg"]
|
||||||
|
error_type = error["type"]
|
||||||
|
error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
|
||||||
|
logger.error(f"Akkudoktor schema change: {error_msg}")
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
return akkudoktor_data
|
||||||
|
|
||||||
|
@cache_in_file(with_ttl="1 hour")
|
||||||
|
def _request_forecast(self) -> AkkudoktorElecPrice:
|
||||||
|
"""Fetch electricity price forecast data from Akkudoktor API.
|
||||||
|
|
||||||
|
This method sends a request to Akkudoktor's API to retrieve forecast data for a specified
|
||||||
|
date range. The response data is parsed and returned as JSON for further processing.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: The parsed JSON response from Akkudoktor API containing forecast data.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If the API response does not include expected `electricity price` data.
|
||||||
|
"""
|
||||||
|
source = "https://api.akkudoktor.net"
|
||||||
|
date = to_datetime(self.start_datetime, as_string="%Y-%m-%d")
|
||||||
|
last_date = to_datetime(self.end_datetime, as_string="%Y-%m-%d")
|
||||||
|
response = requests.get(
|
||||||
|
f"{source}/prices?date={date}&last_date={last_date}&tz={self.config.timezone}"
|
||||||
|
)
|
||||||
|
response.raise_for_status() # Raise an error for bad responses
|
||||||
|
logger.debug(f"Response from {source}: {response}")
|
||||||
|
akkudoktor_data = self._validate_data(response.content)
|
||||||
|
# We are working on fresh data (no cache), report update time
|
||||||
|
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
|
||||||
|
return akkudoktor_data
|
||||||
|
|
||||||
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
|
"""Update forecast data in the ElecPriceDataRecord format.
|
||||||
|
|
||||||
|
Retrieves data from Akkudoktor, maps each Akkudoktor field to the corresponding
|
||||||
|
`ElecPriceDataRecord` and applies any necessary scaling.
|
||||||
|
|
||||||
|
The final mapped and processed data is inserted into the sequence as `ElecPriceDataRecord`.
|
||||||
|
"""
|
||||||
|
# Get Akkudoktor electricity price data
|
||||||
|
akkudoktor_data = self._request_forecast(force_update=force_update) # type: ignore
|
||||||
|
|
||||||
|
# Assumption that all lists are the same length and are ordered chronologically
|
||||||
|
# in ascending order and have the same timestamps.
|
||||||
|
values_len = len(akkudoktor_data.values)
|
||||||
|
if values_len < 1:
|
||||||
|
# Expect one value set per prediction hour
|
||||||
|
raise ValueError(
|
||||||
|
f"The forecast must have at least one dataset, "
|
||||||
|
f"but only {values_len} data sets are given in forecast data."
|
||||||
|
)
|
||||||
|
|
||||||
|
previous_price = akkudoktor_data.values[0].marketpriceEurocentPerKWh
|
||||||
|
for i in range(values_len):
|
||||||
|
original_datetime = akkudoktor_data.values[i].start
|
||||||
|
dt = to_datetime(original_datetime, in_timezone=self.config.timezone)
|
||||||
|
|
||||||
|
if compare_datetimes(dt, self.start_datetime).le:
|
||||||
|
# forecast data is too old
|
||||||
|
previous_price = akkudoktor_data.values[i].marketpriceEurocentPerKWh
|
||||||
|
continue
|
||||||
|
|
||||||
|
record = ElecPriceDataRecord(
|
||||||
|
date_time=dt,
|
||||||
|
elecprice_marketprice=akkudoktor_data.values[i].marketpriceEurocentPerKWh,
|
||||||
|
)
|
||||||
|
self.append(record)
|
||||||
|
# Assure price starts at start_time
|
||||||
|
if compare_datetimes(self[0].date_time, self.start_datetime).gt:
|
||||||
|
record = ElecPriceDataRecord(
|
||||||
|
date_time=self.start_datetime,
|
||||||
|
elecprice_marketprice=previous_price,
|
||||||
|
)
|
||||||
|
self.insert(0, record)
|
||||||
|
# Assure price ends at end_time
|
||||||
|
if compare_datetimes(self[-1].date_time, self.end_datetime).lt:
|
||||||
|
record = ElecPriceDataRecord(
|
||||||
|
date_time=self.end_datetime,
|
||||||
|
elecprice_marketprice=self[-1].elecprice_marketprice,
|
||||||
|
)
|
||||||
|
self.append(record)
|
||||||
|
# If some of the hourly values are missing, they will be interpolated when using
|
||||||
|
# `key_to_array`.
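Note: a standalone sketch of the boundary padding done in _update_data(), with datetimes reduced to integer hours. The same extend-backwards/extend-forwards logic guarantees the record sequence spans the whole prediction window, while interior gaps are left to key_to_array() interpolation.

records = [(2, 0.31), (3, 0.29)]   # (hour, price) surviving the staleness filter
start, end = 0, 5                  # prediction window
previous_price = 0.33              # last value at or before start
if records[0][0] > start:
    records.insert(0, (start, previous_price))  # assure price starts at start
if records[-1][0] < end:
    records.append((end, records[-1][1]))       # assure price ends at end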
|
68
src/akkudoktoreos/prediction/elecpriceimport.py
Normal file
68
src/akkudoktoreos/prediction/elecpriceimport.py
Normal file
@ -0,0 +1,68 @@
"""Retrieves elecprice forecast data from an import file.

This module provides classes and mappings to manage elecprice data obtained from
an import file or a JSON string. The data is mapped to the `ElecPriceDataRecord`
format, enabling consistent access to forecasted and historical electricity price attributes.
"""

from pathlib import Path
from typing import Optional, Union

from pydantic import Field, field_validator

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class ElecPriceImportCommonSettings(SettingsBaseModel):
    """Common settings for elecprice data import from file."""

    elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import elecprice data from."
    )

    elecpriceimport_json: Optional[str] = Field(
        default=None,
        description="JSON string, dictionary of electricity price forecast value lists.",
    )

    # Validators
    @field_validator("elecpriceimport_file_path", mode="after")
    @classmethod
    def validate_elecpriceimport_file_path(
        cls, value: Optional[Union[str, Path]]
    ) -> Optional[Path]:
        if value is None:
            return None
        if isinstance(value, str):
            value = Path(value)
        # Ensure the file is available.
        value = value.resolve()
        if not value.is_file():
            raise ValueError(f"Import file path '{value}' is not a file.")
        return value


class ElecPriceImport(ElecPriceProvider, PredictionImportProvider):
    """Fetch electricity price forecast data from an import file or JSON string.

    ElecPriceImport is a singleton-based class that retrieves elecprice forecast data
    from a file or JSON string and maps it to `ElecPriceDataRecord` fields. It manages the forecast
    over a range of hours into the future and retains historical data.
    """

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the ElecPriceImport provider."""
        return "ElecPriceImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        if self.config.elecpriceimport_file_path is not None:
            self.import_from_file(self.config.elecpriceimport_file_path, key_prefix="elecprice")
        if self.config.elecpriceimport_json is not None:
            self.import_from_json(self.config.elecpriceimport_json, key_prefix="elecprice")
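A minimal usage sketch for the import path above. The payload key follows the
`elecprice_marketprice` record field; how the settings instance gets wired into
the global EOS configuration is assumed here and not shown:

    import json

    from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings

    # One value per hour, starting at the provider's start_datetime.
    payload = json.dumps({"elecprice_marketprice": [0.25, 0.27, 0.24, 0.22]})
    settings = ElecPriceImportCommonSettings(elecpriceimport_json=payload)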
61 src/akkudoktoreos/prediction/load.py Normal file
@ -0,0 +1,61 @@
"""Load forecast module for load predictions."""

from typing import Optional, Set

from pydantic import Field, computed_field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class LoadCommonSettings(SettingsBaseModel):
    # Load 0
    load0_provider: Optional[str] = Field(
        default=None, description="Load provider id of provider to be used."
    )
    load0_name: Optional[str] = Field(default=None, description="Name of the load source.")

    # Load 1
    load1_provider: Optional[str] = Field(
        default=None, description="Load provider id of provider to be used."
    )
    load1_name: Optional[str] = Field(default=None, description="Name of the load source.")

    # Load 2
    load2_provider: Optional[str] = Field(
        default=None, description="Load provider id of provider to be used."
    )
    load2_name: Optional[str] = Field(default=None, description="Name of the load source.")

    # Load 3
    load3_provider: Optional[str] = Field(
        default=None, description="Load provider id of provider to be used."
    )
    load3_name: Optional[str] = Field(default=None, description="Name of the load source.")

    # Load 4
    load4_provider: Optional[str] = Field(
        default=None, description="Load provider id of provider to be used."
    )
    load4_name: Optional[str] = Field(default=None, description="Name of the load source.")

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def load_count(self) -> int:
        """Maximum number of loads."""
        return 5

    @computed_field  # type: ignore[prop-decorator]
    @property
    def load_providers(self) -> Set[str]:
        """Load providers."""
        providers = []
        for i in range(self.load_count):
            load_provider_attr = f"load{i}_provider"
            value = getattr(self, load_provider_attr)
            if value:
                providers.append(value)
        return set(providers)
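A quick sketch of the computed provider set, assuming plain instantiation of the
settings model above:

    settings = LoadCommonSettings(load0_provider="LoadAkkudoktor", load2_provider="LoadImport")
    settings.load_count      # -> 5
    settings.load_providers  # -> {"LoadAkkudoktor", "LoadImport"}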
102 src/akkudoktoreos/prediction/loadabc.py Normal file
@ -0,0 +1,102 @@
"""Abstract and base classes for load predictions.

Notes:
    - Ensure appropriate API keys or configurations are set up if required by external data sources.
"""

from abc import abstractmethod
from typing import List, Optional

from pydantic import Field, computed_field

from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class LoadDataRecord(PredictionRecord):
    """Represents a load data record containing various load attributes at a specific datetime."""

    load0_mean: Optional[float] = Field(default=None, description="Load 0 mean value (W)")
    load0_std: Optional[float] = Field(default=None, description="Load 0 standard deviation (W)")
    load1_mean: Optional[float] = Field(default=None, description="Load 1 mean value (W)")
    load1_std: Optional[float] = Field(default=None, description="Load 1 standard deviation (W)")
    load2_mean: Optional[float] = Field(default=None, description="Load 2 mean value (W)")
    load2_std: Optional[float] = Field(default=None, description="Load 2 standard deviation (W)")
    load3_mean: Optional[float] = Field(default=None, description="Load 3 mean value (W)")
    load3_std: Optional[float] = Field(default=None, description="Load 3 standard deviation (W)")
    load4_mean: Optional[float] = Field(default=None, description="Load 4 mean value (W)")
    load4_std: Optional[float] = Field(default=None, description="Load 4 standard deviation (W)")

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def load_total_mean(self) -> float:
        """Total load mean value (W)."""
        total_mean = 0.0
        for i in range(5):
            load_mean_attr = f"load{i}_mean"
            value = getattr(self, load_mean_attr)
            if value:
                total_mean += value
        return total_mean

    @computed_field  # type: ignore[prop-decorator]
    @property
    def load_total_std(self) -> float:
        """Total load standard deviation (W)."""
        total_std = 0.0
        for i in range(5):
            load_std_attr = f"load{i}_std"
            value = getattr(self, load_std_attr)
            if value:
                # Note: stds are summed directly, which assumes fully correlated loads.
                total_std += value
        return total_std


class LoadProvider(PredictionProvider):
    """Abstract base class for load providers.

    LoadProvider is a thread-safe singleton, ensuring only one instance of this class is created.

    Configuration variables:
        loadN_provider (str): Prediction provider for load N (0..4).

    Attributes:
        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
        latitude (float, optional): The latitude in degrees, must be within -90 to 90.
        longitude (float, optional): The longitude in degrees, must be within -180 to 180.
        start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
        end_datetime (datetime, computed): The datetime representing the end of the prediction range,
            calculated based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
            based on `start_datetime` and `prediction_historic_hours`.
    """

    # overload
    records: List[LoadDataRecord] = Field(
        default_factory=list, description="List of LoadDataRecord records"
    )

    @classmethod
    @abstractmethod
    def provider_id(cls) -> str:
        return "LoadProvider"

    def enabled(self) -> bool:
        logger.debug(
            f"LoadProvider ID {self.provider_id()} vs. config {self.config.load_providers}"
        )
        # load_providers is a set of provider ids, so test for membership.
        return self.provider_id() in self.config.load_providers

    def loads(self) -> List[str]:
        """Returns a list of key prefixes of the loads managed by this provider."""
        loads_prefix = []
        for i in range(self.config.load_count):
            load_provider_attr = f"load{i}_provider"
            value = getattr(self.config, load_provider_attr)
            if value == self.provider_id():
                loads_prefix.append(f"load{i}")
        return loads_prefix
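A short sketch of the provider/load mapping implied above, assuming the
loadN_provider fields from LoadCommonSettings are already set in the
configuration:

    # config.load0_provider = "LoadAkkudoktor"
    # config.load3_provider = "LoadAkkudoktor"
    provider = LoadAkkudoktor()
    provider.enabled()  # -> True, "LoadAkkudoktor" is in config.load_providers
    provider.loads()    # -> ["load0", "load3"]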
69 src/akkudoktoreos/prediction/loadakkudoktor.py Normal file
@ -0,0 +1,69 @@
"""Retrieves load forecast data from Akkudoktor load profiles."""

from pathlib import Path
from typing import Optional

import numpy as np
from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.loadabc import LoadProvider
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class LoadAkkudoktorCommonSettings(SettingsBaseModel):
    """Common settings for the Akkudoktor load profiles."""

    loadakkudoktor_year_energy: Optional[float] = Field(
        default=None, description="Yearly energy consumption (kWh)."
    )


class LoadAkkudoktor(LoadProvider):
    """Fetch load forecast data from Akkudoktor load profiles."""

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the LoadAkkudoktor provider."""
        return "LoadAkkudoktor"

    def load_data(self) -> np.ndarray:
        """Loads data from the Akkudoktor load file."""
        load_file = Path(__file__).parent.parent.joinpath("data/load_profiles.npz")
        data_year_energy = None
        try:
            file_data = np.load(load_file)
            # Pair the mean profiles with their standard deviations and scale
            # them by the configured yearly energy consumption.
            profile_data = np.array(
                list(zip(file_data["yearly_profiles"], file_data["yearly_profiles_std"]))
            )
            data_year_energy = profile_data * self.config.loadakkudoktor_year_energy
        except FileNotFoundError:
            error_msg = f"Error: File {load_file} not found."
            logger.error(error_msg)
            raise FileNotFoundError(error_msg)
        except Exception as e:
            error_msg = f"An error occurred while loading data: {e}"
            logger.error(error_msg)
            raise ValueError(error_msg)
        return data_year_energy

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        """Adds the load means and standard deviations."""
        data_year_energy = self.load_data()
        for load in self.loads():
            attr_load_mean = f"{load}_mean"
            attr_load_std = f"{load}_std"
            date = self.start_datetime
            for i in range(self.config.prediction_hours):
                # Extract mean and standard deviation for the given day and hour.
                # `day_of_year` is 1-based, the profile array is 0-based.
                hourly_stats = data_year_energy[date.day_of_year - 1, :, date.hour]
                self.update_value(date, attr_load_mean, hourly_stats[0])
                self.update_value(date, attr_load_std, hourly_stats[1])
                date += to_duration("1 hour")
        # We are working on fresh data (no cache), report update time
        self.update_datetime = to_datetime(in_timezone=self.config.timezone)
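The expected layout of the profile array is an assumption inferred from the
indexing above: shape (365, 2, 24), addressed as [day_of_year - 1, mean/std, hour].
A small sketch:

    import numpy as np

    data_year_energy = np.zeros((365, 2, 24))  # stand-in for the scaled profiles
    day_of_year, hour = 171, 14
    mean_w, std_w = data_year_energy[day_of_year - 1, :, hour]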
100 src/akkudoktoreos/prediction/loadimport.py Normal file
@ -0,0 +1,100 @@
"""Retrieves load forecast data from an import file.

This module provides classes and mappings to manage load data obtained from
an import file or a JSON string. The data is mapped to the `LoadDataRecord`
format, enabling consistent access to forecasted and historical load attributes.
"""

from pathlib import Path
from typing import Optional, Union

from pydantic import Field, field_validator

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.loadabc import LoadProvider
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class LoadImportCommonSettings(SettingsBaseModel):
    """Common settings for load data import from file."""

    load0_import_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import load data from."
    )
    load0_import_json: Optional[str] = Field(
        default=None, description="JSON string, dictionary of load forecast value lists."
    )
    load1_import_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import load data from."
    )
    load1_import_json: Optional[str] = Field(
        default=None, description="JSON string, dictionary of load forecast value lists."
    )
    load2_import_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import load data from."
    )
    load2_import_json: Optional[str] = Field(
        default=None, description="JSON string, dictionary of load forecast value lists."
    )
    load3_import_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import load data from."
    )
    load3_import_json: Optional[str] = Field(
        default=None, description="JSON string, dictionary of load forecast value lists."
    )
    load4_import_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import load data from."
    )
    load4_import_json: Optional[str] = Field(
        default=None, description="JSON string, dictionary of load forecast value lists."
    )

    # Validators
    @field_validator(
        "load0_import_file_path",
        "load1_import_file_path",
        "load2_import_file_path",
        "load3_import_file_path",
        "load4_import_file_path",
        mode="after",
    )
    @classmethod
    def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
        if value is None:
            return None
        if isinstance(value, str):
            value = Path(value)
        # Ensure the file is available.
        value = value.resolve()
        if not value.is_file():
            raise ValueError(f"Import file path '{value}' is not a file.")
        return value


class LoadImport(LoadProvider, PredictionImportProvider):
    """Fetch load data from an import file or JSON string.

    LoadImport is a singleton-based class that retrieves load forecast data
    from a file or JSON string and maps it to `LoadDataRecord` fields. It manages the forecast
    over a range of hours into the future and retains historical data.
    """

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the LoadImport provider."""
        return "LoadImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        for load in self.loads():
            attr_file_path = f"{load}_import_file_path"
            attr_json = f"{load}_import_json"
            import_file_path = getattr(self.config, attr_file_path)
            if import_file_path is not None:
                self.import_from_file(import_file_path, key_prefix=load)
            import_json = getattr(self.config, attr_json)
            if import_json is not None:
                self.import_from_json(import_json, key_prefix=load)
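An illustrative import payload for load 0. Key names follow the
`LoadDataRecord` fields; values are hourly series starting at the provider's
start_datetime (the key/value contract described for PredictionImportProvider
below):

    settings = LoadImportCommonSettings(
        load0_import_json='{"load0_mean": [450.0, 420.0, 330.0], "load0_std": [40.0, 35.0, 30.0]}'
    )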
173 src/akkudoktoreos/prediction/prediction.py Normal file
@ -0,0 +1,173 @@
"""Prediction module for weather, electricity price, load, and photovoltaic forecasts.

This module provides a `Prediction` class to manage and update a sequence of
prediction providers. The `Prediction` class is a subclass of `PredictionContainer`
and is initialized with a set of forecast providers, such as `WeatherBrightSky`,
`WeatherClearOutside`, and `PVForecastAkkudoktor`.

Usage:
    Instantiate the `Prediction` class with the required providers, maintaining
    the necessary order. Then call the `update` method to refresh forecasts from
    all providers in sequence.

Example:
    # Create singleton prediction instance with prediction providers
    from akkudoktoreos.prediction.prediction import prediction

    prediction.update_data()
    print("Prediction:", prediction)

Classes:
    Prediction: Manages a list of forecast providers to fetch and update predictions.

Attributes:
    elecprice_akkudoktor (ElecPriceAkkudoktor): Electricity price forecast provider using the Akkudoktor API.
    elecprice_import (ElecPriceImport): Electricity price forecast provider for imported data.
    load_akkudoktor (LoadAkkudoktor): Load forecast provider using Akkudoktor load profiles.
    load_import (LoadImport): Load forecast provider for imported data.
    pvforecast_akkudoktor (PVForecastAkkudoktor): Forecast provider for photovoltaic data.
    pvforecast_import (PVForecastImport): PV forecast provider for imported data.
    weather_brightsky (WeatherBrightSky): Weather forecast provider using BrightSky.
    weather_clearoutside (WeatherClearOutside): Weather forecast provider using ClearOutside.
    weather_import (WeatherImport): Weather forecast provider for imported data.
"""

from typing import List, Optional, Union

from pydantic import Field, computed_field

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.elecpriceakkudoktor import ElecPriceAkkudoktor
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImport
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktor
from akkudoktoreos.prediction.loadimport import LoadImport
from akkudoktoreos.prediction.predictionabc import PredictionContainer
from akkudoktoreos.prediction.pvforecastakkudoktor import PVForecastAkkudoktor
from akkudoktoreos.prediction.pvforecastimport import PVForecastImport
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
from akkudoktoreos.prediction.weatherimport import WeatherImport
from akkudoktoreos.utils.datetimeutil import to_timezone


class PredictionCommonSettings(SettingsBaseModel):
    """Base configuration for prediction settings, including forecast duration, geographic location, and time zone.

    This class provides configuration for prediction settings, allowing users to specify
    parameters such as the forecast duration (in hours) and location (latitude and longitude).
    Validators ensure each parameter is within a specified range. A computed property, `timezone`,
    determines the time zone based on latitude and longitude.

    Attributes:
        prediction_hours (Optional[int]): Number of hours into the future for predictions.
            Must be non-negative.
        prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.
            Must be non-negative.
        latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
        longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.

    Properties:
        timezone (Optional[str]): Computed time zone string based on the specified latitude
            and longitude.

    Validators:
        validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.
        validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.
        validate_latitude (float): Ensures `latitude` is within the range -90 to 90.
        validate_longitude (float): Ensures `longitude` is within the range -180 to 180.
    """

    prediction_hours: Optional[int] = Field(
        default=48, ge=0, description="Number of hours into the future for predictions"
    )
    prediction_historic_hours: Optional[int] = Field(
        default=48,
        ge=0,
        description="Number of hours into the past for historical predictions data",
    )
    latitude: Optional[float] = Field(
        default=None,
        ge=-90.0,
        le=90.0,
        description="Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)",
    )
    longitude: Optional[float] = Field(
        default=None,
        ge=-180.0,
        le=180.0,
        description="Longitude in decimal degrees, within -180 to 180 (°)",
    )

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def timezone(self) -> Optional[str]:
        """Compute timezone based on latitude and longitude."""
        # 0.0 is a valid coordinate, so test for None explicitly.
        if self.latitude is not None and self.longitude is not None:
            return to_timezone(location=(self.latitude, self.longitude), as_string=True)
        return None
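For example, with Berlin coordinates the computed field resolves (via the
`to_timezone` helper, presumably backed by timezonefinder) to the IANA zone
name:

    settings = PredictionCommonSettings(latitude=52.52, longitude=13.405)
    settings.timezone  # -> "Europe/Berlin"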


class Prediction(PredictionContainer):
    """Prediction container to manage multiple prediction providers.

    Attributes:
        providers (List[Union[...]]): List of forecast provider instances, in the order
            they should be updated. Providers may depend on updates from others.
    """

    providers: List[
        Union[
            ElecPriceAkkudoktor,
            ElecPriceImport,
            LoadAkkudoktor,
            LoadImport,
            PVForecastAkkudoktor,
            PVForecastImport,
            WeatherBrightSky,
            WeatherClearOutside,
            WeatherImport,
        ]
    ] = Field(default_factory=list, description="List of prediction providers")


# Initialize forecast providers, all are singletons.
elecprice_akkudoktor = ElecPriceAkkudoktor()
elecprice_import = ElecPriceImport()
load_akkudoktor = LoadAkkudoktor()
load_import = LoadImport()
pvforecast_akkudoktor = PVForecastAkkudoktor()
pvforecast_import = PVForecastImport()
weather_brightsky = WeatherBrightSky()
weather_clearoutside = WeatherClearOutside()
weather_import = WeatherImport()


def get_prediction() -> Prediction:
    """Gets the EOS prediction data."""
    # Initialize the Prediction instance with providers in the required order.
    # Mind the provider sequence, as providers may rely on others being updated first.
    prediction = Prediction(
        providers=[
            elecprice_akkudoktor,
            elecprice_import,
            load_akkudoktor,
            load_import,
            pvforecast_akkudoktor,
            pvforecast_import,
            weather_brightsky,
            weather_clearoutside,
            weather_import,
        ]
    )
    return prediction


def main() -> None:
    """Main function to update and display predictions.

    This function initializes and updates the forecast providers in sequence
    according to the `Prediction` instance, then prints the updated prediction data.
    """
    prediction = get_prediction()
    prediction.update_data()
    print(f"Prediction: {prediction}")


if __name__ == "__main__":
    main()
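All providers are registered up front; whether a given provider actually
fetches data is decided by its enabled() check against the configuration, so a
typical caller only needs:

    prediction = get_prediction()
    prediction.update_data()   # runs only the providers enabled in the config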
242 src/akkudoktoreos/prediction/predictionabc.py Normal file
@ -0,0 +1,242 @@
"""Abstract and base classes for predictions.

This module provides classes for managing and processing prediction data in a flexible, configurable manner.
It includes classes to handle configurations, record structures, sequences, and containers for prediction data,
enabling efficient storage, retrieval, and manipulation of prediction records.

This module is designed for use in predictive modeling workflows, facilitating the organization, serialization,
and manipulation of configuration and prediction data in a clear, scalable, and structured manner.
"""

from typing import List, Optional

from pendulum import DateTime
from pydantic import Field, computed_field

from akkudoktoreos.core.dataabc import (
    DataBase,
    DataContainer,
    DataImportProvider,
    DataProvider,
    DataRecord,
    DataSequence,
)
from akkudoktoreos.utils.datetimeutil import to_duration
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class PredictionBase(DataBase):
    """Base class for handling prediction data.

    Enables access to EOS configuration data (attribute `config`).
    """

    pass


class PredictionRecord(DataRecord):
    """Base class for prediction records, enabling dynamic access to fields defined in derived classes.

    Fields can be accessed and mutated both using dictionary-style access (`record['field_name']`)
    and attribute-style access (`record.field_name`).

    Attributes:
        date_time (Optional[AwareDatetime]): Aware datetime indicating when the prediction record applies.

    Configurations:
        - Allows mutation after creation.
        - Supports non-standard data types like `datetime`.
    """

    pass


class PredictionSequence(DataSequence):
    """A managed sequence of PredictionRecord instances with list-like behavior.

    The PredictionSequence class provides an ordered, mutable collection of PredictionRecord
    instances, allowing list-style access for adding, deleting, and retrieving records. It also
    supports advanced data operations such as JSON serialization, conversion to Pandas Series,
    and sorting by timestamp.

    Attributes:
        records (List[PredictionRecord]): A list of PredictionRecord instances representing
            individual prediction data points.
        record_keys (Optional[List[str]]): A list of field names (keys) expected in each
            PredictionRecord.

    Note:
        Derived classes have to provide their own records field with the correct record type set.

    Usage:
        # Example of creating, adding, and using PredictionSequence
        class DerivedSequence(PredictionSequence):
            records: List[DerivedPredictionRecord] = Field(default_factory=list,
                                                           description="List of prediction records")

        seq = DerivedSequence()
        seq.insert(DerivedPredictionRecord(date_time=datetime.now(), temperature=72))
        seq.insert(DerivedPredictionRecord(date_time=datetime.now(), temperature=75))

        # Convert to JSON and back
        json_data = seq.to_json()
        new_seq = DerivedSequence.from_json(json_data)

        # Convert to Pandas Series
        series = seq.key_to_series('temperature')
    """

    # To be overloaded by derived classes.
    records: List[PredictionRecord] = Field(
        default_factory=list, description="List of prediction records"
    )


class PredictionStartEndKeepMixin(PredictionBase):
    """A mixin to manage start, end, and historical retention datetimes for prediction data.

    The starting datetime for prediction data generation is provided by the energy management
    system. Predictions cannot be computed if this value is `None`.
    """

    # Computed fields for end_datetime and keep_datetime
    @computed_field  # type: ignore[prop-decorator]
    @property
    def end_datetime(self) -> Optional[DateTime]:
        """Compute the end datetime based on the `start_datetime` and `prediction_hours`.

        Adjusts the calculated end time if DST transitions occur within the prediction window.

        Returns:
            Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
        """
        if self.start_datetime and self.config.prediction_hours:
            end_datetime = self.start_datetime + to_duration(
                f"{self.config.prediction_hours} hours"
            )
            dst_change = end_datetime.offset_hours - self.start_datetime.offset_hours
            logger.debug(f"Pre: {self.start_datetime}..{end_datetime}: DST change: {dst_change}")
            if dst_change < 0:
                end_datetime = end_datetime + to_duration(f"{abs(int(dst_change))} hours")
            elif dst_change > 0:
                end_datetime = end_datetime - to_duration(f"{abs(int(dst_change))} hours")
            logger.debug(f"Post: {self.start_datetime}..{end_datetime}: DST change: {dst_change}")
            return end_datetime
        return None
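A worked example of the DST adjustment (Europe/Berlin, spring-forward):
starting 2024-03-31 00:00 at UTC+01:00 and adding 48 hours lands at UTC+02:00,
so `dst_change` is +1 and one hour is subtracted, leaving the end at the same
wall-clock time as the start. A sketch with pendulum:

    import pendulum

    start = pendulum.datetime(2024, 3, 31, 0, 0, tz="Europe/Berlin")  # UTC+01:00
    end = start.add(hours=48)                                         # UTC+02:00
    dst_change = end.offset_hours - start.offset_hours                # -> 1
    end = end.subtract(hours=dst_change)                              # 2024-04-02 00:00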
    @computed_field  # type: ignore[prop-decorator]
    @property
    def keep_datetime(self) -> Optional[DateTime]:
        """Compute the keep datetime for historical data retention.

        Returns:
            Optional[DateTime]: The calculated retention cutoff datetime, or `None` if inputs are missing.
        """
        if self.start_datetime and self.config.prediction_historic_hours:
            return self.start_datetime - to_duration(
                f"{int(self.config.prediction_historic_hours)} hours"
            )
        return None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def total_hours(self) -> Optional[int]:
        """Compute the hours from `start_datetime` to `end_datetime`.

        Returns:
            Optional[int]: The duration in hours, or `None` if either datetime is unavailable.
        """
        end_dt = self.end_datetime
        if end_dt is None:
            return None
        duration = end_dt - self.start_datetime
        return int(duration.total_hours())

    @computed_field  # type: ignore[prop-decorator]
    @property
    def keep_hours(self) -> Optional[int]:
        """Compute the hours from `keep_datetime` to `start_datetime`.

        Returns:
            Optional[int]: The duration in hours, or `None` if either datetime is unavailable.
        """
        keep_dt = self.keep_datetime
        if keep_dt is None:
            return None
        duration = self.start_datetime - keep_dt
        return int(duration.total_hours())


class PredictionProvider(PredictionStartEndKeepMixin, DataProvider):
    """Abstract base class for prediction providers with singleton thread-safety and configurable prediction parameters.

    This class serves as a base for managing prediction data, providing an interface for derived
    classes to maintain a single instance across threads. It offers attributes for managing
    prediction and historical data retention.

    Note:
        Derived classes have to provide their own records field with the correct record type set.
    """

    def update_data(
        self,
        force_enable: Optional[bool] = False,
        force_update: Optional[bool] = False,
    ) -> None:
        """Update prediction parameters and call the custom update function.

        Updates the configuration, deletes outdated records, and performs the custom update logic.

        Args:
            force_enable (bool, optional): If True, forces the update even if the provider is disabled.
            force_update (bool, optional): If True, forces the provider to update the data even if still cached.
        """
        # Update prediction configuration
        self.config.update()

        # Check after configuration is updated.
        if not force_enable and not self.enabled():
            return

        # Delete outdated records before updating
        self.delete_by_datetime(end_datetime=self.keep_datetime)

        # Call the custom update logic
        self._update_data(force_update=force_update)

        # Assure records are sorted.
        self.sort_by_datetime()


class PredictionImportProvider(PredictionProvider, DataImportProvider):
    """Abstract base class for prediction providers that import prediction data.

    This class is designed to handle prediction data provided in the form of a key-value dictionary.
    - **Keys**: Represent identifiers from the record keys of a specific prediction.
    - **Values**: Are lists of prediction values starting at a specified `start_datetime`, where
      each value corresponds to a subsequent time interval (e.g., hourly).

    Subclasses must implement the logic for managing prediction data based on the imported records.
    """

    pass
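An illustrative payload in that key-value form (the record key names here are
assumptions for illustration; each list holds one value per hourly interval
from `start_datetime`):

    {
        "weather_temp_air": [12.1, 11.8, 11.5],
        "weather_ghi": [0.0, 15.2, 89.6]
    }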


class PredictionContainer(PredictionStartEndKeepMixin, DataContainer):
    """A container for managing multiple PredictionProvider instances.

    This class enables access to data from multiple prediction providers, supporting retrieval and
    aggregation of their data as Pandas Series objects. It acts as a dictionary-like structure
    where each key represents a specific data field, and the value is a Pandas Series containing
    combined data from all PredictionProvider instances for that key.

    Note:
        Derived classes have to provide their own providers field with the correct provider type set.
    """

    # To be overloaded by derived classes.
    providers: List[PredictionProvider] = Field(
        default_factory=list, description="List of prediction providers"
    )
@ -1,144 +0,0 @@
import hashlib
import json
import zoneinfo
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Sequence

import numpy as np
import requests

from akkudoktoreos.config import AppConfig, SetupIncomplete


def repeat_to_shape(array: np.ndarray, target_shape: Sequence[int]) -> np.ndarray:
    # Check if the array fits the target shape
    if len(target_shape) != array.ndim:
        raise ValueError("Array and target shape must have the same number of dimensions")

    # Number of repetitions per dimension
    repeats = tuple(target_shape[i] // array.shape[i] for i in range(array.ndim))

    # Use np.tile to expand the array
    expanded_array = np.tile(array, repeats)
    return expanded_array


class HourlyElectricityPriceForecast:
    def __init__(
        self,
        source: str | Path,
        config: AppConfig,
        charges: float = 0.000228,
        use_cache: bool = True,
    ):  # 228
        self.cache_dir = config.working_dir / config.directories.cache
        self.use_cache = use_cache
        if not self.cache_dir.is_dir():
            raise SetupIncomplete(f"Output path does not exist: {self.cache_dir}.")

        self.cache_time_file = self.cache_dir / "cache_timestamp.txt"
        self.prices = self.load_data(source)
        self.charges = charges
        self.prediction_hours = config.eos.prediction_hours

    def load_data(self, source: str | Path) -> list[dict[str, Any]]:
        cache_file = self.get_cache_file(source)
        if isinstance(source, str):
            if cache_file.is_file() and not self.is_cache_expired() and self.use_cache:
                print("Loading data from cache...")
                with cache_file.open("r") as file:
                    json_data = json.load(file)
            else:
                print("Loading data from the URL...")
                response = requests.get(source)
                if response.status_code == 200:
                    json_data = response.json()
                    with cache_file.open("w") as file:
                        json.dump(json_data, file)
                    self.update_cache_timestamp()
                else:
                    raise Exception(f"Error fetching data: {response.status_code}")
        elif source.is_file():
            with source.open("r") as file:
                json_data = json.load(file)
        else:
            raise ValueError(f"Input is not a valid path: {source}")
        return json_data["values"]

    def get_cache_file(self, url: str | Path) -> Path:
        if isinstance(url, Path):
            url = str(url)
        hash_object = hashlib.sha256(url.encode())
        hex_dig = hash_object.hexdigest()
        return self.cache_dir / f"cache_{hex_dig}.json"

    def is_cache_expired(self) -> bool:
        if not self.cache_time_file.is_file():
            return True
        with self.cache_time_file.open("r") as file:
            timestamp_str = file.read()
        last_cache_time = datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S")
        return datetime.now() - last_cache_time > timedelta(hours=1)

    def update_cache_timestamp(self) -> None:
        with self.cache_time_file.open("w") as file:
            file.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

    def get_price_for_date(self, date_str: str) -> np.ndarray:
        """Returns all prices for the specified date, including the price from 00:00 of the previous day."""
        # Convert date string to datetime object
        date_obj = datetime.strptime(date_str, "%Y-%m-%d")

        # Calculate the previous day
        previous_day = date_obj - timedelta(days=1)
        previous_day_str = previous_day.strftime("%Y-%m-%d")

        # Extract the price from 00:00 of the previous day
        previous_day_prices = [
            entry["marketpriceEurocentPerKWh"] + self.charges
            for entry in self.prices
            if previous_day_str in entry["end"]
        ]
        last_price_of_previous_day = previous_day_prices[-1] if previous_day_prices else 0

        # Extract all prices for the specified date
        date_prices = [
            entry["marketpriceEurocentPerKWh"] + self.charges
            for entry in self.prices
            if date_str in entry["end"]
        ]
        print(f"getPrice: {len(date_prices)}")

        # Add the last price of the previous day at the start of the list
        if len(date_prices) == 23:
            date_prices.insert(0, last_price_of_previous_day)

        return np.array(date_prices) / (1000.0 * 100.0) + self.charges

    def get_price_for_daterange(self, start_date_str: str, end_date_str: str) -> np.ndarray:
        """Returns all prices between the start and end dates."""
        print(start_date_str)
        print(end_date_str)
        start_date_utc = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc)
        end_date_utc = datetime.strptime(end_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc)
        start_date = start_date_utc.astimezone(zoneinfo.ZoneInfo("Europe/Berlin"))
        end_date = end_date_utc.astimezone(zoneinfo.ZoneInfo("Europe/Berlin"))

        price_list: list[float] = []

        while start_date < end_date:
            date_str = start_date.strftime("%Y-%m-%d")
            daily_prices = self.get_price_for_date(date_str)

            if daily_prices.size == 24:
                price_list.extend(daily_prices)
            start_date += timedelta(days=1)

        price_list_np = np.array(price_list)

        # If prediction hours are greater than 0, reshape the price list
        if self.prediction_hours > 0:
            price_list_np = repeat_to_shape(price_list_np, (self.prediction_hours,))

        return price_list_np
@ -1,682 +0,0 @@
|
|||||||
"""PV Power Forecasting Module.
|
|
||||||
|
|
||||||
This module contains classes and methods to retrieve, process, and display photovoltaic (PV)
|
|
||||||
power forecast data, including temperature, windspeed, DC power, and AC power forecasts.
|
|
||||||
The module supports caching of forecast data to reduce redundant network requests and includes
|
|
||||||
functions to update AC power measurements and retrieve forecasts within a specified date range.
|
|
||||||
|
|
||||||
Classes
|
|
||||||
ForecastData: Represents a single forecast entry, including DC power, AC power,
|
|
||||||
temperature, and windspeed.
|
|
||||||
PVForecast: Retrieves, processes, and stores PV power forecast data, either from
|
|
||||||
a file or URL, with optional caching. It also provides methods to query
|
|
||||||
and update the forecast data, convert it to a DataFrame, and output key
|
|
||||||
metrics like AC power.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
# Initialize PVForecast class with an URL
|
|
||||||
forecast = PVForecast(
|
|
||||||
prediction_hours=24,
|
|
||||||
url="https://api.akkudoktor.net/forecast?lat=52.52&lon=13.405..."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Update the AC power measurement for a specific date and time
|
|
||||||
forecast.update_ac_power_measurement(ac_power_measurement=1000, date_time=datetime.now())
|
|
||||||
|
|
||||||
# Print the forecast data with DC and AC power details
|
|
||||||
forecast.print_ac_power_and_measurement()
|
|
||||||
|
|
||||||
# Get the forecast data as a Pandas DataFrame
|
|
||||||
df = forecast.get_forecast_dataframe()
|
|
||||||
print(df)
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
prediction_hours (int): Number of forecast hours. Defaults to 48.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
from datetime import date, datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, List, Optional, Union
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
import pandas as pd
|
|
||||||
import requests
|
|
||||||
from pydantic import BaseModel, ValidationError
|
|
||||||
|
|
||||||
from akkudoktoreos.utils.cachefilestore import cache_in_file
|
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime
|
|
||||||
from akkudoktoreos.utils.logutil import get_logger
|
|
||||||
|
|
||||||
logger = get_logger(__name__, logging_level="DEBUG")
|
|
||||||
|
|
||||||
|
|
||||||
class AkkudoktorForecastHorizon(BaseModel):
|
|
||||||
altitude: int
|
|
||||||
azimuthFrom: int
|
|
||||||
azimuthTo: int
|
|
||||||
|
|
||||||
|
|
||||||
class AkkudoktorForecastMeta(BaseModel):
|
|
||||||
lat: float
|
|
||||||
lon: float
|
|
||||||
power: List[int]
|
|
||||||
azimuth: List[int]
|
|
||||||
tilt: List[int]
|
|
||||||
timezone: str
|
|
||||||
albedo: float
|
|
||||||
past_days: int
|
|
||||||
inverterEfficiency: float
|
|
||||||
powerInverter: List[int]
|
|
||||||
cellCoEff: float
|
|
||||||
range: bool
|
|
||||||
horizont: List[List[AkkudoktorForecastHorizon]]
|
|
||||||
horizontString: List[str]
|
|
||||||
|
|
||||||
|
|
||||||
class AkkudoktorForecastValue(BaseModel):
|
|
||||||
datetime: str
|
|
||||||
dcPower: float
|
|
||||||
power: float
|
|
||||||
sunTilt: float
|
|
||||||
sunAzimuth: float
|
|
||||||
temperature: float
|
|
||||||
relativehumidity_2m: float
|
|
||||||
windspeed_10m: float
|
|
||||||
|
|
||||||
|
|
||||||
class AkkudoktorForecast(BaseModel):
|
|
||||||
meta: AkkudoktorForecastMeta
|
|
||||||
values: List[List[AkkudoktorForecastValue]]
|
|
||||||
|
|
||||||
|
|
||||||
def validate_pv_forecast_data(data: dict[str, Any]) -> Optional[str]:
|
|
||||||
"""Validate PV forecast data."""
|
|
||||||
try:
|
|
||||||
AkkudoktorForecast.model_validate(data)
|
|
||||||
data_type = "Akkudoktor"
|
|
||||||
except ValidationError as e:
|
|
||||||
error_msg = ""
|
|
||||||
for error in e.errors():
|
|
||||||
field = " -> ".join(str(x) for x in error["loc"])
|
|
||||||
message = error["msg"]
|
|
||||||
error_type = error["type"]
|
|
||||||
error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
|
|
||||||
logger.debug(f"Validation did not succeed: {error_msg}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return data_type
|
|
||||||
|
|
||||||
|
|
||||||
class ForecastResponse(BaseModel):
|
|
||||||
temperature: list[float]
|
|
||||||
pvpower: list[float]
|
|
||||||
|
|
||||||
|
|
||||||
class ForecastData:
|
|
||||||
"""Stores forecast data for PV power and weather parameters.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
date_time (datetime): The date and time of the forecast.
|
|
||||||
dc_power (float): The direct current (DC) power in watts.
|
|
||||||
ac_power (float): The alternating current (AC) power in watts.
|
|
||||||
windspeed_10m (float, optional): Wind speed at 10 meters altitude.
|
|
||||||
temperature (float, optional): Temperature in degrees Celsius.
|
|
||||||
ac_power_measurement (float, optional): Measured AC power.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
date_time: datetime,
|
|
||||||
dc_power: float,
|
|
||||||
ac_power: float,
|
|
||||||
windspeed_10m: Optional[float] = None,
|
|
||||||
temperature: Optional[float] = None,
|
|
||||||
ac_power_measurement: Optional[float] = None,
|
|
||||||
):
|
|
||||||
"""Initializes the ForecastData instance.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
date_time (datetime): The date and time of the forecast.
|
|
||||||
dc_power (float): The DC power in watts.
|
|
||||||
ac_power (float): The AC power in watts.
|
|
||||||
windspeed_10m (float, optional): Wind speed at 10 meters altitude. Defaults to None.
|
|
||||||
temperature (float, optional): Temperature in degrees Celsius. Defaults to None.
|
|
||||||
ac_power_measurement (float, optional): Measured AC power. Defaults to None.
|
|
||||||
"""
|
|
||||||
self.date_time = date_time
|
|
||||||
self.dc_power = dc_power
|
|
||||||
self.ac_power = ac_power
|
|
||||||
self.windspeed_10m = windspeed_10m
|
|
||||||
self.temperature = temperature
|
|
||||||
self.ac_power_measurement = ac_power_measurement
|
|
||||||
|
|
||||||
def get_date_time(self) -> datetime:
|
|
||||||
"""Returns the forecast date and time.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
datetime: The date and time of the forecast.
|
|
||||||
"""
|
|
||||||
return self.date_time
|
|
||||||
|
|
||||||
def get_dc_power(self) -> float:
|
|
||||||
"""Returns the DC power.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
float: DC power in watts.
|
|
||||||
"""
|
|
||||||
return self.dc_power
|
|
||||||
|
|
||||||
def get_ac_power_measurement(self) -> Optional[float]:
|
|
||||||
"""Returns the measured AC power.
|
|
||||||
|
|
||||||
It returns the measured AC power if available; otherwise None.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
float: Measured AC power in watts or None
|
|
||||||
"""
|
|
||||||
return self.ac_power_measurement
|
|
||||||
|
|
||||||
def get_ac_power(self) -> float:
|
|
||||||
"""Returns the AC power.
|
|
||||||
|
|
||||||
If a measured value is available, it returns the measured AC power;
|
|
||||||
otherwise, it returns the forecasted AC power.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
float: AC power in watts.
|
|
||||||
"""
|
|
||||||
if self.ac_power_measurement is not None:
|
|
||||||
return self.ac_power_measurement
|
|
||||||
else:
|
|
||||||
return self.ac_power
|
|
||||||
|
|
||||||
def get_windspeed_10m(self) -> Optional[float]:
|
|
||||||
"""Returns the wind speed at 10 meters altitude.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
float: Wind speed in meters per second.
|
|
||||||
"""
|
|
||||||
return self.windspeed_10m
|
|
||||||
|
|
||||||
def get_temperature(self) -> Optional[float]:
|
|
||||||
"""Returns the temperature.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
float: Temperature in degrees Celsius.
|
|
||||||
"""
|
|
||||||
return self.temperature
|
|
||||||
|
|
||||||
|
|
||||||
class PVForecast:
|
|
||||||
"""Manages PV (photovoltaic) power forecasts and weather data.
|
|
||||||
|
|
||||||
Forecast data can be loaded from different sources (in-memory data, file, or URL).
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
meta (dict): Metadata related to the forecast (e.g., source, location).
|
|
||||||
forecast_data (list): A list of forecast data points of `ForecastData` objects.
|
|
||||||
prediction_hours (int): The number of hours into the future the forecast covers.
|
|
||||||
current_measurement (Optional[float]): The current AC power measurement in watts (or None if unavailable).
|
|
||||||
data (Optional[dict]): JSON data containing the forecast information (if provided).
|
|
||||||
filepath (Optional[str]): Filepath to the forecast data file (if provided).
|
|
||||||
url (Optional[str]): URL to retrieve forecast data from an API (if provided).
|
|
||||||
_forecast_start (Optional[date]): Start datetime for the forecast period.
|
|
||||||
tz_name (Optional[str]): The time zone name of the forecast data, if applicable.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
data: Optional[dict[str, Any]] = None,
|
|
||||||
filepath: Optional[str | Path] = None,
|
|
||||||
url: Optional[str] = None,
|
|
||||||
forecast_start: Union[datetime, date, str, int, float, None] = None,
|
|
||||||
prediction_hours: Optional[int] = None,
|
|
||||||
):
|
|
||||||
"""Initializes a `PVForecast` instance.
|
|
||||||
|
|
||||||
Forecast data can be loaded from in-memory `data`, a file specified by `filepath`, or
|
|
||||||
fetched from a remote `url`. If none are provided, an empty forecast will be initialized.
|
|
||||||
The `forecast_start` and `prediction_hours` parameters can be specified to control the
|
|
||||||
forecasting time period.
|
|
||||||
|
|
||||||
Use `process_data()` to fill an empty forecast later on.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data (Optional[dict]): In-memory JSON data containing forecast information. Defaults to None.
|
|
||||||
filepath (Optional[str]): Path to a local file containing forecast data in JSON format. Defaults to None.
|
|
||||||
url (Optional[str]): URL to an API providing forecast data. Defaults to None.
|
|
||||||
forecast_start (Union[datetime, date, str, int, float]): The start datetime for the forecast period.
|
|
||||||
Can be a `datetime`, `date`, `str` (formatted date), `int` (timestamp), `float`, or None. Defaults to None.
|
|
||||||
prediction_hours (Optional[int]): The number of hours to forecast into the future. Defaults to 48 hours.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
forecast = PVForecast(data=my_forecast_data, forecast_start="2024-10-13", prediction_hours=72)
|
|
||||||
"""
|
|
||||||
self.meta: dict[str, Any] = {}
|
|
||||||
self.forecast_data: list[ForecastData] = []
|
|
||||||
self.current_measurement: Optional[float] = None
|
|
||||||
self.data = data
|
|
||||||
self.filepath = filepath
|
|
||||||
self.url = url
|
|
||||||
self._forecast_start: Optional[datetime] = None
|
|
||||||
if forecast_start:
|
|
||||||
self._forecast_start = to_datetime(forecast_start, to_naiv=True, to_maxtime=False)
|
|
||||||
self.prediction_hours = prediction_hours
|
|
||||||
self._tz_name = None
|
|
||||||
|
|
||||||
if self.data or self.filepath or self.url:
|
|
||||||
self.process_data(
|
|
||||||
data=self.data,
|
|
||||||
filepath=self.filepath,
|
|
||||||
url=self.url,
|
|
||||||
forecast_start=self._forecast_start,
|
|
||||||
prediction_hours=self.prediction_hours,
|
|
||||||
)
|
|
||||||
|
|
||||||
def update_ac_power_measurement(
|
|
||||||
self,
|
|
||||||
ac_power_measurement: float,
|
|
||||||
date_time: Union[datetime, date, str, int, float, None] = None,
|
|
||||||
) -> bool:
|
|
||||||
"""Updates the AC power measurement for a specific time.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
date_time (datetime): The date and time of the measurement.
|
|
||||||
ac_power_measurement (float): Measured AC power.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if a matching timestamp was found, False otherwise.
|
|
||||||
"""
|
|
||||||
found = False
|
|
||||||
input_date_hour = to_datetime(
|
|
||||||
date_time, to_timezone=self._tz_name, to_naiv=True, to_maxtime=False
|
|
||||||
).replace(minute=0, second=0, microsecond=0)
|
|
||||||
|
|
||||||
for forecast in self.forecast_data:
|
|
||||||
forecast_date_hour = to_datetime(forecast.date_time, to_naiv=True).replace(
|
|
||||||
minute=0, second=0, microsecond=0
|
|
||||||
)
|
|
||||||
if forecast_date_hour == input_date_hour:
|
|
||||||
forecast.ac_power_measurement = ac_power_measurement
|
|
||||||
found = True
|
|
||||||
logger.debug(
|
|
||||||
f"AC Power measurement updated at date {input_date_hour}: {ac_power_measurement}"
|
|
||||||
)
|
|
||||||
break
|
|
||||||
return found
|
|
||||||
|
|
||||||
    def process_data(
        self,
        data: Optional[dict[str, Any]] = None,
        filepath: Optional[str | Path] = None,
        url: Optional[str] = None,
        forecast_start: Union[datetime, date, str, int, float, None] = None,
        prediction_hours: Optional[int] = None,
    ) -> None:
        """Processes the forecast data from the provided source (in-memory `data`, `filepath`, or `url`).

        If `forecast_start` and `prediction_hours` are provided, they define the forecast period.

        Args:
            data (Optional[dict]): JSON data containing forecast values. Defaults to None.
            filepath (Optional[str]): Path to a file with forecast data. Defaults to None.
            url (Optional[str]): API URL to retrieve forecast data from. Defaults to None.
            forecast_start (Union[datetime, date, str, int, float, None]): Start datetime of the
                forecast period. Defaults to None. If provided, the value is cached for
                subsequent calls.
            prediction_hours (Optional[int]): The number of hours to forecast into the future.
                Defaults to None. If provided, the value is cached for subsequent calls.

        Returns:
            None

        Raises:
            FileNotFoundError: If the specified `filepath` does not exist.
            ValueError: If no valid data source or data is provided.

        Example:
            forecast = PVForecast(
                url="https://api.akkudoktor.net/forecast?lat=52.52&lon=13.405&"
                "power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&"
                "power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&"
                "power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&"
                "power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&"
                "past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&"
                "timezone=Europe%2FBerlin&hourly=relativehumidity_2m%2Cwindspeed_10m",
                prediction_hours=24,
            )
        """
        # Get input forecast data
        if data:
            pass
        elif filepath:
            data = self.load_data_from_file(filepath)
        elif url:
            data = self.load_data_from_url_with_caching(url)
        elif self.data or self.filepath or self.url:
            # Re-process according to previous arguments
            if self.data:
                data = self.data
            elif self.filepath:
                data = self.load_data_from_file(self.filepath)
            elif self.url:
                data = self.load_data_from_url_with_caching(self.url)
            else:
                raise NotImplementedError(
                    "Re-processing for None input is not implemented!"
                )  # Invalid path
        else:
            raise ValueError("No prediction input data available.")
        assert data is not None  # make mypy happy

        # Validate input data to be of a known format
        data_format = validate_pv_forecast_data(data)
        if data_format != "Akkudoktor":
            raise ValueError(f"Prediction input data are of unknown format: '{data_format}'.")

        # Assure we have a forecast start datetime
        if forecast_start is None:
            forecast_start = self._forecast_start
            if forecast_start is None:
                forecast_start = datetime(1970, 1, 1)

        # Assure we have prediction hours set
        if prediction_hours is None:
            prediction_hours = self.prediction_hours
            if prediction_hours is None:
                prediction_hours = 48
        self.prediction_hours = prediction_hours

        if data_format == "Akkudoktor":
            # ---------------------------------------------
            # From here Akkudoktor PV forecast data format
            # ---------------------------------------------
            self.meta = data.get("meta", {})
            all_values = data.get("values")

            # Timezone of the PV system
            self._tz_name = self.meta.get("timezone", None)
            if not self._tz_name:
                raise NotImplementedError(
                    "Processing without PV system timezone info is not implemented!"
                )

            # Assumption that all lists are the same length and are ordered chronologically
            # in ascending order and have the same timestamps.
            values_len = len(all_values[0])
            if values_len < self.prediction_hours:
                # Expect one value set per prediction hour
                raise ValueError(
                    f"The forecast must cover at least {self.prediction_hours} hours, "
                    f"but only {values_len} data sets are given in forecast data."
                )

            # Convert forecast_start to the timezone of the PV system and make it a naive datetime
            self._forecast_start = to_datetime(
                forecast_start, to_timezone=self._tz_name, to_naiv=True
            )
            logger.debug(f"Forecast start set to {self._forecast_start}")

            for i in range(values_len):
                # Original timestamp as delivered by the API, converted to the PV system timezone
                original_datetime = all_values[0][i].get("datetime")
                dt = to_datetime(original_datetime, to_timezone=self._tz_name, to_naiv=True)

                if dt < self._forecast_start:
                    # forecast data are too old
                    continue

                sum_dc_power = sum(values[i]["dcPower"] for values in all_values)
                sum_ac_power = sum(values[i]["power"] for values in all_values)

                forecast = ForecastData(
                    date_time=dt,  # use the timezone-adjusted timestamp
                    dc_power=sum_dc_power,
                    ac_power=sum_ac_power,
                    windspeed_10m=all_values[0][i].get("windspeed_10m"),
                    temperature=all_values[0][i].get("temperature"),
                )
                self.forecast_data.append(forecast)

            if len(self.forecast_data) < self.prediction_hours:
                raise ValueError(
                    f"The forecast must cover at least {self.prediction_hours} hours, "
                    f"but only {len(self.forecast_data)} hours starting from {forecast_start} "
                    f"were predicted."
                )

        # Adapt forecast start to actual value
        self._forecast_start = self.forecast_data[0].get_date_time()
        logger.debug(f"Forecast start adapted to {self._forecast_start}")

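    # Editor's note: a minimal usage sketch (not from the original module), assuming
    # `forecast` is a PVForecast instance and `raw` is a dict already in the
    # Akkudoktor format (keys "meta" and "values"). Passing `data` directly skips
    # file and URL loading; forecast_start and prediction_hours are cached for
    # later re-processing:
    #
    #     forecast.process_data(data=raw, forecast_start="2024-11-02", prediction_hours=24)
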
    def load_data_from_file(self, filepath: str | Path) -> dict[str, Any]:
        """Loads forecast data from a file.

        Args:
            filepath (str): Path to the file containing the forecast data.

        Returns:
            data (dict): JSON data containing forecast values.
        """
        with open(filepath, "r") as file:
            data = json.load(file)
        return data

    def load_data_from_url(self, url: str) -> dict[str, Any]:
        """Loads forecast data from a URL.

        Example:
            https://api.akkudoktor.net/forecast?lat=52.52&lon=13.405&power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&hourly=relativehumidity_2m%2Cwindspeed_10m

        Args:
            url (str): URL of the API providing forecast data.

        Returns:
            data (dict): JSON data containing forecast values. On failure, an error
                message string is returned instead (and logged).
        """
        response = requests.get(url)
        if response.status_code == 200:
            data = response.json()
        else:
            data = f"Failed to load data from `{url}`. Status Code: {response.status_code}"
            logger.error(data)
        return data

    @cache_in_file()  # use binary mode by default as we have python objects not text
    def load_data_from_url_with_caching(self, url: str) -> dict[str, Any]:
        """Loads data from a URL or from the cache if available.

        Args:
            url (str): URL of the API providing forecast data.

        Returns:
            data (dict): JSON data containing forecast values.
        """
        response = requests.get(url)
        if response.status_code == 200:
            data = response.json()
            logger.debug(f"Data fetched from URL `{url}` and cached.")
        else:
            data = f"Failed to load data from `{url}`. Status Code: {response.status_code}"
            logger.error(data)
        return data

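    # Editor's note: a sketch of the caching behaviour (not from the original module).
    # The `cache_in_file` decorator persists the decorated function's result to a
    # cache file, so a second call with the same URL is served from disk instead of
    # the network (assumption based on the decorator's name and its binary-mode note):
    #
    #     data_first = forecast.load_data_from_url_with_caching(url)   # HTTP request
    #     data_second = forecast.load_data_from_url_with_caching(url)  # cache hit
    #     assert data_first == data_second
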
    def get_forecast_data(self) -> list[ForecastData]:
        """Returns the forecast data.

        Returns:
            list: List of ForecastData objects.
        """
        return self.forecast_data

    def get_temperature_forecast_for_date(
        self, input_date: Union[datetime, date, str, int, float, None]
    ) -> np.ndarray:
        """Returns the temperature forecast for a specific date.

        Args:
            input_date (Union[datetime, date, str, int, float, None]): The date to filter by.

        Returns:
            np.ndarray: Array of temperature forecasts.
        """
        if not self._tz_name:
            raise NotImplementedError(
                "Processing without PV system timezone info is not implemented!"
            )
        input_date = to_datetime(input_date, to_timezone=self._tz_name, to_naiv=True).date()
        daily_forecast_obj = [
            data for data in self.forecast_data if data.get_date_time().date() == input_date
        ]
        daily_forecast = []
        for d in daily_forecast_obj:
            daily_forecast.append(d.get_temperature())

        return np.array(daily_forecast)

    def get_pv_forecast_for_date_range(
        self,
        start_date: Union[datetime, date, str, int, float, None],
        end_date: Union[datetime, date, str, int, float, None],
    ) -> np.ndarray:
        """Returns the PV forecast for a date range.

        Args:
            start_date (Union[datetime, date, str, int, float, None]): Start date.
            end_date (Union[datetime, date, str, int, float, None]): End date.

        Returns:
            np.ndarray: Array of AC power forecasts, capped at `prediction_hours` values.
        """
        if not self._tz_name:
            raise NotImplementedError(
                "Processing without PV system timezone info is not implemented!"
            )
        start_date = to_datetime(start_date, to_timezone=self._tz_name, to_naiv=True).date()
        end_date = to_datetime(end_date, to_timezone=self._tz_name, to_naiv=True).date()
        date_range_forecast = []

        for data in self.forecast_data:
            data_date = data.get_date_time().date()
            if start_date <= data_date <= end_date:
                date_range_forecast.append(data)

        ac_power_forecast = np.array([data.get_ac_power() for data in date_range_forecast])

        return ac_power_forecast[: self.prediction_hours]

    def get_temperature_for_date_range(
        self,
        start_date: Union[datetime, date, str, int, float, None],
        end_date: Union[datetime, date, str, int, float, None],
    ) -> np.ndarray:
        """Returns the temperature forecast for a given date range.

        Args:
            start_date (datetime | date | str | int | float | None): Start date.
            end_date (datetime | date | str | int | float | None): End date.

        Returns:
            np.ndarray: Array containing temperature forecasts for each hour within the date range.
        """
        if not self._tz_name:
            raise NotImplementedError(
                "Processing without PV system timezone info is not implemented!"
            )
        start_date = to_datetime(start_date, to_timezone=self._tz_name, to_naiv=True).date()
        end_date = to_datetime(end_date, to_timezone=self._tz_name, to_naiv=True).date()
        date_range_forecast = []

        for data in self.forecast_data:
            data_date = data.get_date_time().date()
            if start_date <= data_date <= end_date:
                date_range_forecast.append(data)

        temperature_forecast = [data.get_temperature() for data in date_range_forecast]
        return np.array(temperature_forecast)[: self.prediction_hours]

    def get_forecast_dataframe(self) -> pd.DataFrame:
        """Converts the forecast data into a Pandas DataFrame.

        Returns:
            pd.DataFrame: A DataFrame containing the forecast data with columns for date/time,
                DC power, AC power, windspeed, and temperature.
        """
        data = [
            {
                "date_time": f.get_date_time(),
                "dc_power": f.get_dc_power(),
                "ac_power": f.get_ac_power(),
                "windspeed_10m": f.get_windspeed_10m(),
                "temperature": f.get_temperature(),
            }
            for f in self.forecast_data
        ]

        # Build a DataFrame
        df = pd.DataFrame(data)
        return df

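    # Editor's note: a minimal usage sketch (not from the original module) showing a
    # common follow-up with pandas: index by timestamp and resample the hourly AC
    # power to daily aggregates.
    #
    #     df = forecast.get_forecast_dataframe().set_index("date_time")
    #     daily_ac = df["ac_power"].resample("D").sum()  # assumes hourly rows
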
    def get_forecast_start(self) -> Optional[datetime]:
        """Return the start of the forecast data in local timezone.

        Returns:
            forecast_start (datetime | None): The start datetime or None if no data available.
        """
        if not self._forecast_start:
            return None
        return to_datetime(
            self._forecast_start, to_timezone=self._tz_name, to_naiv=True, to_maxtime=False
        )

    def report_ac_power_and_measurement(self) -> str:
        """Report DC/AC power and AC power measurement for each forecast hour.

        For each forecast entry, the time, DC power, forecasted AC power, measured AC power
        (if available), and the value returned by the `get_ac_power` method are reported.

        Returns:
            str: The report.
        """
        rep = ""
        for forecast in self.forecast_data:
            date_time = forecast.date_time
            dc_pow = round(forecast.dc_power, 2) if forecast.dc_power else None
            ac_pow = round(forecast.ac_power, 2) if forecast.ac_power else None
            ac_pow_measurement = (
                round(forecast.ac_power_measurement, 2) if forecast.ac_power_measurement else None
            )
            get_ac_pow = round(forecast.get_ac_power(), 2) if forecast.get_ac_power() else None
            rep += (
                f"Date&Time: {date_time}, DC: {dc_pow}, AC: {ac_pow}, "
                f"AC measured: {ac_pow_measurement}, AC GET: {get_ac_pow}"
                "\n"
            )
        return rep

# Example of how to use the PVForecast class
if __name__ == "__main__":
    """Main execution block to demonstrate the use of the PVForecast class.

    Fetches PV power forecast data from a given URL, updates the AC power measurement
    for the current date/time, and prints the DC and AC power information.
    """
    forecast = PVForecast(
        prediction_hours=24,
        url="https://api.akkudoktor.net/forecast?lat=52.52&lon=13.405&"
        "power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&"
        "power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&"
        "power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&"
        "power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&"
        "past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&"
        "hourly=relativehumidity_2m%2Cwindspeed_10m",
    )
    forecast.update_ac_power_measurement(ac_power_measurement=1000, date_time=datetime.now())
    print(forecast.report_ac_power_and_measurement())

src/akkudoktoreos/prediction/pvforecast.py (new file, 453 lines)
@ -0,0 +1,453 @@
"""PV forecast module for PV power predictions."""
|
||||||
|
|
||||||
|
from typing import Any, ClassVar, List, Optional
|
||||||
|
|
||||||
|
from pydantic import Field, computed_field
|
||||||
|
|
||||||
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
from akkudoktoreos.utils.logutil import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PVForecastCommonSettings(SettingsBaseModel):
|
||||||
|
# General plane parameters
|
||||||
|
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html
|
||||||
|
# Inverter Parameters
|
||||||
|
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/inverter.html
|
||||||
|
|
||||||
|
pvforecast_provider: Optional[str] = Field(
|
||||||
|
default=None, description="PVForecast provider id of provider to be used."
|
||||||
|
)
|
||||||
|
# pvforecast0_latitude: Optional[float] = Field(default=None, description="Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)")
|
||||||
|
    # Plane 0
    pvforecast0_surface_tilt: Optional[float] = Field(
        default=0, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
    )
    pvforecast0_surface_azimuth: Optional[float] = Field(
        default=180,
        description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
    )
    pvforecast0_userhorizon: Optional[List[float]] = Field(
        default=None,
        description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
    )
    pvforecast0_peakpower: Optional[float] = Field(
        default=None, description="Nominal power of PV system in kW."
    )
    pvforecast0_pvtechchoice: Optional[str] = Field(
        default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
    )
    pvforecast0_mountingplace: Optional[str] = Field(
        default="free",
        description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
    )
    pvforecast0_loss: Optional[float] = Field(
        default=None, description="Sum of PV system losses in percent"
    )
    pvforecast0_trackingtype: Optional[int] = Field(
        default=0,
        description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
    )
    pvforecast0_optimal_surface_tilt: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
    )
    pvforecast0_optimalangles: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
    )
    pvforecast0_albedo: Optional[float] = Field(
        default=None,
        description="Proportion of the light hitting the ground that it reflects back.",
    )
    pvforecast0_module_model: Optional[str] = Field(
        default=None, description="Model of the PV modules of this plane."
    )
    pvforecast0_inverter_model: Optional[str] = Field(
        default=None, description="Model of the inverter of this plane."
    )
    pvforecast0_inverter_paco: Optional[int] = Field(
        default=None, description="AC power rating of the inverter. [W]"
    )
    pvforecast0_modules_per_string: Optional[str] = Field(
        default=None, description="Number of the PV modules of the strings of this plane."
    )
    pvforecast0_strings_per_inverter: Optional[str] = Field(
        default=None, description="Number of the strings of the inverter of this plane."
    )
    # Plane 1
    pvforecast1_surface_tilt: Optional[float] = Field(
        default=0, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
    )
    pvforecast1_surface_azimuth: Optional[float] = Field(
        default=180,
        description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
    )
    pvforecast1_userhorizon: Optional[List[float]] = Field(
        default=None,
        description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
    )
    pvforecast1_peakpower: Optional[float] = Field(
        default=None, description="Nominal power of PV system in kW."
    )
    pvforecast1_pvtechchoice: Optional[str] = Field(
        default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
    )
    pvforecast1_mountingplace: Optional[str] = Field(
        default="free",
        description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
    )
    pvforecast1_loss: Optional[float] = Field(default=0, description="Sum of PV system losses in percent")
    pvforecast1_trackingtype: Optional[int] = Field(
        default=0,
        description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
    )
    pvforecast1_optimal_surface_tilt: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
    )
    pvforecast1_optimalangles: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
    )
    pvforecast1_albedo: Optional[float] = Field(
        default=None,
        description="Proportion of the light hitting the ground that it reflects back.",
    )
    pvforecast1_module_model: Optional[str] = Field(
        default=None, description="Model of the PV modules of this plane."
    )
    pvforecast1_inverter_model: Optional[str] = Field(
        default=None, description="Model of the inverter of this plane."
    )
    pvforecast1_inverter_paco: Optional[int] = Field(
        default=None, description="AC power rating of the inverter. [W]"
    )
    pvforecast1_modules_per_string: Optional[str] = Field(
        default=None, description="Number of the PV modules of the strings of this plane."
    )
    pvforecast1_strings_per_inverter: Optional[str] = Field(
        default=None, description="Number of the strings of the inverter of this plane."
    )
    # Plane 2
    pvforecast2_surface_tilt: Optional[float] = Field(
        default=0, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
    )
    pvforecast2_surface_azimuth: Optional[float] = Field(
        default=180,
        description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
    )
    pvforecast2_userhorizon: Optional[List[float]] = Field(
        default=None,
        description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
    )
    pvforecast2_peakpower: Optional[float] = Field(
        default=None, description="Nominal power of PV system in kW."
    )
    pvforecast2_pvtechchoice: Optional[str] = Field(
        default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
    )
    pvforecast2_mountingplace: Optional[str] = Field(
        default="free",
        description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
    )
    pvforecast2_loss: Optional[float] = Field(default=0, description="Sum of PV system losses in percent")
    pvforecast2_trackingtype: Optional[int] = Field(
        default=0,
        description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
    )
    pvforecast2_optimal_surface_tilt: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
    )
    pvforecast2_optimalangles: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
    )
    pvforecast2_albedo: Optional[float] = Field(
        default=None,
        description="Proportion of the light hitting the ground that it reflects back.",
    )
    pvforecast2_module_model: Optional[str] = Field(
        default=None, description="Model of the PV modules of this plane."
    )
    pvforecast2_inverter_model: Optional[str] = Field(
        default=None, description="Model of the inverter of this plane."
    )
    pvforecast2_inverter_paco: Optional[int] = Field(
        default=None, description="AC power rating of the inverter. [W]"
    )
    pvforecast2_modules_per_string: Optional[str] = Field(
        default=None, description="Number of the PV modules of the strings of this plane."
    )
    pvforecast2_strings_per_inverter: Optional[str] = Field(
        default=None, description="Number of the strings of the inverter of this plane."
    )
    # Plane 3
    pvforecast3_surface_tilt: Optional[float] = Field(
        default=0, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
    )
    pvforecast3_surface_azimuth: Optional[float] = Field(
        default=180,
        description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
    )
    pvforecast3_userhorizon: Optional[List[float]] = Field(
        default=None,
        description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
    )
    pvforecast3_peakpower: Optional[float] = Field(
        default=None, description="Nominal power of PV system in kW."
    )
    pvforecast3_pvtechchoice: Optional[str] = Field(
        default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
    )
    pvforecast3_mountingplace: Optional[str] = Field(
        default="free",
        description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
    )
    pvforecast3_loss: Optional[float] = Field(default=0, description="Sum of PV system losses in percent")
    pvforecast3_trackingtype: Optional[int] = Field(
        default=0,
        description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
    )
    pvforecast3_optimal_surface_tilt: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
    )
    pvforecast3_optimalangles: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
    )
    pvforecast3_albedo: Optional[float] = Field(
        default=None,
        description="Proportion of the light hitting the ground that it reflects back.",
    )
    pvforecast3_module_model: Optional[str] = Field(
        default=None, description="Model of the PV modules of this plane."
    )
    pvforecast3_inverter_model: Optional[str] = Field(
        default=None, description="Model of the inverter of this plane."
    )
    pvforecast3_inverter_paco: Optional[int] = Field(
        default=None, description="AC power rating of the inverter. [W]"
    )
    pvforecast3_modules_per_string: Optional[str] = Field(
        default=None, description="Number of the PV modules of the strings of this plane."
    )
    pvforecast3_strings_per_inverter: Optional[str] = Field(
        default=None, description="Number of the strings of the inverter of this plane."
    )
    # Plane 4
    pvforecast4_surface_tilt: Optional[float] = Field(
        default=0, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
    )
    pvforecast4_surface_azimuth: Optional[float] = Field(
        default=180,
        description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
    )
    pvforecast4_userhorizon: Optional[List[float]] = Field(
        default=None,
        description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
    )
    pvforecast4_peakpower: Optional[float] = Field(
        default=None, description="Nominal power of PV system in kW."
    )
    pvforecast4_pvtechchoice: Optional[str] = Field(
        default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
    )
    pvforecast4_mountingplace: Optional[str] = Field(
        default="free",
        description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
    )
    pvforecast4_loss: Optional[float] = Field(default=0, description="Sum of PV system losses in percent")
    pvforecast4_trackingtype: Optional[int] = Field(
        default=0,
        description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
    )
    pvforecast4_optimal_surface_tilt: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
    )
    pvforecast4_optimalangles: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
    )
    pvforecast4_albedo: Optional[float] = Field(
        default=None,
        description="Proportion of the light hitting the ground that it reflects back.",
    )
    pvforecast4_module_model: Optional[str] = Field(
        default=None, description="Model of the PV modules of this plane."
    )
    pvforecast4_inverter_model: Optional[str] = Field(
        default=None, description="Model of the inverter of this plane."
    )
    pvforecast4_inverter_paco: Optional[int] = Field(
        default=None, description="AC power rating of the inverter. [W]"
    )
    pvforecast4_modules_per_string: Optional[str] = Field(
        default=None, description="Number of the PV modules of the strings of this plane."
    )
    pvforecast4_strings_per_inverter: Optional[str] = Field(
        default=None, description="Number of the strings of the inverter of this plane."
    )
    # Plane 5
    pvforecast5_surface_tilt: Optional[float] = Field(
        default=0, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
    )
    pvforecast5_surface_azimuth: Optional[float] = Field(
        default=180,
        description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
    )
    pvforecast5_userhorizon: Optional[List[float]] = Field(
        default=None,
        description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
    )
    pvforecast5_peakpower: Optional[float] = Field(
        default=None, description="Nominal power of PV system in kW."
    )
    pvforecast5_pvtechchoice: Optional[str] = Field(
        default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
    )
    pvforecast5_mountingplace: Optional[str] = Field(
        default="free",
        description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
    )
    pvforecast5_loss: Optional[float] = Field(default=0, description="Sum of PV system losses in percent")
    pvforecast5_trackingtype: Optional[int] = Field(
        default=0,
        description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
    )
    pvforecast5_optimal_surface_tilt: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
    )
    pvforecast5_optimalangles: Optional[bool] = Field(
        default=False,
        description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
    )
    pvforecast5_albedo: Optional[float] = Field(
        default=None,
        description="Proportion of the light hitting the ground that it reflects back.",
    )
    pvforecast5_module_model: Optional[str] = Field(
        default=None, description="Model of the PV modules of this plane."
    )
    pvforecast5_inverter_model: Optional[str] = Field(
        default=None, description="Model of the inverter of this plane."
    )
    pvforecast5_inverter_paco: Optional[int] = Field(
        default=None, description="AC power rating of the inverter. [W]"
    )
    pvforecast5_modules_per_string: Optional[str] = Field(
        default=None, description="Number of the PV modules of the strings of this plane."
    )
    pvforecast5_strings_per_inverter: Optional[str] = Field(
        default=None, description="Number of the strings of the inverter of this plane."
    )

    pvforecast_max_planes: ClassVar[int] = 6  # Maximum number of planes that can be set

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecast_planes(self) -> List[str]:
        """Compute a list of active planes."""
        active_planes = []

        # Loop through pvforecast0 to pvforecast5
        for i in range(self.pvforecast_max_planes):
            peakpower_attr = f"pvforecast{i}_peakpower"
            modules_attr = f"pvforecast{i}_modules_per_string"

            # Check if either attribute is set and add to active planes
            if getattr(self, peakpower_attr, None) or getattr(self, modules_attr, None):
                active_planes.append(f"pvforecast{i}")

        return active_planes

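    # Editor's note: a minimal sketch (not from the original module) of how plane
    # activation works. A plane counts as active once its peak power or its
    # modules-per-string setting is non-empty:
    #
    #     settings = PVForecastCommonSettings(
    #         pvforecast0_peakpower=5.0,
    #         pvforecast2_modules_per_string="10",
    #     )
    #     assert settings.pvforecast_planes == ["pvforecast0", "pvforecast2"]
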
    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecast_planes_peakpower(self) -> List[float]:
        """Compute a list of the peak power per active planes."""
        planes_peakpower = []

        for plane in self.pvforecast_planes:
            peakpower_attr = f"{plane}_peakpower"
            peakpower = getattr(self, peakpower_attr, None)
            if peakpower:
                planes_peakpower.append(float(peakpower))
                continue
            # TODO calculate peak power from modules/strings
            planes_peakpower.append(float(5000))

        return planes_peakpower

    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecast_planes_azimuth(self) -> List[float]:
        """Compute a list of the azimuths per active planes."""
        planes_azimuth = []

        for plane in self.pvforecast_planes:
            # Field name is pvforecastN_surface_azimuth. (The original looked up
            # f"{plane}_azimuth", which never matches a field and therefore always
            # fell back to the default.)
            azimuth_attr = f"{plane}_surface_azimuth"
            azimuth = getattr(self, azimuth_attr, None)
            if azimuth:
                planes_azimuth.append(float(azimuth))
                continue
            # TODO Use default
            planes_azimuth.append(float(180))

        return planes_azimuth

    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecast_planes_tilt(self) -> List[float]:
        """Compute a list of the tilts per active planes."""
        planes_tilt = []

        for plane in self.pvforecast_planes:
            # Field name is pvforecastN_surface_tilt. (The original looked up
            # f"{plane}_tilt", which never matches a field and therefore always
            # fell back to the default.)
            tilt_attr = f"{plane}_surface_tilt"
            tilt = getattr(self, tilt_attr, None)
            if tilt:
                planes_tilt.append(float(tilt))
                continue
            # TODO Use default
            planes_tilt.append(float(0))

        return planes_tilt

    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecast_planes_userhorizon(self) -> Any:
        """Compute a list of the user horizon per active planes."""
        planes_userhorizon = []

        for plane in self.pvforecast_planes:
            userhorizon_attr = f"{plane}_userhorizon"
            userhorizon = getattr(self, userhorizon_attr, None)
            if userhorizon:
                planes_userhorizon.append(userhorizon)
                continue
            # TODO Use default
            planes_userhorizon.append([float(0), float(0)])

        return planes_userhorizon

    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecast_planes_inverter_paco(self) -> Any:
        """Compute a list of the maximum power rating of the inverter per active planes."""
        planes_inverter_paco = []

        for plane in self.pvforecast_planes:
            inverter_paco_attr = f"{plane}_inverter_paco"
            inverter_paco = getattr(self, inverter_paco_attr, None)
            if inverter_paco:
                planes_inverter_paco.append(inverter_paco)
                continue
            # TODO Use default - no clipping
            planes_inverter_paco.append(25000)

        return planes_inverter_paco
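
    # Editor's note: a minimal usage sketch (not from the original module). The
    # computed fields expose one entry per active plane, with defaults filled in
    # where a plane leaves a value unset:
    #
    #     settings = PVForecastCommonSettings(
    #         pvforecast0_peakpower=5.0,
    #         pvforecast0_surface_azimuth=-10,
    #         pvforecast1_peakpower=4.8,
    #     )
    #     settings.pvforecast_planes_peakpower  # [5.0, 4.8]
    #     settings.pvforecast_planes_azimuth    # [-10.0, 180.0] (plane 1 uses the field default)
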

src/akkudoktoreos/prediction/pvforecastabc.py (new file, 59 lines)
@ -0,0 +1,59 @@
"""Abstract and base classes for pvforecast predictions.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- Ensure appropriate API keys or configurations are set up if required by external data sources.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import abstractmethod
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
|
||||||
|
from akkudoktoreos.utils.logutil import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PVForecastDataRecord(PredictionRecord):
|
||||||
|
"""Represents a pvforecast data record containing various pvforecast attributes at a specific datetime."""
|
||||||
|
|
||||||
|
pvforecast_dc_power: Optional[float] = Field(default=None, description="Total DC power (W)")
|
||||||
|
pvforecast_ac_power: Optional[float] = Field(default=None, description="Total AC power (W)")
|
||||||
|
|
||||||
|
|
||||||
|
class PVForecastProvider(PredictionProvider):
|
||||||
|
"""Abstract base class for pvforecast providers.
|
||||||
|
|
||||||
|
PVForecastProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||||
|
|
||||||
|
Configuration variables:
|
||||||
|
pvforecast_provider (str): Prediction provider for pvforecast.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
|
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
|
start_datetime (datetime, optional): The starting datetime for predictions (inlcusive), defaults to the current datetime if unspecified.
|
||||||
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range (exclusive),
|
||||||
|
calculated based on `start_datetime` and `prediction_hours`.
|
||||||
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data (inclusive), calculated
|
||||||
|
based on `start_datetime` and `prediction_historic_hours`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# overload
|
||||||
|
records: List[PVForecastDataRecord] = Field(
|
||||||
|
default_factory=list, description="List of PVForecastDataRecord records"
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@abstractmethod
|
||||||
|
def provider_id(cls) -> str:
|
||||||
|
return "PVForecastProvider"
|
||||||
|
|
||||||
|
def enabled(self) -> bool:
|
||||||
|
logger.debug(
|
||||||
|
f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast_provider}"
|
||||||
|
)
|
||||||
|
return self.provider_id() == self.config.pvforecast_provider
|
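
# Editor's note: a minimal sketch (not from the original module) of a hypothetical
# concrete provider. Subclasses implement provider_id(); enabled() then compares it
# against the configured pvforecast_provider. Other abstract members inherited from
# PredictionProvider, if any, are omitted here:
#
#     class PVForecastDummy(PVForecastProvider):
#         @classmethod
#         def provider_id(cls) -> str:
#             return "PVForecastDummy"
#
# With config.pvforecast_provider == "PVForecastDummy", enabled() returns True.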

src/akkudoktoreos/prediction/pvforecastakkudoktor.py (new file, 396 lines)
@ -0,0 +1,396 @@
"""PV Power Forecasting with Akkudoktor.
|
||||||
|
|
||||||
|
This module provides classes and methods to retrieve, process, and display photovoltaic (PV) power forecast data. It includes features for working with environmental data such as temperature, wind speed, DC power, and AC power. Data retrieval is designed to work with Akkudoktor.net, and caching is implemented to reduce redundant network requests. Additionally, the module supports management of historical data for analysis over time.
|
||||||
|
|
||||||
|
Classes:
|
||||||
|
AkkudoktorForecastHorizon: Represents details about the orientation of PV system horizons.
|
||||||
|
AkkudoktorForecastMeta: Metadata configuration for the forecast, including location, system settings, and timezone.
|
||||||
|
AkkudoktorForecastValue: Represents a single forecast data entry with information on temperature, wind speed, and solar orientation.
|
||||||
|
AkkudoktorForecast: The main container for forecast data, holding both metadata and individual forecast entries.
|
||||||
|
PVForecastAkkudoktorDataRecord: A specialized data record format for PV forecast data, including forecasted and actual AC power measurements.
|
||||||
|
PVForecastAkkudoktorSettings: Contains configuration settings for constructing the Akkudoktor forecast API URL.
|
||||||
|
PVForecastAkkudoktor: Primary class to manage PV power forecasts, handle data retrieval, caching, and integration with Akkudoktor.net.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
# Set up the configuration with necessary fields for URL generation
|
||||||
|
settings_data = {
|
||||||
|
"prediction_hours": 48,
|
||||||
|
"prediction_historic_hours": 24,
|
||||||
|
"latitude": 52.52,
|
||||||
|
"longitude": 13.405,
|
||||||
|
"pvforecast_provider": "Akkudoktor",
|
||||||
|
"pvforecast0_peakpower": 5.0,
|
||||||
|
"pvforecast0_surface_azimuth": -10,
|
||||||
|
"pvforecast0_surface_tilt": 7,
|
||||||
|
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||||
|
"pvforecast0_inverter_paco": 10000,
|
||||||
|
"pvforecast1_peakpower": 4.8,
|
||||||
|
"pvforecast1_surface_azimuth": -90,
|
||||||
|
"pvforecast1_surface_tilt": 7,
|
||||||
|
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||||
|
"pvforecast1_inverter_paco": 10000,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Create the config instance from the provided data
|
||||||
|
config = PVForecastAkkudoktorSettings(**settings_data)
|
||||||
|
|
||||||
|
# Initialize the forecast object with the generated configuration
|
||||||
|
forecast = PVForecastAkkudoktor(settings=config)
|
||||||
|
|
||||||
|
# Get an actual forecast
|
||||||
|
forecast.update_data()
|
||||||
|
|
||||||
|
# Update the AC power measurement for a specific date and time
|
||||||
|
forecast.update_value(to_datetime(None, to_maxtime=False), "pvforecastakkudoktor_ac_power_measured", 1000.0)
|
||||||
|
|
||||||
|
# Report the DC and AC power forecast along with AC measurements
|
||||||
|
print(forecast.report_ac_power_and_measurement())
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
prediction_hours (int): Number of hours into the future to forecast. Default is 48.
|
||||||
|
prediction_historic_hours (int): Number of past hours to retain for analysis. Default is 24.
|
||||||
|
latitude (float): Latitude for the forecast location.
|
||||||
|
longitude (float): Longitude for the forecast location.
|
||||||
|
start_datetime (datetime): Start time for the forecast, defaulting to current datetime.
|
||||||
|
end_datetime (datetime): Computed end datetime based on `start_datetime` and `prediction_hours`.
|
||||||
|
keep_datetime (datetime): Computed threshold datetime for retaining historical data.
|
||||||
|
|
||||||
|
Methods:
|
||||||
|
provider_id(): Returns the unique identifier for the Akkudoktor provider.
|
||||||
|
_request_forecast(): Retrieves forecast data from the Akkudoktor API.
|
||||||
|
_update_data(): Updates forecast data within the PVForecastAkkudoktorDataRecord structure.
|
||||||
|
report_ac_power_and_measurement(): Generates a report on AC and DC power forecasts and actual measurements.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|

from typing import Any, List, Optional, Union

import requests
from pydantic import Field, ValidationError, computed_field

from akkudoktoreos.core.pydantic import PydanticBaseModel
from akkudoktoreos.prediction.pvforecastabc import (
    PVForecastDataRecord,
    PVForecastProvider,
)
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class AkkudoktorForecastHorizon(PydanticBaseModel):
    altitude: int
    azimuthFrom: int
    azimuthTo: int


class AkkudoktorForecastMeta(PydanticBaseModel):
    lat: float
    lon: float
    power: List[int]
    azimuth: List[int]
    tilt: List[int]
    timezone: str
    albedo: float
    past_days: int
    inverterEfficiency: float
    powerInverter: List[int]
    cellCoEff: float
    range: bool
    horizont: List[List[AkkudoktorForecastHorizon]]
    horizontString: List[str]


class AkkudoktorForecastValue(PydanticBaseModel):
    datetime: str
    dcPower: float
    power: float
    sunTilt: float
    sunAzimuth: float
    temperature: float
    relativehumidity_2m: float
    windspeed_10m: float


class AkkudoktorForecast(PydanticBaseModel):
    meta: AkkudoktorForecastMeta
    values: List[List[AkkudoktorForecastValue]]

class PVForecastAkkudoktorDataRecord(PVForecastDataRecord):
    """Represents an Akkudoktor-specific pvforecast data record containing various pvforecast attributes at a specific datetime."""

    pvforecastakkudoktor_ac_power_measured: Optional[float] = Field(
        default=None, description="Total AC power measured (W)"
    )
    pvforecastakkudoktor_wind_speed_10m: Optional[float] = Field(
        default=None, description="Wind Speed 10m (kmph)"
    )
    pvforecastakkudoktor_temp_air: Optional[float] = Field(
        default=None, description="Temperature (°C)"
    )

    # Computed fields
    @computed_field  # type: ignore[prop-decorator]
    @property
    def pvforecastakkudoktor_ac_power_any(self) -> Optional[float]:
        """Returns the AC power.

        If a measured value is available, it returns the measured AC power;
        otherwise, it returns the forecasted AC power.

        Returns:
            float: AC power in watts or None if no forecast data is available.
        """
        if self.pvforecastakkudoktor_ac_power_measured is not None:
            return self.pvforecastakkudoktor_ac_power_measured
        else:
            return self.pvforecast_ac_power

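# Editor's note: a minimal sketch (not from the original module) of the
# measured-over-forecast precedence implemented above, using hypothetical values;
# additional required record fields, if any, are omitted:
#
#     record = PVForecastAkkudoktorDataRecord(
#         date_time=to_datetime("2024-11-02T12:00:00+01:00"),
#         pvforecast_ac_power=4810.0,
#     )
#     record.pvforecastakkudoktor_ac_power_any  # 4810.0 (forecast)
#     record.pvforecastakkudoktor_ac_power_measured = 4650.0
#     record.pvforecastakkudoktor_ac_power_any  # 4650.0 (measurement wins)
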
class PVForecastAkkudoktor(PVForecastProvider):
    """Fetch and process PV forecast data from akkudoktor.net.

    PVForecastAkkudoktor is a singleton-based class that retrieves PV power forecast data
    from the akkudoktor.net API and maps it to `PVForecastDataRecord` fields, applying
    any necessary scaling or unit corrections. It manages the forecast over a range
    of hours into the future and retains historical data.

    Attributes:
        prediction_hours (int, optional): Number of hours in the future for the forecast.
        prediction_historic_hours (int, optional): Number of past hours for retaining data.
        latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
        longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
        start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.

    Methods:
        provider_id(): Returns a unique identifier for the provider.
        _request_forecast(): Fetches the forecast from the Akkudoktor API.
        _update_data(): Processes and updates forecast data from Akkudoktor in PVForecastDataRecord format.
    """

    # overload
    records: List[PVForecastAkkudoktorDataRecord] = Field(
        default_factory=list, description="List of PVForecastAkkudoktorDataRecord records"
    )

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the Akkudoktor provider."""
        return "PVForecastAkkudoktor"

    @classmethod
    def _validate_data(cls, json_str: Union[bytes, Any]) -> AkkudoktorForecast:
        """Validate Akkudoktor PV forecast data."""
        try:
            akkudoktor_data = AkkudoktorForecast.model_validate_json(json_str)
        except ValidationError as e:
            error_msg = ""
            for error in e.errors():
                field = " -> ".join(str(x) for x in error["loc"])
                message = error["msg"]
                error_type = error["type"]
                error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
            logger.error(f"Akkudoktor schema change: {error_msg}")
            raise ValueError(error_msg)
        return akkudoktor_data

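    # Editor's note: a minimal sketch (not from the original module) of the pydantic
    # validation used by _validate_data, shown on the smaller AkkudoktorForecastValue
    # schema with hypothetical sample values:
    #
    #     import json
    #
    #     sample = json.dumps({
    #         "datetime": "2024-11-02T12:00:00+01:00",
    #         "dcPower": 5120.0, "power": 4810.0,
    #         "sunTilt": 21.3, "sunAzimuth": 174.8,
    #         "temperature": 8.5, "relativehumidity_2m": 71.0, "windspeed_10m": 12.4,
    #     })
    #     value = AkkudoktorForecastValue.model_validate_json(sample)
    #     value.power  # 4810.0
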
    def _url(self) -> str:
        """Build akkudoktor.net API request URL."""
        url = f"https://api.akkudoktor.net/forecast?lat={self.config.latitude}&lon={self.config.longitude}&"
        planes_peakpower = self.config.pvforecast_planes_peakpower
        planes_azimuth = self.config.pvforecast_planes_azimuth
        planes_tilt = self.config.pvforecast_planes_tilt
        planes_inverter_paco = self.config.pvforecast_planes_inverter_paco
        planes_userhorizon = self.config.pvforecast_planes_userhorizon
        for i, plane in enumerate(self.config.pvforecast_planes):
            url += f"power={int(planes_peakpower[i]*1000)}&"
            url += f"azimuth={int(planes_azimuth[i])}&"
            url += f"tilt={int(planes_tilt[i])}&"
            url += f"powerInverter={int(planes_inverter_paco[i])}&"
            url += "horizont="
            for horizon in planes_userhorizon[i]:
                url += f"{int(horizon)},"
            url = url[:-1]  # remove trailing comma
            url += "&"
        url += "past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&"
        url += f"timezone={self.config.timezone}&"
        url += "hourly=relativehumidity_2m%2Cwindspeed_10m"
        logger.debug(f"Akkudoktor URL: {url}")
        return url

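    # Editor's note: a minimal sketch (not from the original module) of the per-plane
    # query fragment assembled by _url(), using hypothetical plane values. Peak power
    # is configured in kW and converted to W for the API:
    #
    #     peak_kw, azimuth, tilt, paco = 5.0, -10, 7, 10000
    #     horizon = [20, 27, 22, 20]
    #     fragment = (
    #         f"power={int(peak_kw * 1000)}&azimuth={int(azimuth)}&tilt={int(tilt)}&"
    #         f"powerInverter={int(paco)}&horizont="
    #         + ",".join(str(int(h)) for h in horizon) + "&"
    #     )
    #     # -> power=5000&azimuth=-10&tilt=7&powerInverter=10000&horizont=20,27,22,20&
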
    @cache_in_file(with_ttl="1 hour")
    def _request_forecast(self) -> AkkudoktorForecast:
        """Fetch PV forecast data from Akkudoktor API.

        This method sends a request to the Akkudoktor API to retrieve forecast data
        for a specified date range and location. The response is validated and
        returned as an `AkkudoktorForecast` model for further processing.

        Returns:
            AkkudoktorForecast: The validated response from the Akkudoktor API containing forecast data.

        Raises:
            ValueError: If the API response does not match the expected schema.
        """
        response = requests.get(self._url())
        response.raise_for_status()  # Raise an error for bad responses
        logger.debug(f"Response from {self._url()}: {response}")
        akkudoktor_data = self._validate_data(response.content)
        # We are working on fresh data (no cache), report update time
        self.update_datetime = to_datetime(in_timezone=self.config.timezone)
        return akkudoktor_data

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        """Update forecast data in the PVForecastAkkudoktorDataRecord format.

        Retrieves data from Akkudoktor. The processed data is inserted into the sequence as
        `PVForecastAkkudoktorDataRecord`.
        """
        # Assure we have something to request PV power for.
        if len(self.config.pvforecast_planes) == 0:
            # No planes for PV
            error_msg = "Requested PV forecast, but no planes configured."
            logger.error(f"Configuration error: {error_msg}")
            raise ValueError(error_msg)

        # Get Akkudoktor PV Forecast data for the given configuration.
        akkudoktor_data = self._request_forecast(force_update=force_update)  # type: ignore

        # Timezone of the PV system
        if self.config.timezone != akkudoktor_data.meta.timezone:
            error_msg = f"Configured timezone '{self.config.timezone}' does not match Akkudoktor timezone '{akkudoktor_data.meta.timezone}'."
            logger.error(f"Akkudoktor schema change: {error_msg}")
            raise ValueError(error_msg)

        # Assumption that all lists are the same length and are ordered chronologically
        # in ascending order and have the same timestamps.
        values_len = len(akkudoktor_data.values[0])
        if values_len < self.config.prediction_hours:
            # Expect one value set per prediction hour
            error_msg = (
                f"The forecast must cover at least {self.config.prediction_hours} hours, "
                f"but only {values_len} data sets are given in forecast data."
            )
            logger.error(f"Akkudoktor schema change: {error_msg}")
            raise ValueError(error_msg)

        for i in range(values_len):
            original_datetime = akkudoktor_data.values[0][i].datetime
            dt = to_datetime(original_datetime, in_timezone=self.config.timezone)

            if compare_datetimes(dt, self.start_datetime).lt:
                # forecast data is too old
                continue

            sum_dc_power = sum(values[i].dcPower for values in akkudoktor_data.values)
            sum_ac_power = sum(values[i].power for values in akkudoktor_data.values)

            record = PVForecastAkkudoktorDataRecord(
                date_time=dt,  # use the timezone-adjusted timestamp
                pvforecast_dc_power=sum_dc_power,
                pvforecast_ac_power=sum_ac_power,
                pvforecastakkudoktor_wind_speed_10m=akkudoktor_data.values[0][i].windspeed_10m,
                pvforecastakkudoktor_temp_air=akkudoktor_data.values[0][i].temperature,
            )
            self.append(record)

        if len(self) < self.config.prediction_hours:
            raise ValueError(
                f"The forecast must cover at least {self.config.prediction_hours} hours, "
                f"but only {len(self)} hours starting from {self.start_datetime} "
                f"were predicted."
            )

def report_ac_power_and_measurement(self) -> str:
|
||||||
|
"""Report DC/ AC power, and AC power measurement for each forecast hour.
|
||||||
|
|
||||||
|
For each forecast entry, the time, DC power, forecasted AC power, measured AC power
|
||||||
|
(if available), and the value returned by the `get_ac_power` method is provided.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The report.
|
||||||
|
"""
|
||||||
|
rep = ""
|
||||||
|
for record in self.records:
|
||||||
|
date_time = record.date_time
|
||||||
|
dc_pow = round(record.pvforecast_dc_power, 2) if record.pvforecast_dc_power else None
|
||||||
|
ac_pow = round(record.pvforecast_ac_power, 2) if record.pvforecast_ac_power else None
|
||||||
|
ac_pow_measurement = (
|
||||||
|
round(record.pvforecastakkudoktor_ac_power_measured, 2)
|
||||||
|
if record.pvforecastakkudoktor_ac_power_measured
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
ac_pow_any = (
|
||||||
|
round(record.pvforecastakkudoktor_ac_power_any, 2)
|
||||||
|
if record.pvforecastakkudoktor_ac_power_any
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
rep += (
|
||||||
|
f"Date&Time: {date_time}, DC: {dc_pow}, AC: {ac_pow}, "
|
||||||
|
f"AC sampled: {ac_pow_measurement}, AC any: {ac_pow_any}"
|
||||||
|
"\n"
|
||||||
|
)
|
||||||
|
return rep
|
||||||
|
|
||||||
|
|
||||||
|
# Example of how to use the PVForecastAkkudoktor class
|
||||||
|
if __name__ == "__main__":
|
||||||
|
"""Main execution block to demonstrate the use of the PVForecastAkkudoktor class.
|
||||||
|
|
||||||
|
Sets up the forecast configuration fields, fetches PV power forecast data,
|
||||||
|
updates the AC power measurement for the current date/time, and prints
|
||||||
|
the DC and AC power information.
|
||||||
|
"""
|
||||||
|
# Set up the configuration with necessary fields for URL generation
|
||||||
|
settings_data = {
|
||||||
|
"prediction_hours": 48,
|
||||||
|
"prediction_historic_hours": 24,
|
||||||
|
"latitude": 52.52,
|
||||||
|
"longitude": 13.405,
|
||||||
|
"pvforecast_provider": "PVForecastAkkudoktor",
|
||||||
|
"pvforecast0_peakpower": 5.0,
|
||||||
|
"pvforecast0_surface_azimuth": -10,
|
||||||
|
"pvforecast0_surface_tilt": 7,
|
||||||
|
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
||||||
|
"pvforecast0_inverter_paco": 10000,
|
||||||
|
"pvforecast1_peakpower": 4.8,
|
||||||
|
"pvforecast1_surface_azimuth": -90,
|
||||||
|
"pvforecast1_surface_tilt": 7,
|
||||||
|
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
||||||
|
"pvforecast1_inverter_paco": 10000,
|
||||||
|
"pvforecast2_peakpower": 1.4,
|
||||||
|
"pvforecast2_surface_azimuth": -40,
|
||||||
|
"pvforecast2_surface_tilt": 60,
|
||||||
|
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
||||||
|
"pvforecast2_inverter_paco": 2000,
|
||||||
|
"pvforecast3_peakpower": 1.6,
|
||||||
|
"pvforecast3_surface_azimuth": 5,
|
||||||
|
"pvforecast3_surface_tilt": 45,
|
||||||
|
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
||||||
|
"pvforecast3_inverter_paco": 1400,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Initialize the forecast object with the generated configuration
|
||||||
|
forecast = PVForecastAkkudoktor()
|
||||||
|
|
||||||
|
# Get an actual forecast
|
||||||
|
forecast.update_data()
|
||||||
|
|
||||||
|
# Update the AC power measurement for a specific date and time
|
||||||
|
forecast.update_value(
|
||||||
|
to_datetime(None, to_maxtime=False), "pvforecastakkudoktor_ac_power_measured", 1000.0
|
||||||
|
)
|
||||||
|
|
||||||
|
# Report the DC and AC power forecast along with AC measurements
|
||||||
|
print(forecast.report_ac_power_and_measurement())
|
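The demo above builds `settings_data` but never hands it to the configuration, so `update_data()` would run against the current defaults. A minimal sketch of applying the settings first, assuming `SettingsEOS` accepts the prediction fields shown (the `merge_settings` call is taken from the server module further below):

from akkudoktoreos.config.config import SettingsEOS, get_config

config_eos = get_config()
config_eos.merge_settings(SettingsEOS(**settings_data))  # sketch: apply the demo settings
forecast.update_data()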
69  src/akkudoktoreos/prediction/pvforecastimport.py  Normal file
@ -0,0 +1,69 @@
"""Retrieves pvforecast forecast data from an import file.

This module provides classes and mappings to manage pvforecast data obtained from
an import file, including support for pvforecast attributes such as DC power and
AC power. The data is mapped to the `PVForecastDataRecord` format, enabling
consistent access to forecasted and historical pvforecast attributes.
"""

from pathlib import Path
from typing import Optional, Union

from pydantic import Field, field_validator

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.prediction.pvforecastabc import PVForecastProvider
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class PVForecastImportCommonSettings(SettingsBaseModel):
    """Common settings for pvforecast data import from file."""

    pvforecastimport_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import pvforecast data from."
    )

    pvforecastimport_json: Optional[str] = Field(
        default=None,
        description="JSON string, dictionary of PV forecast float value lists."
        " Keys are 'pvforecast_dc_power', 'pvforecast_ac_power'.",
    )

    # Validators
    @field_validator("pvforecastimport_file_path", mode="after")
    @classmethod
    def validate_pvforecastimport_file_path(
        cls, value: Optional[Union[str, Path]]
    ) -> Optional[Path]:
        if value is None:
            return None
        if isinstance(value, str):
            value = Path(value)
        # Ensure the file is available.
        value = value.resolve()
        if not value.is_file():
            raise ValueError(f"Import file path '{value}' is not a file.")
        return value


class PVForecastImport(PVForecastProvider, PredictionImportProvider):
    """Fetch PV forecast data from import file or JSON string.

    PVForecastImport is a singleton-based class that retrieves pvforecast forecast data
    from a file or JSON string and maps it to `PVForecastDataRecord` fields. It manages the
    forecast over a range of hours into the future and retains historical data.
    """

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the PVForecastImport provider."""
        return "PVForecastImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        if self.config.pvforecastimport_file_path is not None:
            self.import_from_file(self.config.pvforecastimport_file_path, key_prefix="pvforecast")
        if self.config.pvforecastimport_json is not None:
            self.import_from_json(self.config.pvforecastimport_json, key_prefix="pvforecast")
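For illustration, a sketch of settings that feed this provider from a JSON string instead of a file. The keys follow the field description above; the exact list layout expected by `import_from_json` is defined in `PredictionImportProvider`, which is not part of this diff:

import json

settings = {
    "pvforecast_provider": "PVForecastImport",
    "pvforecastimport_json": json.dumps(
        {
            "pvforecast_dc_power": [1000.0, 1250.5, 0.0],  # hypothetical hourly values
            "pvforecast_ac_power": [950.0, 1200.0, 0.0],
        }
    ),
}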
13  src/akkudoktoreos/prediction/weather.py  Normal file
@ -0,0 +1,13 @@
"""Weather forecast module for weather predictions."""

from typing import Optional

from pydantic import Field

from akkudoktoreos.config.configabc import SettingsBaseModel


class WeatherCommonSettings(SettingsBaseModel):
    weather_provider: Optional[str] = Field(
        default="ClearOutside", description="Weather provider id of provider to be used."
    )
198  src/akkudoktoreos/prediction/weatherabc.py  Normal file
@ -0,0 +1,198 @@
"""Abstract and base classes for weather predictions.

Notes:
    - Supported weather sources can be expanded by adding new fetch methods within the
      WeatherForecast class.
    - Ensure appropriate API keys or configurations are set up if required by external data sources.
"""

from abc import abstractmethod
from typing import List, Optional

import numpy as np
import pandas as pd
import pvlib
from pydantic import Field

from akkudoktoreos.prediction.predictionabc import PredictionProvider, PredictionRecord
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class WeatherDataRecord(PredictionRecord):
    """Represents a weather data record containing various weather attributes at a specific datetime.

    Attributes:
        date_time (Optional[AwareDatetime]): The datetime of the record.
        total_clouds (Optional[float]): Total cloud cover as a percentage of the sky obscured.
        low_clouds (Optional[float]): Cloud cover in the lower atmosphere (% sky obscured).
        medium_clouds (Optional[float]): Cloud cover in the middle atmosphere (% sky obscured).
        high_clouds (Optional[float]): Cloud cover in the upper atmosphere (% sky obscured).
        visibility (Optional[float]): Horizontal visibility in meters.
        fog (Optional[float]): Fog cover percentage.
        precip_type (Optional[str]): Type of precipitation (e.g., "rain", "snow").
        precip_prob (Optional[float]): Probability of precipitation as a percentage.
        precip_amt (Optional[float]): Precipitation amount in millimeters.
        preciptable_water (Optional[float]): Precipitable water in centimeters.
        wind_speed (Optional[float]): Wind speed in kilometers per hour.
        wind_direction (Optional[float]): Wind direction in degrees (0-360°).
        frost_chance (Optional[str]): Probability of frost.
        temp_air (Optional[float]): Air temperature in degrees Celsius.
        feels_like (Optional[float]): Feels-like temperature in degrees Celsius.
        dew_point (Optional[float]): Dew point in degrees Celsius.
        relative_humidity (Optional[float]): Relative humidity in percentage.
        pressure (Optional[float]): Atmospheric pressure in millibars.
        ozone (Optional[float]): Ozone concentration in Dobson units.
        ghi (Optional[float]): Global Horizontal Irradiance in watts per square meter (W/m²).
        dni (Optional[float]): Direct Normal Irradiance in watts per square meter (W/m²).
        dhi (Optional[float]): Diffuse Horizontal Irradiance in watts per square meter (W/m²).
    """

    weather_total_clouds: Optional[float] = Field(
        default=None, description="Total Clouds (% Sky Obscured)"
    )
    weather_low_clouds: Optional[float] = Field(
        default=None, description="Low Clouds (% Sky Obscured)"
    )
    weather_medium_clouds: Optional[float] = Field(
        default=None, description="Medium Clouds (% Sky Obscured)"
    )
    weather_high_clouds: Optional[float] = Field(
        default=None, description="High Clouds (% Sky Obscured)"
    )
    weather_visibility: Optional[float] = Field(default=None, description="Visibility (m)")
    weather_fog: Optional[float] = Field(default=None, description="Fog (%)")
    weather_precip_type: Optional[str] = Field(default=None, description="Precipitation Type")
    weather_precip_prob: Optional[float] = Field(
        default=None, description="Precipitation Probability (%)"
    )
    weather_precip_amt: Optional[float] = Field(
        default=None, description="Precipitation Amount (mm)"
    )
    weather_preciptable_water: Optional[float] = Field(
        default=None, description="Precipitable Water (cm)"
    )
    weather_wind_speed: Optional[float] = Field(default=None, description="Wind Speed (kmph)")
    weather_wind_direction: Optional[float] = Field(default=None, description="Wind Direction (°)")
    weather_frost_chance: Optional[str] = Field(default=None, description="Chance of Frost")
    weather_temp_air: Optional[float] = Field(default=None, description="Temperature (°C)")
    weather_feels_like: Optional[float] = Field(default=None, description="Feels Like (°C)")
    weather_dew_point: Optional[float] = Field(default=None, description="Dew Point (°C)")
    weather_relative_humidity: Optional[float] = Field(
        default=None, description="Relative Humidity (%)"
    )
    weather_pressure: Optional[float] = Field(default=None, description="Pressure (mb)")
    weather_ozone: Optional[float] = Field(default=None, description="Ozone (du)")
    weather_ghi: Optional[float] = Field(
        default=None, description="Global Horizontal Irradiance (W/m2)"
    )
    weather_dni: Optional[float] = Field(
        default=None, description="Direct Normal Irradiance (W/m2)"
    )
    weather_dhi: Optional[float] = Field(
        default=None, description="Diffuse Horizontal Irradiance (W/m2)"
    )


class WeatherProvider(PredictionProvider):
    """Abstract base class for weather providers.

    WeatherProvider is a thread-safe singleton, ensuring only one instance of this class is created.

    Configuration variables:
        weather_provider (str): Prediction provider for weather.

    Attributes:
        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
        latitude (float, optional): The latitude in degrees, must be within -90 to 90.
        longitude (float, optional): The longitude in degrees, must be within -180 to 180.
        start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
        end_datetime (datetime, computed): The datetime representing the end of the prediction range,
            calculated based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
            based on `start_datetime` and `prediction_historic_hours`.
    """

    # overload
    records: List[WeatherDataRecord] = Field(
        default_factory=list, description="List of WeatherDataRecord records"
    )

    @classmethod
    @abstractmethod
    def provider_id(cls) -> str:
        return "WeatherProvider"

    def enabled(self) -> bool:
        return self.provider_id() == self.config.weather_provider

    @classmethod
    def estimate_irradiance_from_cloud_cover(
        cls, lat: float, lon: float, cloud_cover: pd.Series, offset: int = 35
    ) -> tuple:
        """Estimates irradiance values (GHI, DNI, DHI) based on cloud cover.

        This method estimates solar irradiance in several steps:
        1. **Clear Sky GHI Calculation**: Determines the Global Horizontal Irradiance (GHI) under
           clear sky conditions using the Ineichen model and climatological turbidity data.
        2. **Cloudy Sky GHI Estimation**: Adjusts the clear sky GHI based on the provided cloud
           cover percentage to estimate cloudy sky GHI.
        3. **Direct Normal Irradiance (DNI) Estimation**: Uses the DISC model to estimate the DNI
           from the adjusted GHI.
        4. **Diffuse Horizontal Irradiance (DHI) Calculation**: Computes DHI from the estimated
           GHI and DNI values.

        Args:
            lat (float): Latitude of the location for irradiance estimation.
            lon (float): Longitude of the location for irradiance estimation.
            cloud_cover (pd.Series): Series of cloud cover values (0-100%) indexed by datetime.
            offset (Optional[int]): Baseline for GHI adjustment as a percentage (default is 35).

        Returns:
            tuple: Lists of estimated irradiance values in the order of GHI, DNI, and DHI.

        Note:
            This method is based on the implementation from PVLib and is adapted from
            https://github.com/davidusb-geek/emhass/blob/master/src/emhass/forecast.py (MIT License).
        """
        # Adjust offset percentage to scaling factor
        offset_fraction = offset / 100.0

        # Get cloud cover datetimes
        cloud_cover_times = cloud_cover.index

        # Create a location object
        location = pvlib.location.Location(latitude=lat, longitude=lon)

        # Get solar position and clear-sky GHI using the Ineichen model
        solpos = location.get_solarposition(cloud_cover_times)
        clear_sky = location.get_clearsky(cloud_cover_times, model="ineichen")

        # Convert cloud cover percentage to a scaling factor
        cloud_cover_fraction = np.array(cloud_cover) / 100.0

        # Calculate adjusted GHI with proportional offset adjustment
        adjusted_ghi = clear_sky["ghi"] * (
            offset_fraction + (1 - offset_fraction) * (1 - cloud_cover_fraction)
        )
        adjusted_ghi.fillna(0.0, inplace=True)

        # Apply DISC model to estimate Direct Normal Irradiance (DNI) from adjusted GHI
        disc_output = pvlib.irradiance.disc(adjusted_ghi, solpos["zenith"], cloud_cover_times)
        adjusted_dni = disc_output["dni"]
        adjusted_dni.fillna(0.0, inplace=True)

        # Calculate Diffuse Horizontal Irradiance (DHI) as DHI = GHI - DNI * cos(zenith)
        zenith_rad = np.radians(solpos["zenith"])
        adjusted_dhi = adjusted_ghi - adjusted_dni * np.cos(zenith_rad)
        adjusted_dhi.fillna(0.0, inplace=True)

        # Return GHI, DNI, DHI lists
        ghi = adjusted_ghi.to_list()
        dni = adjusted_dni.to_list()
        dhi = adjusted_dhi.to_list()
        return ghi, dni, dhi

    @classmethod
    def estimate_preciptable_water(
        cls, temperature: pd.Series, relative_humidity: pd.Series
    ) -> pd.Series:
        return pvlib.atmosphere.gueymard94_pw(temperature, relative_humidity)
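The scaling step in `estimate_irradiance_from_cloud_cover` reduces clear-sky GHI linearly with cloud cover but never below `offset` percent of the clear-sky value. A self-contained sketch of just that arithmetic, isolated from pvlib:

def adjust_ghi(clear_sky_ghi: float, cloud_cover_pct: float, offset: int = 35) -> float:
    """Scale clear-sky GHI by cloud cover, keeping an `offset` percent baseline."""
    offset_fraction = offset / 100.0
    cloud_fraction = cloud_cover_pct / 100.0
    return clear_sky_ghi * (offset_fraction + (1 - offset_fraction) * (1 - cloud_fraction))

assert adjust_ghi(800.0, 0.0) == 800.0  # clear sky is unchanged
assert adjust_ghi(800.0, 100.0) == 280.0  # fully overcast still yields 35% of clear sky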
229  src/akkudoktoreos/prediction/weatherbrightsky.py  Normal file
@ -0,0 +1,229 @@
"""Retrieves and processes weather forecast data from BrightSky.

This module provides classes and mappings to manage weather data obtained from the
BrightSky API, including support for various weather attributes such as temperature,
humidity, cloud cover, and solar irradiance. The data is mapped to the `WeatherDataRecord`
format, enabling consistent access to forecasted and historical weather attributes.
"""

import json
from typing import Dict, List, Optional, Tuple

import pandas as pd
import pvlib
import requests

from akkudoktoreos.prediction.weatherabc import WeatherDataRecord, WeatherProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


WeatherDataBrightSkyMapping: List[Tuple[str, Optional[str], Optional[float]]] = [
    # brightsky_key, description, corr_factor
    ("timestamp", "DateTime", None),
    ("precipitation", "Precipitation Amount (mm)", 1),
    ("pressure_msl", "Pressure (mb)", 1),
    ("sunshine", None, None),
    ("temperature", "Temperature (°C)", 1),
    ("wind_direction", "Wind Direction (°)", 1),
    ("wind_speed", "Wind Speed (kmph)", 1),
    ("cloud_cover", "Total Clouds (% Sky Obscured)", 1),
    ("dew_point", "Dew Point (°C)", 1),
    ("relative_humidity", "Relative Humidity (%)", 1),
    ("visibility", "Visibility (m)", 1),
    ("wind_gust_direction", None, None),
    ("wind_gust_speed", None, None),
    ("condition", None, None),
    ("precipitation_probability", "Precipitation Probability (%)", 1),
    ("precipitation_probability_6h", None, None),
    ("solar", "Global Horizontal Irradiance (W/m2)", 1000),
    ("fallback_source_ids", None, None),
    ("icon", None, None),
]
"""Mapping of BrightSky weather data keys to WeatherDataRecord field descriptions.

Each tuple represents a field in the BrightSky data, with:
- The BrightSky field key,
- The corresponding `WeatherDataRecord` description, if applicable,
- A correction factor for unit or value scaling.
Fields without descriptions or correction factors are mapped to `None`.
"""


class WeatherBrightSky(WeatherProvider):
    """Fetch and process weather forecast data from BrightSky.

    WeatherBrightSky is a singleton-based class that retrieves weather forecast data
    from the BrightSky API and maps it to `WeatherDataRecord` fields, applying
    any necessary scaling or unit corrections. It manages the forecast over a range
    of hours into the future and retains historical data.

    Attributes:
        prediction_hours (int, optional): Number of hours in the future for the forecast.
        prediction_historic_hours (int, optional): Number of past hours for retaining data.
        latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
        longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
        start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
        end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.

    Methods:
        provider_id(): Returns a unique identifier for the provider.
        _request_forecast(): Fetches the forecast from the BrightSky API.
        _update_data(): Processes and updates forecast data from BrightSky in WeatherDataRecord format.
    """

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the BrightSky provider."""
        return "BrightSky"

    @cache_in_file(with_ttl="1 hour")
    def _request_forecast(self) -> dict:
        """Fetch weather forecast data from BrightSky API.

        This method sends a request to BrightSky's API to retrieve forecast data
        for a specified date range and location. The response data is parsed and
        returned as JSON for further processing.

        Returns:
            dict: The parsed JSON response from BrightSky API containing forecast data.

        Raises:
            ValueError: If the API response does not include expected `weather` data.
        """
        source = "https://api.brightsky.dev"
        date = to_datetime(self.start_datetime, as_string="%Y-%m-%d")
        last_date = to_datetime(self.end_datetime, as_string="%Y-%m-%d")
        response = requests.get(
            f"{source}/weather?lat={self.config.latitude}&lon={self.config.longitude}&date={date}&last_date={last_date}&tz={self.config.timezone}"
        )
        response.raise_for_status()  # Raise an error for bad responses
        logger.debug(f"Response from {source}: {response}")
        brightsky_data = json.loads(response.content)
        if "weather" not in brightsky_data:
            error_msg = f"BrightSky schema change. `weather` expected to be part of BrightSky data: {brightsky_data}."
            logger.error(error_msg)
            raise ValueError(error_msg)
        # We are working on fresh data (no cache), report update time
        self.update_datetime = to_datetime(in_timezone=self.config.timezone)
        return brightsky_data

    def _description_to_series(self, description: str) -> pd.Series:
        """Retrieve a pandas Series corresponding to a weather data description.

        This method fetches the key associated with the provided description
        and retrieves the data series mapped to that key. If the description
        does not correspond to a valid key, a `ValueError` is raised.

        Args:
            description (str): The description of the WeatherDataRecord to retrieve.

        Returns:
            pd.Series: The data series corresponding to the description.

        Raises:
            ValueError: If no key is found for the provided description.
        """
        key = WeatherDataRecord.key_from_description(description)
        if key is None:
            error_msg = f"No WeatherDataRecord key for '{description}'"
            logger.error(error_msg)
            raise ValueError(error_msg)
        return self.key_to_series(key)

    def _description_from_series(self, description: str, data: pd.Series) -> None:
        """Update the weather data with a pandas Series based on its description.

        This method fetches the key associated with the provided description
        and updates the weather data with the provided data series. If the description
        does not correspond to a valid key, a `ValueError` is raised.

        Args:
            description (str): The description of the weather data to update.
            data (pd.Series): The pandas Series containing the data to update.

        Raises:
            ValueError: If no key is found for the provided description.
        """
        key = WeatherDataRecord.key_from_description(description)
        if key is None:
            error_msg = f"No WeatherDataRecord key for '{description}'"
            logger.error(error_msg)
            raise ValueError(error_msg)
        self.key_from_series(key, data)

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        """Update forecast data in the WeatherDataRecord format.

        Retrieves data from BrightSky, maps each BrightSky field to the corresponding
        `WeatherDataRecord` attribute using `WeatherDataBrightSkyMapping`, and applies
        any necessary scaling. Forecast data such as cloud cover, temperature, and
        humidity is further processed to estimate solar irradiance and precipitable water.

        The final mapped and processed data is inserted into the sequence as `WeatherDataRecord`.
        """
        # Get BrightSky weather data for the given coordinates
        brightsky_data = self._request_forecast(force_update=force_update)  # type: ignore

        # Get key mapping from description
        brightsky_key_mapping: Dict[str, Tuple[Optional[str], Optional[float]]] = {}
        for brightsky_key, description, corr_factor in WeatherDataBrightSkyMapping:
            if description is None:
                brightsky_key_mapping[brightsky_key] = (None, None)
                continue
            weatherdata_key = WeatherDataRecord.key_from_description(description)
            if weatherdata_key is None:
                # Should not happen
                error_msg = f"No WeatherDataRecord key for '{description}'"
                logger.error(error_msg)
                raise ValueError(error_msg)
            brightsky_key_mapping[brightsky_key] = (weatherdata_key, corr_factor)

        for brightsky_record in brightsky_data["weather"]:
            weather_record = WeatherDataRecord()
            for brightsky_key, item in brightsky_key_mapping.items():
                key = item[0]
                if key is None:
                    continue
                value = brightsky_record[brightsky_key]
                corr_factor = item[1]
                if value and corr_factor:
                    value = value * corr_factor
                setattr(weather_record, key, value)
            self.insert_by_datetime(weather_record)

        # Converting the cloud cover into Irradiance (GHI, DNI, DHI)
        description = "Total Clouds (% Sky Obscured)"
        cloud_cover = self._description_to_series(description)
        ghi, dni, dhi = self.estimate_irradiance_from_cloud_cover(
            self.config.latitude, self.config.longitude, cloud_cover
        )

        description = "Global Horizontal Irradiance (W/m2)"
        ghi = pd.Series(data=ghi, index=cloud_cover.index)
        self._description_from_series(description, ghi)

        description = "Direct Normal Irradiance (W/m2)"
        dni = pd.Series(data=dni, index=cloud_cover.index)
        self._description_from_series(description, dni)

        description = "Diffuse Horizontal Irradiance (W/m2)"
        dhi = pd.Series(data=dhi, index=cloud_cover.index)
        self._description_from_series(description, dhi)

        # Add Precipitable Water (PWAT) with a PVLib method.
        description = "Temperature (°C)"
        temperature = self._description_to_series(description)

        description = "Relative Humidity (%)"
        humidity = self._description_to_series(description)

        pwat = pd.Series(
            data=pvlib.atmosphere.gueymard94_pw(temperature, humidity), index=temperature.index
        )
        description = "Precipitable Water (cm)"
        self._description_from_series(description, pwat)
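The correction factor in the mapping rescales units on the fly; the factor of 1000 on the `solar` field suggests a conversion from BrightSky's kWh/m² hourly energy to the W/m² stored in `WeatherDataRecord` (the BrightSky documentation defines the exact unit). A sketch of the mapping loop applied to one hypothetical record, with invented field values:

brightsky_record = {"temperature": 6.3, "cloud_cover": 88, "solar": 0.105}  # hypothetical
mapping = {
    "temperature": ("weather_temp_air", 1),
    "cloud_cover": ("weather_total_clouds", 1),
    "solar": ("weather_ghi", 1000),
}
mapped = {}
for brightsky_key, (key, corr_factor) in mapping.items():
    value = brightsky_record[brightsky_key]
    if value and corr_factor:
        value = value * corr_factor  # e.g. 0.105 kWh/m² -> 105 W/m² averaged over the hour
    mapped[key] = value
# mapped now holds WeatherDataRecord field names with scaled values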
342  src/akkudoktoreos/prediction/weatherclearoutside.py  Normal file
@ -0,0 +1,342 @@
"""Weather Forecast.

This module provides classes and methods to retrieve, manage, and process weather forecast data
from various online sources. It includes structured representations of weather data and utilities
for fetching forecasts for specific locations and time ranges. By integrating multiple data sources,
the module enables flexible access to weather information based on latitude, longitude, and
desired time periods.

Notes:
    - Supported weather sources can be expanded by adding new fetch methods within the
      WeatherForecast class.
    - Ensure appropriate API keys or configurations are set up if required by external data sources.
"""

import re
from typing import Dict, List, Optional, Tuple

import pandas as pd
import requests
from bs4 import BeautifulSoup

from akkudoktoreos.prediction.weatherabc import WeatherDataRecord, WeatherProvider
from akkudoktoreos.utils.cacheutil import cache_in_file
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration, to_timezone
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


WeatherDataClearOutsideMapping: List[Tuple[str, Optional[str], Optional[float]]] = [
    # clearoutside_key, description, corr_factor
    ("DateTime", "DateTime", None),
    ("Total Clouds (% Sky Obscured)", "Total Clouds (% Sky Obscured)", 1),
    ("Low Clouds (% Sky Obscured)", "Low Clouds (% Sky Obscured)", 1),
    ("Medium Clouds (% Sky Obscured)", "Medium Clouds (% Sky Obscured)", 1),
    ("High Clouds (% Sky Obscured)", "High Clouds (% Sky Obscured)", 1),
    ("ISS Passover", None, None),
    ("Visibility (miles)", "Visibility (m)", 1609.34),
    ("Fog (%)", "Fog (%)", 1),
    ("Precipitation Type", "Precipitation Type", None),
    ("Precipitation Probability (%)", "Precipitation Probability (%)", 1),
    ("Precipitation Amount (mm)", "Precipitation Amount (mm)", 1),
    ("Wind Speed (mph)", "Wind Speed (kmph)", 1.60934),
    ("Chance of Frost", "Chance of Frost", None),
    ("Temperature (°C)", "Temperature (°C)", 1),
    ("Feels Like (°C)", "Feels Like (°C)", 1),
    ("Dew Point (°C)", "Dew Point (°C)", 1),
    ("Relative Humidity (%)", "Relative Humidity (%)", 1),
    ("Pressure (mb)", "Pressure (mb)", 1),
    ("Ozone (du)", "Ozone (du)", 1),
    # Extra extraction
    ("Wind Direction (°)", "Wind Direction (°)", 1),
    # Generated from above
    ("Precipitable Water (cm)", "Precipitable Water (cm)", 1),
    ("Global Horizontal Irradiance (W/m2)", "Global Horizontal Irradiance (W/m2)", 1),
    ("Direct Normal Irradiance (W/m2)", "Direct Normal Irradiance (W/m2)", 1),
    ("Diffuse Horizontal Irradiance (W/m2)", "Diffuse Horizontal Irradiance (W/m2)", 1),
]
"""Mapping of ClearOutside weather data keys to WeatherDataRecord field descriptions.

A list of tuples: (ClearOutside key, field description, correction factor).
"""


class WeatherClearOutside(WeatherProvider):
    """Retrieves and processes weather forecast data from ClearOutside.

    WeatherClearOutside is a thread-safe singleton, ensuring only one instance of this class is created.

    Attributes:
        prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
        prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
        latitude (float, optional): The latitude in degrees, must be within -90 to 90.
        longitude (float, optional): The longitude in degrees, must be within -180 to 180.
        start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
        end_datetime (datetime, computed): The datetime representing the end of the prediction range,
            calculated based on `start_datetime` and `prediction_hours`.
        keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
            based on `start_datetime` and `prediction_historic_hours`.
    """

    @classmethod
    def provider_id(cls) -> str:
        return "ClearOutside"

    @cache_in_file(with_ttl="1 hour")
    def _request_forecast(self) -> requests.Response:
        """Requests weather forecast from ClearOutside.

        Returns:
            response: Weather forecast request response from ClearOutside.
        """
        source = "https://clearoutside.com/forecast"
        latitude = round(self.config.latitude, 2)
        longitude = round(self.config.longitude, 2)
        response = requests.get(f"{source}/{latitude}/{longitude}?desktop=true")
        response.raise_for_status()  # Raise an error for bad responses
        logger.debug(f"Response from {source}: {response}")
        # We are working on fresh data (no cache), report update time
        self.update_datetime = to_datetime(in_timezone=self.config.timezone)
        return response

    def _update_data(self, force_update: Optional[bool] = None) -> None:
        """Scrape weather forecast data from ClearOutside's website.

        This method requests weather forecast data from ClearOutside based on latitude
        and longitude, then processes and structures this data for further use in analysis.

        The forecast data includes a variety of weather parameters such as cloud cover, temperature,
        humidity, visibility, precipitation, wind speed, and additional irradiance values
        calculated using the cloud cover data.

        Raises:
            ValueError: If the HTML structure of ClearOutside's website changes, causing
                extraction issues with forecast dates, timezone, or expected data sections.

        Note:
            - The function partly builds on code from https://github.com/davidusb-geek/emhass/blob/master/src/emhass/forecast.py (MIT License).
            - Uses `pvlib` to estimate irradiance (GHI, DNI, DHI) based on cloud cover data.

        Workflow:
            1. **Retrieve Web Content**: Uses a helper method to fetch or retrieve cached ClearOutside HTML content.
            2. **Extract Forecast Date and Timezone**:
                - Parses the forecast's start and end dates and the UTC offset from the "Generated" header.
            3. **Extract Weather Data**:
                - For each day in the 7-day forecast, the function finds detailed weather parameters
                  and associates values for each hour.
                - Parameters include cloud cover, temperature, humidity, visibility, and precipitation type, among others.
            4. **Irradiance Calculation**:
                - Calculates irradiance (GHI, DNI, DHI) values using cloud cover data and the `pvlib` library.
            5. **Store Data**:
                - Combines all hourly data into `WeatherDataRecord` objects, with keys
                  standardized according to `WeatherDataRecord` attributes.
        """
        # Get ClearOutside web content - either from site or cached
        response = self._request_forecast(force_update=force_update)  # type: ignore

        # Scrape the data
        soup = BeautifulSoup(response.content, "html.parser")

        # Find generation data
        p_generated = soup.find("h2", string=lambda text: text and text.startswith("Generated:"))
        if not p_generated:
            error_msg = f"Clearoutside schema change. Could not get '<h2>Generated:', got {p_generated} from {str(response.content)}."
            logger.error(error_msg)
            raise ValueError(error_msg)

        # Extract forecast start and end dates
        forecast_pattern = r"Forecast: (\d{2}/\d{2}/\d{2}) to (\d{2}/\d{2}/\d{2})"
        forecast_match = re.search(forecast_pattern, p_generated.get_text())
        if forecast_match:
            forecast_start_date = forecast_match.group(1)
            forecast_end_date = forecast_match.group(2)
        else:
            error_msg = f"Clearoutside schema change. Could not extract forecast start and end dates from {p_generated}."
            logger.error(error_msg)
            raise ValueError(error_msg)

        # Extract timezone offset
        timezone_pattern = r"Timezone: UTC([+-]\d+)\.(\d+)"
        timezone_match = re.search(timezone_pattern, p_generated.get_text())
        if timezone_match:
            hours = int(timezone_match.group(1))
            # Convert the decimal part to minutes (e.g., .50 -> 30 minutes)
            minutes = int(timezone_match.group(2)) * 6  # Multiply by 6 to convert to minutes

            # Create the timezone object using offset
            utc_offset = float(hours) + float(minutes) / 60.0
            forecast_timezone = to_timezone(utc_offset=utc_offset)
        else:
            error_msg = "Clearoutside schema change. Could not extract forecast timezone."
            logger.error(error_msg)
            raise ValueError(error_msg)

        forecast_start_datetime = to_datetime(
            forecast_start_date, in_timezone=forecast_timezone, to_maxtime=False
        )

        # Get key mapping from description
        clearoutside_key_mapping: Dict[str, Tuple[Optional[str], Optional[float]]] = {}
        for clearoutside_key, description, corr_factor in WeatherDataClearOutsideMapping:
            if description is None:
                clearoutside_key_mapping[clearoutside_key] = (None, None)
                continue
            weatherdata_key = WeatherDataRecord.key_from_description(description)
            if weatherdata_key is None:
                # Should not happen
                error_msg = f"No WeatherDataRecord key for '{description}'"
                logger.error(error_msg)
                raise ValueError(error_msg)
            clearoutside_key_mapping[clearoutside_key] = (weatherdata_key, corr_factor)

        # Find all paragraphs with id 'day_<x>'. There should be seven.
        p_days = soup.find_all(id=re.compile(r"day_[0-9]"))
        if len(p_days) != 7:
            error_msg = f"Clearoutside schema change. Found {len(p_days)} day tables, expected 7."
            logger.error(error_msg)
            raise ValueError(error_msg)

        # Delete all records that will be newly added
        self.delete_by_datetime(start_datetime=forecast_start_datetime)

        # Collect weather data, loop over all days
        for day, p_day in enumerate(p_days):
            # Within day_x paragraph find the details labels
            p_detail_labels = p_day.find_all(class_="fc_detail_label")
            detail_names = [p.get_text() for p in p_detail_labels]

            # Check for schema changes
            if len(detail_names) < 18:
                error_msg = f"Clearoutside schema change. Unexpected number ({len(detail_names)}) of `fc_detail_label`."
                logger.error(error_msg)
                raise ValueError(error_msg)
            for detail_name in detail_names:
                if detail_name not in clearoutside_key_mapping:
                    warning_msg = (
                        f"Clearoutside schema change. Unexpected detail name {detail_name}."
                    )
                    logger.warning(warning_msg)

            # Find all the paragraphs that are associated to the details.
            # Beware there is one ul paragraph before that is not associated to a detail
            p_detail_tables = p_day.find_all("ul")
            if len(p_detail_tables) != len(detail_names) + 1:
                error_msg = f"Clearoutside schema change. Unexpected number ({len(p_detail_tables)}) of `ul` for details {len(detail_names)}. Should be one extra only."
                logger.error(error_msg)
                raise ValueError(error_msg)
            p_detail_tables.pop(0)

            # Create clearout data
            clearout_data = {}
            # Replace some detail names that we use differently
            detail_names = [
                s.replace("Wind Speed/Direction (mph)", "Wind Speed (mph)") for s in detail_names
            ]
            # Number of detail values. On last day may be less than 24.
            detail_values_count = None
            # Add data values
            scrape_detail_names = detail_names.copy()  # do not change list during iteration!
            for i, detail_name in enumerate(scrape_detail_names):
                p_detail_values = p_detail_tables[i].find_all("li")

                # Assure the number of values fits
                p_detail_values_count = len(p_detail_values)
                if (day == 6 and p_detail_values_count > 24) or (
                    day < 6 and p_detail_values_count != 24
                ):
                    error_msg = f"Clearoutside schema change. Unexpected number ({p_detail_values_count}) of `li` for detail `{detail_name}` data. Should be 24 or less on day 7. Table is `{p_detail_tables[i]}`."
                    logger.error(error_msg)
                    raise ValueError(error_msg)
                if detail_values_count is None:
                    # Remember detail values count only once
                    detail_values_count = p_detail_values_count
                if p_detail_values_count != detail_values_count:
                    # Value count for details differ.
                    error_msg = f"Clearoutside schema change. Number ({p_detail_values_count}) of `li` for detail `{detail_name}` data is different than last one {detail_values_count}. Table is `{p_detail_tables[i]}`."
                    logger.error(error_msg)
                    raise ValueError(error_msg)

                # Scrape the detail values
                detail_data = []
                extra_detail_name = None
                extra_detail_data = []
                for p_detail_value in p_detail_values:
                    if detail_name == "Wind Speed (mph)":
                        # Get the usual value
                        value_str = p_detail_value.get_text()
                        # Also extract extra data
                        extra_detail_name = "Wind Direction (°)"
                        extra_value = None
                        match = re.search(r"(\d+)°", str(p_detail_value))
                        if match:
                            extra_value = float(match.group(1))
                        else:
                            error_msg = f"Clearoutside schema change. Can't extract direction angle from `{p_detail_value}` for detail `{extra_detail_name}`. Table is `{p_detail_tables[i]}`."
                            logger.error(error_msg)
                            raise ValueError(error_msg)
                        extra_detail_data.append(extra_value)
                    elif (
                        detail_name in ("Precipitation Type", "Chance of Frost")
                        and hasattr(p_detail_value, "title")
                        and p_detail_value.title
                    ):
                        value_str = p_detail_value.title.string
                    else:
                        value_str = p_detail_value.get_text()
                    try:
                        value = float(value_str)
                    except ValueError:
                        value = value_str
                    detail_data.append(value)
                clearout_data[detail_name] = detail_data
                if extra_detail_name:
                    if extra_detail_name not in detail_names:
                        detail_names.append(extra_detail_name)
                    clearout_data[extra_detail_name] = extra_detail_data
                    logger.debug(f"Added extra data {extra_detail_name} with {extra_detail_data}")

            # Add datetimes of the scraped data
            clearout_data["DateTime"] = [
                forecast_start_datetime + to_duration(f"{day} days {i} hours")
                for i in range(0, detail_values_count)  # type: ignore[arg-type]
            ]
            detail_names.append("DateTime")

            # Converting the cloud cover into Irradiance (GHI, DNI, DHI)
            cloud_cover = pd.Series(
                data=clearout_data["Total Clouds (% Sky Obscured)"], index=clearout_data["DateTime"]
            )
            ghi, dni, dhi = self.estimate_irradiance_from_cloud_cover(
                self.config.latitude, self.config.longitude, cloud_cover
            )

            # Add GHI, DNI, DHI to clearout data
            clearout_data["Global Horizontal Irradiance (W/m2)"] = ghi
            detail_names.append("Global Horizontal Irradiance (W/m2)")
            clearout_data["Direct Normal Irradiance (W/m2)"] = dni
            detail_names.append("Direct Normal Irradiance (W/m2)")
            clearout_data["Diffuse Horizontal Irradiance (W/m2)"] = dhi
            detail_names.append("Diffuse Horizontal Irradiance (W/m2)")

            # Add Precipitable Water (PWAT) with a PVLib method.
            clearout_data["Precipitable Water (cm)"] = self.estimate_preciptable_water(
                pd.Series(data=clearout_data["Temperature (°C)"]),
                pd.Series(data=clearout_data["Relative Humidity (%)"]),
            ).to_list()
            detail_names.append("Precipitable Water (cm)")

            # Add weather data
            # Add the records from clearout
            for row_index in range(0, len(clearout_data["DateTime"])):
                weather_record = WeatherDataRecord()
                for detail_name in detail_names:
                    key = clearoutside_key_mapping[detail_name][0]
                    if key is None:
                        continue
                    if detail_name in clearout_data:
                        value = clearout_data[detail_name][row_index]
                        corr_factor = clearoutside_key_mapping[detail_name][1]
                        if corr_factor:
                            value = value * corr_factor
                        setattr(weather_record, key, value)
                self.insert_by_datetime(weather_record)
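The timezone handling above converts ClearOutside's decimal UTC offset notation into hours and minutes, multiplying the single decimal digit by 6 (so `.5` becomes 30 minutes). An isolated sketch with a made-up header string:

import re

header = "Generated: 10/12/24 ... Timezone: UTC+5.5"  # hypothetical header text
match = re.search(r"Timezone: UTC([+-]\d+)\.(\d+)", header)
assert match is not None
hours = int(match.group(1))  # 5
minutes = int(match.group(2)) * 6  # 5 -> 30 minutes
utc_offset = float(hours) + float(minutes) / 60.0  # 5.5
# Note: a negative fractional offset (e.g. UTC-9.5) would need the minutes term
# sign-adjusted; the code above adds it unconditionally.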
65  src/akkudoktoreos/prediction/weatherimport.py  Normal file
@ -0,0 +1,65 @@
"""Retrieves weather forecast data from an import file.

This module provides classes and mappings to manage weather data obtained from
an import file, including support for various weather attributes such as temperature,
humidity, cloud cover, and solar irradiance. The data is mapped to the `WeatherDataRecord`
format, enabling consistent access to forecasted and historical weather attributes.
"""

from pathlib import Path
from typing import Optional, Union

from pydantic import Field, field_validator

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.prediction.predictionabc import PredictionImportProvider
from akkudoktoreos.prediction.weatherabc import WeatherProvider
from akkudoktoreos.utils.logutil import get_logger

logger = get_logger(__name__)


class WeatherImportCommonSettings(SettingsBaseModel):
    """Common settings for weather data import from file."""

    weatherimport_file_path: Optional[Union[str, Path]] = Field(
        default=None, description="Path to the file to import weather data from."
    )

    weatherimport_json: Optional[str] = Field(
        default=None, description="JSON string, dictionary of weather forecast value lists."
    )

    # Validators
    @field_validator("weatherimport_file_path", mode="after")
    @classmethod
    def validate_weatherimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
        if value is None:
            return None
        if isinstance(value, str):
            value = Path(value)
        # Ensure the file is available.
        value = value.resolve()
        if not value.is_file():
            raise ValueError(f"Import file path '{value}' is not a file.")
        return value


class WeatherImport(WeatherProvider, PredictionImportProvider):
    """Fetch weather forecast data from import file or JSON string.

    WeatherImport is a singleton-based class that retrieves weather forecast data
    from a file or JSON string and maps it to `WeatherDataRecord` fields. It manages the forecast
    over a range of hours into the future and retains historical data.
    """

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the WeatherImport provider."""
        return "WeatherImport"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        if self.config.weatherimport_file_path is not None:
            self.import_from_file(self.config.weatherimport_file_path, key_prefix="weather")
        if self.config.weatherimport_json is not None:
            self.import_from_json(self.config.weatherimport_json, key_prefix="weather")
@ -1,38 +1,54 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import os
|
import subprocess
|
||||||
from datetime import datetime
|
import sys
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Annotated, Any, Dict, List, Optional
|
from typing import Annotated, Any, AsyncGenerator, Dict, List, Optional, Union
|
||||||
|
|
||||||
import matplotlib
|
|
||||||
import uvicorn
|
|
||||||
from fastapi.exceptions import HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
# Sets the Matplotlib backend to 'Agg' for rendering plots in environments without a display
|
|
||||||
matplotlib.use("Agg")
|
|
||||||
|
|
||||||
|
import httpx
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
from fastapi import FastAPI, Query
|
import uvicorn
|
||||||
from fastapi.responses import FileResponse, RedirectResponse
|
from fastapi import FastAPI, Query, Request
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
from fastapi.responses import FileResponse, RedirectResponse, Response
|
||||||
|
from pendulum import DateTime
|
||||||
|
|
||||||
from akkudoktoreos.config import (
|
from akkudoktoreos.config.config import ConfigEOS, SettingsEOS, get_config
|
||||||
SetupIncomplete,
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
get_start_enddate,
|
|
||||||
get_working_dir,
|
|
||||||
load_config,
|
|
||||||
)
|
|
||||||
from akkudoktoreos.optimization.genetic import (
|
from akkudoktoreos.optimization.genetic import (
|
||||||
OptimizationParameters,
|
OptimizationParameters,
|
||||||
OptimizeResponse,
|
OptimizeResponse,
|
||||||
optimization_problem,
|
optimization_problem,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Still to be adapted
|
||||||
from akkudoktoreos.prediction.load_container import Gesamtlast
|
from akkudoktoreos.prediction.load_container import Gesamtlast
|
||||||
from akkudoktoreos.prediction.load_corrector import LoadPredictionAdjuster
|
from akkudoktoreos.prediction.load_corrector import LoadPredictionAdjuster
|
||||||
from akkudoktoreos.prediction.load_forecast import LoadForecast
|
from akkudoktoreos.prediction.load_forecast import LoadForecast
|
||||||
from akkudoktoreos.prediction.price_forecast import HourlyElectricityPriceForecast
|
from akkudoktoreos.prediction.prediction import get_prediction
|
||||||
from akkudoktoreos.prediction.pv_forecast import ForecastResponse, PVForecast
|
from akkudoktoreos.utils.logutil import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
config_eos = get_config()
|
||||||
|
prediction_eos = get_prediction()
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||||
|
"""Lifespan manager for the app."""
|
||||||
|
# On startup
|
||||||
|
if config_eos.server_fasthtml_host and config_eos.server_fasthtml_port:
|
||||||
|
try:
|
||||||
|
fasthtml_process = start_fasthtml_server()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to start FastHTML server. Error: {e}")
|
||||||
|
sys.exit(1)
|
||||||
|
# Handover to application
|
||||||
|
yield
|
||||||
|
# On shutdown
|
||||||
|
# nothing to do
|
||||||
|
|
||||||
|
|
||||||
app = FastAPI(
|
app = FastAPI(
|
||||||
title="Akkudoktor-EOS",
|
title="Akkudoktor-EOS",
|
||||||
@ -43,12 +59,12 @@ app = FastAPI(
|
|||||||
"name": "Apache 2.0",
|
"name": "Apache 2.0",
|
||||||
"url": "https://www.apache.org/licenses/LICENSE-2.0.html",
|
"url": "https://www.apache.org/licenses/LICENSE-2.0.html",
|
||||||
},
|
},
|
||||||
|
lifespan=lifespan,
|
||||||
)
|
)
|
||||||
|
|
||||||
working_dir = get_working_dir()
|
# That's the problem
|
||||||
# copy config to working directory. Make this a CLI option later
|
opt_class = optimization_problem()
|
||||||
config = load_config(working_dir, True)
|
|
||||||
opt_class = optimization_problem(config)
|
|
||||||
server_dir = Path(__file__).parent.resolve()
|
server_dir = Path(__file__).parent.resolve()
|
||||||
|
|
||||||
|
|
||||||
@@ -56,22 +72,44 @@ class PdfResponse(FileResponse):
     media_type = "application/pdf"


+@app.get("/config")
+def fastapi_config_get() -> ConfigEOS:
+    """Get the current configuration."""
+    return config_eos
+
+
+@app.put("/config")
+def fastapi_config_put(settings: SettingsEOS) -> ConfigEOS:
+    """Merge settings into current configuration."""
+    config_eos.merge_settings(settings)
+    return config_eos
+
+
+@app.get("/prediction/keys")
+def fastapi_prediction_keys() -> list[str]:
+    """Get a list of available prediction keys."""
+    return sorted(list(prediction_eos.keys()))
+
+
+@app.get("/prediction")
+def fastapi_prediction(key: str) -> list[Union[float | str]]:
+    """Get prediction values for the given key."""
+    values = prediction_eos[key].to_list()
+    return values
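
A minimal usage sketch for the new configuration and prediction endpoints (host and port follow the ServerCommonSettings defaults; the settings payload is illustrative):

    import requests

    base_url = "http://0.0.0.0:8503"

    # Read the merged configuration
    config = requests.get(f"{base_url}/config").json()

    # Merge a settings update into the current configuration (payload illustrative)
    requests.put(f"{base_url}/config", json={"prediction_hours": 24})

    # List the available prediction keys, then fetch one series
    keys = requests.get(f"{base_url}/prediction/keys").json()
    values = requests.get(f"{base_url}/prediction", params={"key": keys[0]}).json()
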
@app.get("/strompreis")
|
@app.get("/strompreis")
|
||||||
def fastapi_strompreis() -> list[float]:
|
def fastapi_strompreis() -> list[float]:
|
||||||
# Get the current date and the end date based on prediction hours
|
# Get the current date and the end date based on prediction hours
|
||||||
date_now, date = get_start_enddate(config.eos.prediction_hours, startdate=datetime.now().date())
|
marketprice_series = prediction_eos["elecprice_marketprice"]
|
||||||
price_forecast = HourlyElectricityPriceForecast(
|
# Fetch prices for the specified date range
|
||||||
source=f"https://api.akkudoktor.net/prices?start={date_now}&end={date}",
|
specific_date_prices = marketprice_series.loc[
|
||||||
config=config,
|
prediction_eos.start_datetime : prediction_eos.end_datetime
|
||||||
use_cache=False,
|
]
|
||||||
)
|
|
||||||
specific_date_prices = price_forecast.get_price_for_daterange(
|
|
||||||
date_now, date
|
|
||||||
) # Fetch prices for the specified date range
|
|
||||||
return specific_date_prices.tolist()
|
return specific_date_prices.tolist()
|
||||||
|
|
||||||
|
|
||||||
class GesamtlastRequest(BaseModel):
|
class GesamtlastRequest(PydanticBaseModel):
|
||||||
year_energy: float
|
year_energy: float
|
||||||
measured_data: List[Dict[str, Any]]
|
measured_data: List[Dict[str, Any]]
|
||||||
hours: int
|
hours: int
|
||||||
@@ -134,26 +172,19 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:


 @app.get("/gesamtlast_simple")
 def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
-    date_now, date = get_start_enddate(
-        config.eos.prediction_hours, startdate=datetime.now().date()
-    )  # Get the current date and prediction end date
-
     ###############
     # Load Forecast
     ###############
     lf = LoadForecast(
         filepath=server_dir / ".." / "data" / "load_profiles.npz", year_energy=year_energy
     )  # Instantiate LoadForecast with specified parameters
-    leistung_haushalt = lf.get_stats_for_date_range(date_now, date)[
-        0
-    ]  # Get expected household load for the date range
+    leistung_haushalt = lf.get_stats_for_date_range(
+        prediction_eos.start_datetime, prediction_eos.end_datetime
+    )[0]  # Get expected household load for the date range

-    gesamtlast = Gesamtlast(
-        prediction_hours=config.eos.prediction_hours
-    )  # Create Gesamtlast instance
-    gesamtlast.hinzufuegen(
-        "Haushalt", leistung_haushalt
-    )  # Add household load to total load calculation
+    prediction_hours = config_eos.prediction_hours if config_eos.prediction_hours else 48
+    gesamtlast = Gesamtlast(prediction_hours=prediction_hours)  # Create Gesamtlast instance
+    gesamtlast.hinzufuegen("Haushalt", leistung_haushalt)  # Add household to total load calculation

     # ###############
     # # WP (Heat Pump)
@@ -165,27 +196,31 @@ def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
     return last.tolist()  # Return total load as JSON


-@app.get("/pvforecast")
-def fastapi_pvprognose(url: str, ac_power_measurement: Optional[float] = None) -> ForecastResponse:
-    date_now, date = get_start_enddate(config.eos.prediction_hours, startdate=datetime.now().date())
+class ForecastResponse(PydanticBaseModel):
+    temperature: list[float]
+    pvpower: list[float]
+
+
+@app.get("/pvforecast")
+def fastapi_pvprognose(ac_power_measurement: Optional[float] = None) -> ForecastResponse:
     ###############
     # PV Forecast
     ###############
-    PVforecast = PVForecast(
-        prediction_hours=config.eos.prediction_hours, url=url
-    )  # Instantiate PVForecast with given parameters
-    if ac_power_measurement is not None:
-        PVforecast.update_ac_power_measurement(
-            date_time=datetime.now(),
-            ac_power_measurement=ac_power_measurement,
-        )  # Update measurement
-
-    # Get PV forecast and temperature forecast for the specified date range
-    pv_forecast = PVforecast.get_pv_forecast_for_date_range(date_now, date)
-    temperature_forecast = PVforecast.get_temperature_for_date_range(date_now, date)
-
-    return ForecastResponse(temperature=temperature_forecast.tolist(), pvpower=pv_forecast.tolist())
+    pvforecast_ac_power = prediction_eos["pvforecast_ac_power"]
+    # Fetch AC power forecast for the specified date range
+    pvforecast_ac_power = pvforecast_ac_power.loc[
+        prediction_eos.start_datetime : prediction_eos.end_datetime
+    ]
+    pvforecastakkudoktor_temp_air = prediction_eos["pvforecastakkudoktor_temp_air"]
+    # Fetch air temperature forecast for the specified date range
+    pvforecastakkudoktor_temp_air = pvforecastakkudoktor_temp_air.loc[
+        prediction_eos.start_datetime : prediction_eos.end_datetime
+    ]
+
+    # Return both forecasts as a JSON response
+    return ForecastResponse(
+        temperature=pvforecastakkudoktor_temp_air.tolist(), pvpower=pvforecast_ac_power.tolist()
+    )
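
A usage sketch for the reworked endpoint (host and port illustrative):

    import requests

    # Both lists cover the prediction window from start_datetime to end_datetime.
    resp = requests.get("http://0.0.0.0:8503/pvforecast").json()
    temperature, pvpower = resp["temperature"], resp["pvpower"]
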
@app.post("/optimize")
|
@app.post("/optimize")
|
||||||
@@ -196,7 +231,11 @@ def fastapi_optimize(
     ] = None,
 ) -> OptimizeResponse:
     if start_hour is None:
-        start_hour = datetime.now().hour
+        start_hour = DateTime.now().hour
+
+    # TODO: Remove when config and prediction update is done by EMS.
+    config_eos.update()
+    prediction_eos.update_data()

     # Perform optimization simulation
     result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour)
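
A sketch of triggering an optimization run over the REST API (host and port illustrative; start_hour is assumed to be a query parameter here):

    import requests

    parameters: dict = {}  # fill with OptimizationParameters fields
    response = requests.post(
        "http://0.0.0.0:8503/optimize",
        json=parameters,
        params={"start_hour": 8},  # optional; defaults to the current hour
    )
    result = response.json()  # OptimizeResponse as JSON
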
@@ -207,9 +246,9 @@ def fastapi_optimize(
 @app.get("/visualization_results.pdf", response_class=PdfResponse)
 def get_pdf() -> PdfResponse:
     # Endpoint to serve the generated PDF with visualization results
-    output_path = config.working_dir / config.directories.output
+    output_path = config_eos.data_output_path
     if not output_path.is_dir():
-        raise SetupIncomplete(f"Output path does not exist: {output_path}.")
+        raise ValueError(f"Output path does not exist: {output_path}.")
     file_path = output_path / "visualization_results.pdf"
     if not file_path.is_file():
         raise HTTPException(status_code=404, detail="No visualization result available.")
@@ -221,29 +260,85 @@ def site_map() -> RedirectResponse:
     return RedirectResponse(url="/docs")


-@app.get("/", include_in_schema=False)
-def root() -> RedirectResponse:
-    # Redirect the root URL to the site map
-    return RedirectResponse(url="/docs")
+# Keep the proxy last to handle all requests that are not taken by the Rest API.
+# Also keep the single endpoints for delete, get, post, put to assure openapi.json is always build
+# the same way for testing.
+
+
+@app.delete("/{path:path}")
+async def proxy_delete(request: Request, path: str) -> Response:
+    return await proxy(request, path)
+
+
+@app.get("/{path:path}")
+async def proxy_get(request: Request, path: str) -> Response:
+    return await proxy(request, path)
+
+
+@app.post("/{path:path}")
+async def proxy_post(request: Request, path: str) -> Response:
+    return await proxy(request, path)
+
+
+@app.put("/{path:path}")
+async def proxy_put(request: Request, path: str) -> Response:
+    return await proxy(request, path)
+
+
+async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse]:
+    if config_eos.server_fasthtml_host and config_eos.server_fasthtml_port:
+        # Proxy to fasthtml server
+        url = f"http://{config_eos.server_fasthtml_host}:{config_eos.server_fasthtml_port}/{path}"
+        headers = dict(request.headers)
+
+        data = await request.body()
+
+        async with httpx.AsyncClient() as client:
+            if request.method == "GET":
+                response = await client.get(url, headers=headers)
+            elif request.method == "POST":
+                response = await client.post(url, headers=headers, content=data)
+            elif request.method == "PUT":
+                response = await client.put(url, headers=headers, content=data)
+            elif request.method == "DELETE":
+                response = await client.delete(url, headers=headers, content=data)
+
+        return Response(
+            content=response.content,
+            status_code=response.status_code,
+            headers=dict(response.headers),
+        )
+    else:
+        # Redirect the root URL to the site map
+        return RedirectResponse(url="/docs")
+
+
+def start_fasthtml_server() -> subprocess.Popen:
+    """Start the fasthtml server as a subprocess."""
+    server_process = subprocess.Popen(
+        [sys.executable, str(server_dir.joinpath("fasthtml_server.py"))],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+    return server_process
+
+
+def start_fastapi_server() -> None:
+    """Start FastAPI server."""
+    try:
+        uvicorn.run(
+            app,
+            host=str(config_eos.server_fastapi_host),
+            port=config_eos.server_fastapi_port,
+            log_level="debug",
+            access_log=True,
+        )
+    except Exception as e:
+        logger.error(
+            f"Could not bind to host {config_eos.server_fastapi_host}:{config_eos.server_fastapi_port}. Error: {e}"
+        )
+        sys.exit(1)


 if __name__ == "__main__":
-    try:
-        config.run_setup()
-    except Exception as e:
-        print(f"Failed to initialize: {e}")
-        exit(1)
-
-    # Set host and port from environment variables or defaults
-    host = os.getenv("EOS_RUN_HOST", "0.0.0.0")
-    port = os.getenv("EOS_RUN_PORT", 8503)
-    try:
-        uvicorn.run(app, host=host, port=int(port))  # Run the FastAPI application
-    except Exception as e:
-        print(
-            f"Could not bind to host {host}:{port}. Error: {e}"
-        )  # Error handling for binding issues
-        exit(1)
-else:
-    # started from cli / dev server
-    config.run_setup()
+    start_fastapi_server()
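
A sketch of the resulting routing behavior (paths and ports illustrative):

    import requests

    # A path not served by the REST API is forwarded to the FastHTML server
    # (default port 8504) when one is configured; otherwise the request is
    # redirected to /docs.
    r = requests.get("http://0.0.0.0:8503/")
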

src/akkudoktoreos/server/fasthtml_server.py (new file, 60 lines)
@@ -0,0 +1,60 @@
+import uvicorn
+from fasthtml.common import H1, FastHTML, Table, Td, Th, Thead, Titled, Tr
+
+from akkudoktoreos.config.config import get_config
+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)
+
+config_eos = get_config()
+
+
+configs = []
+for field_name in config_eos.model_fields:
+    config = {}
+    config["name"] = field_name
+    config["value"] = getattr(config_eos, field_name)
+    config["default"] = config_eos.model_fields[field_name].default
+    config["description"] = config_eos.model_fields[field_name].description
+    configs.append(config)
+
+
+app = FastHTML()
+rt = app.route
+
+
+def config_table() -> Table:
+    rows = [
+        Tr(
+            Td(config["name"]),
+            Td(config["value"]),
+            Td(config["default"]),
+            Td(config["description"]),
+            cls="even:bg-purple/5",
+        )
+        for config in configs
+    ]
+    flds = "Name", "Value", "Default", "Description"
+    head = Thead(*map(Th, flds), cls="bg-purple/10")
+    return Table(head, *rows, cls="w-full")
+
+
+@rt("/")
+def get():  # type: ignore
+    return Titled("EOS Config App", H1("Configuration"), config_table())
+
+
+if __name__ == "__main__":
+    try:
+        logger.info(
+            f"Starting {config_eos.server_fasthtml_host}:{config_eos.server_fasthtml_port}."
+        )
+        uvicorn.run(
+            app, host=str(config_eos.server_fasthtml_host), port=config_eos.server_fasthtml_port
+        )
+    except Exception as e:
+        # Error handling for binding issues
+        logger.error(
+            f"Could not bind to host {config_eos.server_fasthtml_host}:{config_eos.server_fasthtml_port}. Error: {e}"
+        )
+        exit(1)

src/akkudoktoreos/server/server.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+"""Server Module."""
+
+from typing import Optional
+
+from pydantic import Field, IPvAnyAddress, field_validator
+
+from akkudoktoreos.config.configabc import SettingsBaseModel
+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)
+
+
+class ServerCommonSettings(SettingsBaseModel):
+    """Common server settings.
+
+    Attributes:
+        To be added
+    """
+
+    server_fastapi_host: Optional[IPvAnyAddress] = Field(
+        default="0.0.0.0", description="FastAPI server IP address."
+    )
+    server_fastapi_port: Optional[int] = Field(
+        default=8503, description="FastAPI server IP port number."
+    )
+    server_fasthtml_host: Optional[IPvAnyAddress] = Field(
+        default="0.0.0.0", description="FastHTML server IP address."
+    )
+    server_fasthtml_port: Optional[int] = Field(
+        default=8504, description="FastHTML server IP port number."
+    )
+
+    @field_validator("server_fastapi_port", "server_fasthtml_port")
+    def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
+        if value is not None and not (1024 <= value <= 49151):
+            raise ValueError("Server port number must be between 1024 and 49151.")
+        return value
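
A sketch of the port validator's behavior (assuming ServerCommonSettings as defined above):

    from pydantic import ValidationError

    ServerCommonSettings(server_fastapi_port=8503)  # accepted: within 1024..49151

    try:
        ServerCommonSettings(server_fastapi_port=80)  # rejected: privileged port
    except ValidationError as e:
        print(e)  # "Server port number must be between 1024 and 49151."
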

@@ -1,6 +1,4 @@
-"""cachefilestore.py.
+"""Class for in-memory managing of cache files.

-This module provides a class for in-memory managing of cache files.
-
 The `CacheFileStore` class is a singleton-based, thread-safe key-value store for managing
 temporary file objects, allowing the creation, retrieval, and management of cache files.
@@ -34,12 +32,23 @@ import pickle
 import tempfile
 import threading
 from datetime import date, datetime, time, timedelta
-from typing import IO, Callable, Generic, List, Optional, ParamSpec, TypeVar, Union
+from typing import (
+    IO,
+    Any,
+    Callable,
+    Generic,
+    List,
+    Literal,
+    Optional,
+    ParamSpec,
+    TypeVar,
+    Union,
+)

-from akkudoktoreos.utils.datetimeutil import to_datetime, to_timedelta
+from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
 from akkudoktoreos.utils.logutil import get_logger

-logger = get_logger(__file__)
+logger = get_logger(__name__)


 T = TypeVar("T")
@@ -106,7 +115,7 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

        Args:
            key (str): The key that identifies the cache file.
-           until_datetime (Union[datetime, date, str, int, float, None]): The datetime
+           until_datetime (Optional[Any]): The datetime
                until the cache file is valid. The default is the current date at maximum time
                (23:59:59).
@@ -140,15 +149,15 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

     def _until_datetime_by_options(
         self,
-        until_date: Union[datetime, date, str, int, float, None] = None,
-        until_datetime: Union[datetime, date, str, int, float, None] = None,
+        until_date: Optional[Any] = None,
+        until_datetime: Optional[Any] = None,
         with_ttl: Union[timedelta, str, int, float, None] = None,
     ) -> datetime:
         """Get until_datetime from the given options."""
         if until_datetime:
             until_datetime = to_datetime(until_datetime)
         elif with_ttl:
-            with_ttl = to_timedelta(with_ttl)
+            with_ttl = to_duration(with_ttl)
             until_datetime = to_datetime(datetime.now() + with_ttl)
         elif until_date:
             until_datetime = to_datetime(to_datetime(until_date).date())
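
A sketch of the expiration precedence implemented above: an explicit until_datetime wins over with_ttl, which wins over until_date (the import path is assumed):

    from akkudoktoreos.utils.cachefilestore import CacheFileStore  # path assumed

    store = CacheFileStore()
    store.create("weather", with_ttl="1 hour")           # valid for one hour from now
    store.create("prices", until_datetime="2024-10-13")  # valid until an explicit datetime
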
@@ -176,9 +185,9 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def _search(
         self,
         key: str,
-        until_datetime: Union[datetime, date, str, int, float, None] = None,
-        at_datetime: Union[datetime, date, str, int, float, None] = None,
-        before_datetime: Union[datetime, date, str, int, float, None] = None,
+        until_datetime: Optional[Any] = None,
+        at_datetime: Optional[Any] = None,
+        before_datetime: Optional[Any] = None,
     ) -> Optional[tuple[str, IO[bytes], datetime]]:
         """Searches for a cached item that matches the key and falls within the datetime range.
@@ -188,12 +197,10 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

        Args:
            key (str): The key to identify the cache item.
-           until_date (Union[datetime, date, str, int, float, None], optional): The date
+           until_date (Optional[Any]): The date
                until the cache file is valid. Time of day is set to maximum time (23:59:59).
-           at_datetime (Union[datetime, date, str, int, float], optional): The datetime to compare with
-               the cache item's datetime.
-           before_datetime (Union[datetime, date, str, int, float], optional): The datetime to compare
-               the cache item's datetime to be before.
+           at_datetime (Optional[Any]): The datetime to compare with the cache item's datetime.
+           before_datetime (Optional[Any]): The datetime to compare the cache item's datetime to be before.

        Returns:
            Optional[tuple]: Returns the cache_file_key, cache_file, cache_file_datetime if found,
@@ -235,8 +242,8 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def create(
         self,
         key: str,
-        until_date: Union[datetime, date, str, int, float, None] = None,
-        until_datetime: Union[datetime, date, str, int, float, None] = None,
+        until_date: Optional[Any] = None,
+        until_datetime: Optional[Any] = None,
         with_ttl: Union[timedelta, str, int, float, None] = None,
         mode: str = "wb+",
         delete: bool = False,
@@ -249,9 +256,9 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

        Args:
            key (str): The key to store the cache file under.
-           until_date (Union[datetime, date, str, int, float, None], optional): The date
+           until_date (Optional[Any]): The date
                until the cache file is valid. Time of day is set to maximum time (23:59:59).
-           until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
+           until_datetime (Optional[Any]): The datetime
                until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
                provided.
            with_ttl (Union[timedelta, str, int, float, None], optional): The time to live that
@@ -293,8 +300,8 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
         self,
         key: str,
         file_obj: IO[bytes],
-        until_date: Union[datetime, date, str, int, float, None] = None,
-        until_datetime: Union[datetime, date, str, int, float, None] = None,
+        until_date: Optional[Any] = None,
+        until_datetime: Optional[Any] = None,
         with_ttl: Union[timedelta, str, int, float, None] = None,
     ) -> None:
         """Stores a file-like object in the cache under the specified key and date.
@@ -305,9 +312,9 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
        Args:
            key (str): The key to store the file object under.
            file_obj: The file-like object.
-           until_date (Union[datetime, date, str, int, float, None], optional): The date
+           until_date (Optional[Any]): The date
                until the cache file is valid. Time of day is set to maximum time (23:59:59).
-           until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
+           until_datetime (Optional[Any]): The datetime
                until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
                provided.
            with_ttl (Union[timedelta, str, int, float, None], optional): The time to live that
@@ -333,10 +340,10 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def get(
         self,
         key: str,
-        until_date: Union[datetime, date, str, int, float, None] = None,
-        until_datetime: Union[datetime, date, str, int, float, None] = None,
-        at_datetime: Union[datetime, date, str, int, float, None] = None,
-        before_datetime: Union[datetime, date, str, int, float, None] = None,
+        until_date: Optional[Any] = None,
+        until_datetime: Optional[Any] = None,
+        at_datetime: Optional[Any] = None,
+        before_datetime: Optional[Any] = None,
     ) -> Optional[IO[bytes]]:
         """Retrieves the cache file associated with the given key and validity datetime.
@@ -345,15 +352,15 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

        Args:
            key (str): The key to retrieve the cache file for.
-           until_date (Union[datetime, date, str, int, float, None], optional): The date
+           until_date (Optional[Any]): The date
                until the cache file is valid. Time of day is set to maximum time (23:59:59).
-           until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
+           until_datetime (Optional[Any]): The datetime
                until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
                provided.
-           at_datetime (Union[datetime, date, str, int, float, None], optional): The datetime the
+           at_datetime (Optional[Any]): The datetime the
                cache file shall be valid at. Time of day is set to maximum time (23:59:59) if not
                provided. Defaults to the current datetime if None is provided.
-           before_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
+           before_datetime (Optional[Any]): The datetime
                to compare the cache files datetime to be before.

        Returns:
@@ -385,9 +392,9 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def delete(
         self,
         key: str,
-        until_date: Union[datetime, date, str, int, float, None] = None,
-        until_datetime: Union[datetime, date, str, int, float, None] = None,
-        before_datetime: Union[datetime, date, str, int, float, None] = None,
+        until_date: Optional[Any] = None,
+        until_datetime: Optional[Any] = None,
+        before_datetime: Optional[Any] = None,
     ) -> None:
         """Deletes the cache file associated with the given key and datetime.
@@ -395,12 +402,12 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

        Args:
            key (str): The key of the cache file to delete.
-           until_date (Union[datetime, date, str, int, float, None], optional): The date
+           until_date (Optional[Any]): The date
                until the cache file is valid. Time of day is set to maximum time (23:59:59).
-           until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
+           until_datetime (Optional[Any]): The datetime
                until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
                provided.
-           before_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
+           before_datetime (Optional[Any]): The datetime
                the cache file shall become or be invalid at. Time of day is set to maximum time
                (23:59:59) if not provided. Defaults to tomorrow start of day.
        """
@@ -441,13 +448,13 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):
     def clear(
         self,
         clear_all: bool = False,
-        before_datetime: Union[datetime, date, str, int, float, None] = None,
+        before_datetime: Optional[Any] = None,
     ) -> None:
         """Deletes all cache files or those expiring before `before_datetime`.

        Args:
            clear_all (bool, optional): Delete all cache files. Default is False.
-           before_datetime (Union[datetime, date, str, int, float, None], optional): The
+           before_datetime (Optional[Any]): The
                threshold date. Cache files that are only valid before this date will be deleted.
                The default datetime is beginning of today.
@@ -506,77 +513,108 @@ class CacheFileStore(metaclass=CacheFileStoreMeta):

 def cache_in_file(
     ignore_params: List[str] = [],
-    until_date: Union[datetime, date, str, int, float, None] = None,
-    until_datetime: Union[datetime, date, str, int, float, None] = None,
+    force_update: Optional[bool] = None,
+    until_date: Optional[Any] = None,
+    until_datetime: Optional[Any] = None,
     with_ttl: Union[timedelta, str, int, float, None] = None,
-    mode: str = "wb+",
+    mode: Literal["w", "w+", "wb", "wb+", "r", "r+", "rb", "rb+"] = "wb+",
     delete: bool = False,
     suffix: Optional[str] = None,
 ) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]:
-    """Decorator to cache the output of a function into a temporary file.
+    """Cache the output of a function into a temporary file.

-    The decorator caches function output to a cache file based on its inputs as key to identify the
-    cache file. Ignore parameters are used to avoid key generation on non-deterministic inputs, such
-    as time values. We can also ignore parameters that are slow to serialize/constant across runs,
-    such as large objects.
+    This decorator caches the result of a function call in a temporary file. The cache is
+    identified by a key derived from the function's input arguments, excluding those specified
+    in `ignore_params`. This is useful for caching results of expensive computations while
+    avoiding redundant recalculations.

-    The cache file is created using `CacheFileStore` and stored with the generated key.
-    If the file exists in the cache and has not expired, it is returned instead of recomputing the
-    result.
+    The cache file is created using `CacheFileStore` and stored with the generated key. If a valid
+    cache file exists, it is returned instead of recomputing the result. The cache expiration is
+    controlled by the `until_date`, `until_datetime`, `with_ttl`, or `force_update` arguments.
+    If these arguments are present in the function call, their values override those specified in
+    the decorator.

-    The decorator scans the arguments of the decorated function for a 'until_date' or
-    'until_datetime` or `with_ttl` or `force_update` parameter. The value of this parameter will be
-    used instead of the one given in the decorator if available.
-
-    Content of cache files without a suffix are transparently pickled to save file space.
+    By default, cache files are pickled to save storage space unless a `suffix` is provided. The
+    `mode` parameter allows specifying file modes for reading and writing, and the `delete`
+    parameter controls whether the cache file is deleted after use.

     Args:
         ignore_params (List[str], optional):
-        until_date (Union[datetime, date, str, int, float, None], optional): The date
-            until the cache file is valid. Time of day is set to maximum time (23:59:59).
-        until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
-            until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
-            provided.
-        with_ttl (Union[timedelta, str, int, float, None], optional): The time to live that
-            the cache file is valid. Time starts now.
-        mode (str, optional): The mode in which the file will be opened. Defaults to 'wb+'.
-        delete (bool, optional): Whether the cache file will be deleted after being closed.
-            Defaults to False.
-        suffix (str, optional): A suffix for the cache file, such as an extension (e.g., '.txt').
-            Defaults to None.
+            List of parameter names to ignore when generating the cache key. Useful for excluding
+            non-deterministic or irrelevant inputs, such as timestamps or large constant objects.
+        force_update (bool, optional):
+            Forces the cache to update, bypassing any existing cached results. If not provided,
+            the function will check for a `force_update` argument in the decorated function call.
+        until_date (Optional[Any], optional):
+            Date until which the cache file is valid. If a date is provided, the time is set to
+            the end of the day (23:59:59). If not specified, the function call arguments are checked.
+        until_datetime (Optional[Any], optional):
+            Datetime until which the cache file is valid. Time of day is set to maximum time
+            (23:59:59) if not provided.
+        with_ttl (Union[timedelta, str, int, float, None], optional):
+            Time-to-live (TTL) for the cache file, starting from the time of caching. Can be
+            specified as a `timedelta`, a numeric value (in seconds), or a string.
+        mode (Literal["w", "w+", "wb", "wb+", "r", "r+", "rb", "rb+"], optional):
+            File mode for opening the cache file. Defaults to "wb+" (write-binary with updates).
+        delete (bool, optional):
+            If True, deletes the cache file after it is closed. Defaults to False.
+        suffix (Optional[str], optional):
+            A file suffix (e.g., ".txt" or ".json") for the cache file. Defaults to None. If not
+            provided, files are pickled by default.

     Returns:
-        callable: A decorated function that caches its result in a file.
+        Callable[[Callable[Param, RetType]], Callable[Param, RetType]]:
+            A decorated function that caches its result in a temporary file.

     Example:
-        >>> @cache_in_file(suffix = '.txt')
-        >>> def expensive_computation(until_date = None):
+        >>> from datetime import date
+        >>> @cache_in_file(suffix='.txt')
+        >>> def expensive_computation(until_date=None):
         >>>     # Perform some expensive computation
         >>>     return 'Some large result'
        >>>
-        >>> result = expensive_computation(until_date = date.today())
+        >>> result = expensive_computation(until_date=date.today())

+    Notes:
+        - The cache key is based on the function arguments after excluding those in `ignore_params`.
+        - If conflicting expiration parameters are provided (`until_date`, `until_datetime`,
+          `with_ttl`), the one in the function call takes precedence.
     """

     def decorator(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
-        nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
+        nonlocal \
+            ignore_params, \
+            force_update, \
+            until_date, \
+            until_datetime, \
+            with_ttl, \
+            mode, \
+            delete, \
+            suffix
         func_source_code = inspect.getsource(func)

         def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType:
-            nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
+            nonlocal \
+                ignore_params, \
+                force_update, \
+                until_date, \
+                until_datetime, \
+                with_ttl, \
+                mode, \
+                delete, \
+                suffix
             # Convert args to a dictionary based on the function's signature
             args_names = func.__code__.co_varnames[: func.__code__.co_argcount]
             args_dict = dict(zip(args_names, args))

             # Search for caching parameters of function and remove
-            force_update: Optional[bool] = None
             for param in ["force_update", "until_datetime", "with_ttl", "until_date"]:
                 if param in kwargs:
                     if param == "force_update":
                         force_update = kwargs[param]  # type: ignore[assignment]
                         kwargs.pop("force_update")

                     if param == "until_datetime":
-                        until_datetime = kwargs[param]  # type: ignore[assignment]
+                        until_datetime = kwargs[param]
                         until_date = None
                         with_ttl = None
                     elif param == "with_ttl":
@@ -585,8 +623,9 @@ def cache_in_file(
                         with_ttl = kwargs[param]  # type: ignore[assignment]
                     elif param == "until_date":
                         until_datetime = None
-                        until_date = kwargs[param]  # type: ignore[assignment]
+                        until_date = kwargs[param]
                         with_ttl = None
+            kwargs.pop("force_update", None)
             kwargs.pop("until_datetime", None)
             kwargs.pop("until_date", None)
             kwargs.pop("with_ttl", None)
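
A usage sketch for the extended decorator (the import path is assumed; the fetch logic is illustrative):

    from akkudoktoreos.utils.cachefilestore import cache_in_file  # path assumed

    @cache_in_file(ignore_params=["verbose"], with_ttl="1 hour")
    def fetch_prices(url: str, verbose: bool = False) -> bytes:
        # expensive download; the result is cached for one hour
        return b"..."

    data = fetch_prices("https://api.akkudoktor.net/prices")
    data = fetch_prices("https://api.akkudoktor.net/prices", force_update=True)  # bypass the cache
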

@@ -4,8 +4,8 @@ Functions:
 ----------
 - to_datetime: Converts various date or time inputs to a timezone-aware or naive `datetime`
   object or formatted string.
-- to_timedelta: Converts various time delta inputs to a `timedelta` object.
+- to_duration: Converts various time delta inputs to a `timedelta` object.
-- to_timezone: Converts position latitude and longitude to a `timezone` object.
+- to_timezone: Converts utc offset or location latitude and longitude to a `timezone` object.

 Example usage:
 --------------
@@ -16,192 +16,204 @@ Example usage:
     >>> print(date_obj)  # Output: datetime object for '2024-10-15'

     # Time delta conversion
-    >>> to_timedelta("2 days 5 hours")
+    >>> to_duration("2 days 5 hours")

     # Timezone detection
-    >>> to_timezone(40.7128, -74.0060)
+    >>> to_timezone(location={40.7128, -74.0060})
 """

 import re
-from datetime import date, datetime, time, timedelta, timezone
-from typing import Annotated, Literal, Optional, Union, overload
-from zoneinfo import ZoneInfo
+from datetime import date, datetime, timedelta
+from typing import Any, List, Literal, Optional, Tuple, Union, overload

+import pendulum
+from pendulum import DateTime
+from pendulum.tz.timezone import Timezone
 from timezonefinder import TimezoneFinder

+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)
+

 @overload
 def to_datetime(
-    date_input: Union[datetime, date, str, int, float, None],
-    as_string: str | Literal[True],
-    to_timezone: Optional[Union[ZoneInfo, str]] = None,
+    date_input: Optional[Any] = None,
+    as_string: Literal[False] | None = None,
+    in_timezone: Optional[Union[str, Timezone]] = None,
+    to_naiv: Optional[bool] = None,
+    to_maxtime: Optional[bool] = None,
+) -> DateTime: ...
+
+
+@overload
+def to_datetime(
+    date_input: Optional[Any] = None,
+    as_string: str | Literal[True] = True,
+    in_timezone: Optional[Union[str, Timezone]] = None,
     to_naiv: Optional[bool] = None,
     to_maxtime: Optional[bool] = None,
 ) -> str: ...


-@overload
-def to_datetime(
-    date_input: Union[datetime, date, str, int, float, None],
-    as_string: Literal[False] | None = None,
-    to_timezone: Optional[Union[ZoneInfo, str]] = None,
-    to_naiv: Optional[bool] = None,
-    to_maxtime: Optional[bool] = None,
-) -> datetime: ...
-
-
 def to_datetime(
-    date_input: Union[datetime, date, str, int, float, None],
+    date_input: Optional[Any] = None,
     as_string: Optional[Union[str, bool]] = None,
-    to_timezone: Optional[Union[ZoneInfo, str]] = None,
+    in_timezone: Optional[Union[str, Timezone]] = None,
     to_naiv: Optional[bool] = None,
     to_maxtime: Optional[bool] = None,
-) -> str | datetime:
-    """Converts a date input to a datetime object or a formatted string with timezone support.
+) -> Union[DateTime, str]:
+    """Convert a date input into a Pendulum DateTime object or a formatted string, with optional timezone handling.
+
+    This function handles various date input formats, adjusts for timezones, and provides flexibility for formatting and time adjustments. For date strings without explicit timezone information, the local timezone is assumed. Be aware that Pendulum DateTime objects created without a timezone default to UTC.

     Args:
-        date_input (Union[datetime, date, str, int, float, None]): The date input to convert.
-            Accepts a date string, a datetime object, a date object or a Unix timestamp.
-        as_string (Optional[Union[str, bool]]): If as_string is given (a format string or true)
-            return datetime as a string. Otherwise, return a datetime object, which is the default.
-            If true is given the string will returned in ISO format.
-            If a format string is given it may define the special formats "UTC" or "utc"
-            to return a string in ISO format normalized to UTC. Otherwise the format string must be
-            given compliant to Python's `datetime.strptime`.
-        to_timezone (Optional[Union[timezone, str]]):
-            Optional timezone object or name (e.g., 'UTC', 'Europe/Berlin').
-            If provided, the datetime will be converted to this timezone.
-            If not provided, the datetime will be converted to the local timezone.
-        to_naiv (Optional[bool]):
-            If True, remove timezone info from datetime after conversion.
-            If False, keep timezone info after conversion. The default.
-        to_maxtime (Optional[bool]):
-            If True, convert to maximum time if no time is given. The default.
-            If False, convert to minimum time if no time is given.
-
-    Example:
-        to_datetime("2027-12-12 24:13:12", as_string = "%Y-%m-%dT%H:%M:%S.%f%z")
+        date_input (Optional[Any]): The date input to convert. Supported types include:
+            - `str`: A date string in various formats (e.g., "2024-10-13", "13 Oct 2024").
+            - `pendulum.DateTime`: A Pendulum DateTime object.
+            - `datetime.datetime`: A standard Python datetime object.
+            - `datetime.date`: A date object, which will be converted to a datetime at the start or end of the day.
+            - `int` or `float`: A Unix timestamp, interpreted as seconds since the epoch (UTC).
+            - `None`: Defaults to the current date and time, adjusted to the start or end of the day based on `to_maxtime`.
+
+        as_string (Optional[Union[str, bool]]): Determines the output format:
+            - `True`: Returns the datetime in ISO 8601 string format.
+            - `"UTC"` or `"utc"`: Returns the datetime normalized to UTC as an ISO 8601 string.
+            - `str`: A custom date format string for the output (e.g., "YYYY-MM-DD HH:mm:ss").
+            - `False` or `None` (default): Returns a `pendulum.DateTime` object.
+
+        in_timezone (Optional[Union[str, Timezone]]): Specifies the target timezone for the result.
+            - Can be a timezone string (e.g., "UTC", "Europe/Berlin") or a `pendulum.Timezone` object.
+            - Defaults to the local timezone if not provided.
+
+        to_naiv (Optional[bool]): If `True`, removes timezone information from the resulting datetime object.
+            - Defaults to `False`.
+
+        to_maxtime (Optional[bool]): Determines the time portion of the resulting datetime for date inputs:
+            - `True`: Sets the time to the end of the day (23:59:59).
+            - `False` or `None`: Sets the time to the start of the day (00:00:00).
+            - Ignored if `date_input` includes an explicit time or if the input is a timestamp.

     Returns:
-        datetime or str: Converted date as a datetime object or a formatted string with timezone.
+        pendulum.DateTime or str:
+            - A timezone-aware Pendulum DateTime object by default.
+            - A string representation if `as_string` is specified.

     Raises:
-        ValueError: If the date input is not a valid type or format.
-        RuntimeError: If no local timezone information available.
+        ValueError: If `date_input` is not a valid or supported type, or if the date string cannot be parsed.
+
+    Examples:
+        >>> to_datetime("2024-10-13", as_string=True, in_timezone="UTC")
+        '2024-10-13T00:00:00+00:00'
+
+        >>> to_datetime("2024-10-13T15:30:00", in_timezone="Europe/Berlin")
+        DateTime(2024, 10, 13, 17, 30, 0, tzinfo=Timezone('Europe/Berlin'))
+
+        >>> to_datetime(date(2024, 10, 13), to_maxtime=True)
+        DateTime(2024, 10, 13, 23, 59, 59, tzinfo=Timezone('Local'))
+
+        >>> to_datetime(1698784800, as_string="YYYY-MM-DD HH:mm:ss", in_timezone="UTC")
+        '2024-10-31 12:00:00'
     """
-    dt_object: Optional[datetime] = None
-    if isinstance(date_input, datetime):
-        dt_object = date_input
-    elif isinstance(date_input, date):
-        # Convert date object to datetime object
-        if to_maxtime is None or to_maxtime:
-            dt_object = datetime.combine(date_input, time.max)
-        else:
-            dt_object = datetime.combine(date_input, time.max)
-    elif isinstance(date_input, (int, float)):
-        # Convert timestamp to datetime object
-        dt_object = datetime.fromtimestamp(date_input, tz=timezone.utc)
-    elif isinstance(date_input, str):
-        # Convert string to datetime object
-        try:
-            # Try ISO format
-            dt_object = datetime.fromisoformat(date_input)
-        except ValueError as e:
-            formats = [
-                "%Y-%m-%d",  # Format: 2024-10-13
-                "%d/%m/%y",  # Format: 13/10/24
-                "%d/%m/%Y",  # Format: 13/10/2024
-                "%m-%d-%Y",  # Format: 10-13-2024
-                "%Y.%m.%d",  # Format: 2024.10.13
-                "%d %b %Y",  # Format: 13 Oct 2024
-                "%d %B %Y",  # Format: 13 October 2024
-                "%Y-%m-%d %H:%M:%S",  # Format: 2024-10-13 15:30:00
-                "%Y-%m-%d %H:%M:%S%z",  # Format with timezone: 2024-10-13 15:30:00+0000
-                "%Y-%m-%d %H:%M:%S%z:00",  # Format with timezone: 2024-10-13 15:30:00+0000
-                "%Y-%m-%dT%H:%M:%S.%f%z",  # Format with timezone: 2024-10-13T15:30:00.000+0000
-            ]
-            for fmt in formats:
-                try:
-                    dt_object = datetime.strptime(date_input, fmt)
-                    break
-                except ValueError as e:
-                    continue
-            if dt_object is None:
-                raise ValueError(f"Date string {date_input} does not match any known formats.")
-    elif date_input is None:
-        if to_maxtime is None or to_maxtime:
-            dt_object = datetime.combine(date.today(), time.max)
-        else:
-            dt_object = datetime.combine(date.today(), time.min)
-    else:
-        raise ValueError(f"Unsupported date input type: {type(date_input)}")
-
-    # Get local timezone
-    local_date = datetime.now().astimezone()
-    local_tz_name = local_date.tzname()
-    local_utc_offset = local_date.utcoffset()
-    if local_tz_name is None or local_utc_offset is None:
-        raise RuntimeError("Could not determine local time zone")
-    local_timezone = timezone(local_utc_offset, local_tz_name)
-
-    # Get target timezone
-    if to_timezone:
-        if isinstance(to_timezone, ZoneInfo):
-            target_timezone = to_timezone
-        elif isinstance(to_timezone, str):
-            try:
-                target_timezone = ZoneInfo(to_timezone)
-            except Exception as e:
-                raise ValueError(f"Invalid timezone: {to_timezone}") from e
-        else:
-            raise ValueError(f"Invalid timezone: {to_timezone}")
-
-    # Adjust/Add timezone information
-    if dt_object.tzinfo is None or dt_object.tzinfo.utcoffset(dt_object) is None:
-        # datetime object is naive (not timezone aware)
-        # Add timezone
-        if to_timezone is None:
-            # Add local timezone
-            dt_object = dt_object.replace(tzinfo=local_timezone)
-        else:
-            # Set to target timezone
-            dt_object = dt_object.replace(tzinfo=target_timezone)
-    elif to_timezone:
-        # Localize the datetime object to given target timezone
-        dt_object = dt_object.astimezone(target_timezone)
-    else:
-        # Localize the datetime object to local timezone
-        dt_object = dt_object.astimezone(local_timezone)
+    # Timezone to convert to
+    if in_timezone is None:
+        in_timezone = pendulum.local_timezone()
+    elif not isinstance(in_timezone, Timezone):
+        in_timezone = pendulum.timezone(in_timezone)
+
+    if isinstance(date_input, DateTime):
+        dt = date_input
+    elif isinstance(date_input, str):
+        # Convert to timezone aware datetime
+        dt = None
+        formats = [
+            "YYYY-MM-DD",  # Format: 2024-10-13
+            "DD/MM/YY",  # Format: 13/10/24
+            "DD/MM/YYYY",  # Format: 13/10/2024
+            "MM-DD-YYYY",  # Format: 10-13-2024
+            "D.M.YYYY",  # Format: 1.7.2024
+            "YYYY.MM.DD",  # Format: 2024.10.13
+            "D MMM YYYY",  # Format: 13 Oct 2024
+            "D MMMM YYYY",  # Format: 13 October 2024
+            "YYYY-MM-DD HH:mm:ss",  # Format: 2024-10-13 15:30:00
+            "YYYY-MM-DDTHH:mm:ss",  # Format: 2024-10-13T15:30:00
+        ]
+        for fmt in formats:
+            # DateTime input without timezone info
+            try:
+                fmt_tz = f"{fmt} z"
+                dt_tz = f"{date_input} {in_timezone}"
+                dt = pendulum.from_format(dt_tz, fmt_tz)
+                logger.debug(
+                    f"Str Fmt converted: {dt}, tz={dt.tz} from {date_input}, tz={in_timezone}"
+                )
+                break
+            except ValueError as e:
+                logger.debug(f"{date_input}, {fmt}, {e}")
+                dt = None
+        else:
+            # DateTime input with timezone info
+            try:
+                dt = pendulum.parse(date_input)
+                logger.debug(
+                    f"Pendulum Fmt converted: {dt}, tz={dt.tz} from {date_input}, tz={in_timezone}"
+                )
+            except pendulum.parsing.exceptions.ParserError as e:
+                logger.debug(f"Date string {date_input} does not match any Pendulum formats: {e}")
+                dt = None
+        if dt is None:
+            raise ValueError(f"Date string {date_input} does not match any known formats.")
+    elif date_input is None:
+        dt = (
+            pendulum.today(tz=in_timezone).end_of("day")
+            if to_maxtime
+            else pendulum.today(tz=in_timezone).start_of("day")
+        )
+    elif isinstance(date_input, datetime):
+        dt = pendulum.instance(date_input)
+    elif isinstance(date_input, date):
+        dt = pendulum.instance(
+            datetime.combine(date_input, datetime.max.time() if to_maxtime else datetime.min.time())
+        )
+    elif isinstance(date_input, (int, float)):
+        dt = pendulum.from_timestamp(date_input, tz="UTC")
+    else:
+        error_msg = f"Unsupported date input type: {type(date_input)}"
+        logger.error(error_msg)
+        raise ValueError(error_msg)
+
+    # Represent in target timezone
+    dt_in_tz = dt.in_timezone(in_timezone)
+    logger.debug(
+        f"\nTimezone adapted to: {in_timezone}\nfrom: {dt} tz={dt.timezone}\nto: {dt_in_tz} tz={dt_in_tz.tz}"
+    )
+    dt = dt_in_tz
+
+    # Remove timezone info if specified
     if to_naiv:
-        # Remove timezone info to make the datetime naiv
-        dt_object = dt_object.replace(tzinfo=None)
+        dt = dt.naive()

-    if as_string:
-        # Return formatted string as defined by as_string
-        if isinstance(as_string, bool):
-            return dt_object.isoformat()
-        elif as_string == "UTC" or as_string == "utc":
-            dt_object = dt_object.astimezone(timezone.utc)
-            return dt_object.isoformat()
-        else:
-            return dt_object.strftime(as_string)
-    else:
-        return dt_object
+    # Return as formatted string if specified
+    if isinstance(as_string, str):
+        if as_string.lower() == "utc":
+            return dt.in_timezone("UTC").to_iso8601_string()
+        else:
+            return dt.format(as_string)
+    if isinstance(as_string, bool) and as_string is True:
+        return dt.to_iso8601_string()
+
+    return dt


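A sketch of how the reworked overloads resolve (values mirror the docstring examples):

    from akkudoktoreos.utils.datetimeutil import to_datetime

    dt = to_datetime("2024-10-13", in_timezone="Europe/Berlin")  # pendulum.DateTime
    s = to_datetime("2024-10-13", as_string="UTC")               # ISO 8601 string normalized to UTC
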
-def to_timedelta(
-    input_value: Union[
-        timedelta, str, int, float, tuple[int, int, int, int], Annotated[list[int], 4]
-    ],
+def to_duration(
+    input_value: Union[timedelta, str, int, float, Tuple[int, int, int, int], List[int]],
 ) -> timedelta:
-    """Converts various input types into a timedelta object.
+    """Converts various input types into a timedelta object using pendulum.

     Args:
         input_value (Union[timedelta, str, int, float, tuple, list]): Input to be converted
-            timedelta.
-        - str: A string like "2 days", "5 hours", "30 minutes", or a combination.
+            into a timedelta:
+            - str: A duration string like "2 days", "5 hours", "30 minutes", or a combination.
         - int/float: Number representing seconds.
         - tuple/list: A tuple or list in the format (days, hours, minutes, seconds).
@ -212,14 +224,14 @@ def to_timedelta(
|
|||||||
ValueError: If the input format is not supported.
|
ValueError: If the input format is not supported.
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
>>> to_timedelta("2 days 5 hours")
|
>>> to_duration("2 days 5 hours")
|
||||||
datetime.timedelta(days=2, seconds=18000)
|
timedelta(days=2, seconds=18000)
|
||||||
|
|
||||||
>>> to_timedelta(3600)
|
>>> to_duration(3600)
|
||||||
datetime.timedelta(seconds=3600)
|
timedelta(seconds=3600)
|
||||||
|
|
||||||
>>> to_timedelta((1, 2, 30, 15))
|
>>> to_duration((1, 2, 30, 15))
|
||||||
datetime.timedelta(days=1, seconds=90315)
|
timedelta(days=1, seconds=90315)
|
||||||
"""
|
"""
|
||||||
if isinstance(input_value, timedelta):
|
if isinstance(input_value, timedelta):
|
||||||
return input_value
|
return input_value
|
||||||
@ -234,9 +246,18 @@ def to_timedelta(
|
|||||||
days, hours, minutes, seconds = input_value
|
days, hours, minutes, seconds = input_value
|
||||||
return timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
|
return timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
|
||||||
else:
|
else:
|
||||||
raise ValueError(f"Expected a tuple or list of length 4, got {len(input_value)}")
|
error_msg = f"Expected a tuple or list of length 4, got {len(input_value)}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
elif isinstance(input_value, str):
|
elif isinstance(input_value, str):
|
||||||
|
# Use pendulum's parsing for human-readable duration strings
|
||||||
|
try:
|
||||||
|
duration = pendulum.parse(input_value)
|
||||||
|
return duration - duration.start_of("day")
|
||||||
|
except pendulum.parsing.exceptions.ParserError as e:
|
||||||
|
logger.debug(f"Invalid Pendulum time string format '{input_value}': {e}")
|
||||||
|
|
||||||
# Handle strings like "2 days 5 hours 30 minutes"
|
# Handle strings like "2 days 5 hours 30 minutes"
|
||||||
total_seconds = 0
|
total_seconds = 0
|
||||||
time_units = {
|
time_units = {
|
||||||
@ -250,73 +271,287 @@ def to_timedelta(
|
|||||||
matches = re.findall(r"(\d+)\s*(days?|hours?|minutes?|seconds?)", input_value)
|
matches = re.findall(r"(\d+)\s*(days?|hours?|minutes?|seconds?)", input_value)
|
||||||
|
|
||||||
if not matches:
|
if not matches:
|
||||||
raise ValueError(f"Invalid time string format: {input_value}")
|
error_msg = f"Invalid time string format '{input_value}'"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
for value, unit in matches:
|
for value, unit in matches:
|
||||||
unit = unit.lower().rstrip("s") # Normalize unit
|
unit = unit.lower().rstrip("s") # Normalize unit
|
||||||
if unit in time_units:
|
if unit in time_units:
|
||||||
total_seconds += int(value) * time_units[unit]
|
total_seconds += int(value) * time_units[unit]
|
||||||
else:
|
else:
|
||||||
raise ValueError(f"Unsupported time unit: {unit}")
|
error_msg = f"Unsupported time unit: {unit}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
return timedelta(seconds=total_seconds)
|
return pendulum.duration(seconds=total_seconds)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
raise ValueError(f"Unsupported input type: {type(input_value)}")
|
error_msg = f"Unsupported input type: {type(input_value)}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
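A quick sketch of the accepted `to_duration` input forms, mirroring the docstring examples above (the import path matches the test imports later in this diff; `pendulum.Duration` subclasses `timedelta`, so the comparisons hold):

# Usage sketch for to_duration(); all input forms are from the docstring above.
from datetime import timedelta

from akkudoktoreos.utils.datetimeutil import to_duration

assert to_duration("2 days 5 hours") == timedelta(days=2, hours=5)
assert to_duration(3600) == timedelta(seconds=3600)
assert to_duration((1, 2, 30, 15)) == timedelta(days=1, hours=2, minutes=30, seconds=15)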
+timezone_finder = TimezoneFinder()  # Static variable for caching
+
+
 @overload
-def to_timezone(lat: float, lon: float, as_string: Literal[True]) -> str: ...
+def to_timezone(
+    utc_offset: Optional[float] = None,
+    location: Optional[Tuple[float, float]] = None,
+    as_string: Literal[True] = True,
+) -> str: ...
 
 
 @overload
-def to_timezone(lat: float, lon: float, as_string: Literal[False] | None = None) -> ZoneInfo: ...
+def to_timezone(
+    utc_offset: Optional[float] = None,
+    location: Optional[Tuple[float, float]] = None,
+    as_string: Literal[False] | None = None,
+) -> Timezone: ...
 
 
-def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None) -> str | ZoneInfo:
-    """Determines the timezone for a given geographic location specified by latitude and longitude.
+def to_timezone(
+    utc_offset: Optional[float] = None,
+    location: Optional[Tuple[float, float]] = None,
+    as_string: Optional[bool] = False,
+) -> Union[Timezone, str]:
+    """Determines the timezone either by UTC offset, geographic location, or local system timezone.
 
-    By default, it returns a `ZoneInfo` object representing the timezone.
+    By default, it returns a `Timezone` object representing the timezone.
     If `as_string` is set to `True`, the function returns the timezone name as a string instead.
 
     Args:
-        lat (float): Latitude of the location in decimal degrees. Must be between -90 and 90.
-        lon (float): Longitude of the location in decimal degrees. Must be between -180 and 180.
+        utc_offset (Optional[float]): UTC offset in hours. Positive for UTC+, negative for UTC-.
+        location (Optional[Tuple[float, float]]): A tuple containing latitude and longitude as floats.
         as_string (Optional[bool]):
             - If `True`, returns the timezone as a string (e.g., "America/New_York").
-            - If `False` or not provided, returns a `ZoneInfo` object for the timezone.
+            - If `False` or not provided, returns a `Timezone` object for the timezone.
 
     Returns:
-        str or ZoneInfo:
+        Union[Timezone, str]:
             - A timezone name as a string (e.g., "America/New_York") if `as_string` is `True`.
-            - A `ZoneInfo` timezone object if `as_string` is `False` or not provided.
+            - A `Timezone` object if `as_string` is `False` or not provided.
 
     Raises:
-        ValueError: If the latitude or longitude is out of range, or if no timezone is found for
-            the specified coordinates.
+        ValueError: If invalid inputs are provided.
 
     Example:
-        >>> to_timezone(40.7128, -74.0060, as_string=True)
-        'America/New_York'
+        >>> to_timezone(utc_offset=5.5, as_string=True)
+        'UTC+05:30'
 
-        >>> to_timezone(40.7128, -74.0060)
-        ZoneInfo(key='America/New_York')
+        >>> to_timezone(location=(40.7128, -74.0060))
+        <Timezone [America/New_York]>
 
+        >>> to_timezone()
+        <Timezone [America/New_York]>  # Returns local timezone
     """
-    # Initialize the static variable only once
-    if not hasattr(to_timezone, "timezone_finder"):
-        # static variable
-        to_timezone.timezone_finder = TimezoneFinder()  # type: ignore[attr-defined]
-
-    # Check and convert coordinates to timezone
-    tz_name: Optional[str] = None
-    try:
-        tz_name = to_timezone.timezone_finder.timezone_at(lat=lat, lng=lon)  # type: ignore[attr-defined]
-        if not tz_name:
-            raise ValueError(f"No timezone found for coordinates: latitude {lat}, longitude {lon}")
-    except Exception as e:
-        raise ValueError(f"Invalid location: latitude {lat}, longitude {lon}") from e
+    if utc_offset is not None:
+        if not isinstance(utc_offset, (int, float)):
+            raise ValueError("UTC offset must be an integer or float representing hours.")
+        if not -24 <= utc_offset <= 24:
+            raise ValueError("UTC offset must be within the range -24 to +24 hours.")
+
+        # Convert UTC offset to an Etc/GMT-compatible format
+        hours = int(utc_offset)
+        minutes = int((abs(utc_offset) - abs(hours)) * 60)
+        sign = "-" if utc_offset >= 0 else "+"
+        offset_str = f"Etc/GMT{sign}{abs(hours)}"
+        if minutes > 0:
+            offset_str += f":{minutes:02}"
+
+        if as_string:
+            return offset_str
+        return pendulum.timezone(offset_str)
+
+    # Handle location-based lookup
+    if location is not None:
+        try:
+            lat, lon = location
+            if not (-90 <= lat <= 90 and -180 <= lon <= 180):
+                raise ValueError(f"Invalid latitude/longitude: {lat}, {lon}")
+            tz_name = timezone_finder.timezone_at(lat=lat, lng=lon)
+            if not tz_name:
+                raise ValueError(
+                    f"No timezone found for coordinates: latitude {lat}, longitude {lon}"
+                )
+        except Exception as e:
+            raise ValueError(f"Error determining timezone for location {location}: {e}") from e
 
+        if as_string:
+            return tz_name
+        return pendulum.timezone(tz_name)
+
+    # Fallback to local timezone
+    local_tz = pendulum.local_timezone()
     if as_string:
-        return tz_name
-
-    return ZoneInfo(tz_name)
+        return local_tz.name
+    return local_tz
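One subtlety in the UTC-offset branch above: IANA `Etc/GMT±N` names invert the usual sign convention (`Etc/GMT-5` means UTC+5), which is why a positive `utc_offset` yields a `-` in the generated name. A small sketch of just that mapping, in isolation (whole-hour forms are standard IANA zones; the `:MM` suffix for fractional offsets follows the code above and may not resolve as an IANA zone):

# Sketch of the Etc/GMT name construction used above; note the inverted sign.
def etc_gmt_name(utc_offset: float) -> str:
    hours = int(utc_offset)
    minutes = int((abs(utc_offset) - abs(hours)) * 60)
    sign = "-" if utc_offset >= 0 else "+"
    name = f"Etc/GMT{sign}{abs(hours)}"
    if minutes > 0:
        name += f":{minutes:02}"
    return name

assert etc_gmt_name(5.5) == "Etc/GMT-5:30"
assert etc_gmt_name(-4.0) == "Etc/GMT+4"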
+
+
+def hours_in_day(dt: Optional[DateTime] = None) -> int:
+    """Returns the number of hours in the given date's day, considering DST transitions.
+
+    Args:
+        dt (Optional[pendulum.DateTime]): The date to check (no time component).
+
+    Returns:
+        int: The number of hours in the day (23, 24, or 25).
+    """
+    if dt is None:
+        dt = to_datetime()
+
+    # Start and end of the day in the local timezone
+    start_of_day = pendulum.datetime(dt.year, dt.month, dt.day, 0, 0, 0, tz=dt.timezone)
+    end_of_day = start_of_day.add(days=1)
+
+    # Calculate the difference in hours between the two
+    duration = end_of_day - start_of_day
+    return int(duration.total_hours())
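For illustration (not part of the diff): on a spring-forward day the local day has 23 hours, on a fall-back day 25. Assuming the function above and the 2024 Europe/Berlin transition dates:

import pendulum

assert hours_in_day(pendulum.datetime(2024, 3, 31, tz="Europe/Berlin")) == 23   # DST starts
assert hours_in_day(pendulum.datetime(2024, 10, 27, tz="Europe/Berlin")) == 25  # DST ends
assert hours_in_day(pendulum.datetime(2024, 6, 1, tz="Europe/Berlin")) == 24    # ordinary day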
+
+
+class DatetimesComparisonResult:
+    """Encapsulates the result of comparing two Pendulum DateTime objects.
+
+    Attributes:
+        equal (bool): Indicates whether the two datetimes are exactly equal
+            (including timezone and DST state).
+        same_instant (bool): Indicates whether the two datetimes represent the same
+            point in time, regardless of their timezones.
+        time_diff (float): The time difference between the two datetimes in seconds.
+        timezone_diff (bool): Indicates whether the timezones of the two datetimes are different.
+        dst_diff (bool): Indicates whether the two datetimes differ in their DST states.
+        approximately_equal (bool): Indicates whether the time difference between the
+            two datetimes is within the specified tolerance.
+        ge (bool): True if `dt1` is greater than or equal to `dt2`.
+        gt (bool): True if `dt1` is strictly greater than `dt2`.
+        le (bool): True if `dt1` is less than or equal to `dt2`.
+        lt (bool): True if `dt1` is strictly less than `dt2`.
+    """
+
+    def __init__(
+        self,
+        equal: bool,
+        same_instant: bool,
+        time_diff: float,
+        timezone_diff: bool,
+        dst_diff: bool,
+        approximately_equal: bool,
+    ):
+        self.equal = equal
+        self.same_instant = same_instant
+        self.time_diff = time_diff
+        self.timezone_diff = timezone_diff
+        self.dst_diff = dst_diff
+        self.approximately_equal = approximately_equal
+
+    @property
+    def ge(self) -> bool:
+        """Greater than or equal: True if `dt1` >= `dt2`."""
+        return self.equal or self.time_diff > 0
+
+    @property
+    def gt(self) -> bool:
+        """Strictly greater than: True if `dt1` > `dt2`."""
+        return not self.equal and self.time_diff > 0
+
+    @property
+    def le(self) -> bool:
+        """Less than or equal: True if `dt1` <= `dt2`."""
+        return self.equal or self.time_diff < 0
+
+    @property
+    def lt(self) -> bool:
+        """Strictly less than: True if `dt1` < `dt2`."""
+        return not self.equal and self.time_diff < 0
+
+    def __repr__(self) -> str:
+        return (
+            f"ComparisonResult(equal={self.equal}, "
+            f"same_instant={self.same_instant}, "
+            f"time_diff={self.time_diff}, "
+            f"timezone_diff={self.timezone_diff}, "
+            f"dst_diff={self.dst_diff}, "
+            f"approximately_equal={self.approximately_equal}, "
+            f"ge={self.ge}, gt={self.gt}, le={self.le}, lt={self.lt})"
+        )
+
+
+def compare_datetimes(
+    dt1: DateTime,
+    dt2: DateTime,
+    tolerance: Optional[Union[int, pendulum.Duration]] = None,
+) -> DatetimesComparisonResult:
+    """Compares two Pendulum DateTime objects with precision, including DST and timezones.
+
+    This function evaluates various aspects of the relationship between two datetime objects:
+    - Exact equality, including timezone and DST state.
+    - Whether they represent the same instant in time (ignoring timezones).
+    - The absolute time difference in seconds.
+    - Differences in timezone and DST state.
+    - Approximate equality based on a specified tolerance.
+    - Greater or lesser comparisons.
+
+    Args:
+        dt1 (pendulum.DateTime): The first datetime object to compare.
+        dt2 (pendulum.DateTime): The second datetime object to compare.
+        tolerance (Optional[Union[int, pendulum.Duration]]): An optional tolerance for comparison.
+            - If an integer is provided, it is interpreted as seconds.
+            - If a `pendulum.Duration` is provided, its total seconds are used.
+            - If not provided, no tolerance is applied.
+
+    Returns:
+        DatetimesComparisonResult: An object containing the results of the comparison, including:
+            - `equal`: Whether the datetimes are exactly equal.
+            - `same_instant`: Whether the datetimes represent the same instant.
+            - `time_diff`: The time difference in seconds.
+            - `timezone_diff`: Whether the timezones differ.
+            - `dst_diff`: Whether the DST states differ.
+            - `approximately_equal`: Whether the time difference is within the tolerance.
+            - `ge`, `gt`, `le`, `lt`: Relational comparisons between the two datetimes.
+
+    Examples:
+        Compare two datetimes exactly:
+        >>> dt1 = pendulum.datetime(2023, 7, 1, 12, tz='Europe/Berlin')
+        >>> dt2 = pendulum.datetime(2023, 7, 1, 12, tz='UTC')
+        >>> compare_datetimes(dt1, dt2)
+        DatetimesComparisonResult(equal=False, same_instant=True, time_diff=7200, timezone_diff=True, dst_diff=False, approximately_equal=False, ge=False, gt=False, le=True, lt=True)
+
+        Compare with a tolerance:
+        >>> compare_datetimes(dt1, dt2, tolerance=7200)
+        DatetimesComparisonResult(equal=False, same_instant=True, time_diff=7200, timezone_diff=True, dst_diff=False, approximately_equal=True, ge=False, gt=False, le=True, lt=True)
+    """
+    # Normalize tolerance to seconds
+    if tolerance is None:
+        tolerance_seconds = 0
+    elif isinstance(tolerance, pendulum.Duration):
+        tolerance_seconds = tolerance.total_seconds()
+    else:
+        tolerance_seconds = int(tolerance)
+
+    # Strict equality check (includes timezone and DST)
+    is_equal = dt1.in_tz("UTC") == dt2.in_tz("UTC")
+
+    # Instant comparison (point in time, might be in different timezones)
+    is_same_instant = dt1.int_timestamp == dt2.int_timestamp
+
+    # Time difference calculation. Throws exception if diverging timezone awareness.
+    time_diff = dt1.int_timestamp - dt2.int_timestamp
+
+    # Timezone comparison
+    timezone_diff = dt1.timezone_name != dt2.timezone_name
+
+    # DST state comparison
+    dst_diff = dt1.is_dst() != dt2.is_dst()
+
+    # Tolerance-based approximate equality
+    is_approximately_equal = time_diff <= tolerance_seconds
+
+    return DatetimesComparisonResult(
+        equal=is_equal,
+        same_instant=is_same_instant,
+        time_diff=time_diff,
+        timezone_diff=timezone_diff,
+        dst_diff=dst_diff,
+        approximately_equal=is_approximately_equal,
+    )
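A short usage sketch for `compare_datetimes` with a `pendulum.Duration` tolerance; per the normalization branch above only its total seconds are used, so this is equivalent to `tolerance=7200`:

import pendulum

dt1 = pendulum.datetime(2023, 7, 1, 12, tz="Europe/Berlin")  # 10:00 UTC in summer
dt2 = pendulum.datetime(2023, 7, 1, 12, tz="UTC")
result = compare_datetimes(dt1, dt2, tolerance=pendulum.duration(hours=2))
print(result.same_instant, result.timezone_diff, result.approximately_equal)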
@@ -1,23 +1,16 @@
-import datetime
 import json
-import zoneinfo
 from typing import Any
 
 import numpy as np
 
+from akkudoktoreos.config.configabc import SettingsBaseModel
+from akkudoktoreos.utils.logutil import get_logger
 
-# currently unused
-def ist_dst_wechsel(tag: datetime.datetime, timezone: str = "Europe/Berlin") -> bool:
-    """Checks if Daylight Saving Time (DST) starts or ends on a given day."""
-    tz = zoneinfo.ZoneInfo(timezone)
-    # Get the current day and the next day
-    current_day = datetime.datetime(tag.year, tag.month, tag.day)
-    next_day = current_day + datetime.timedelta(days=1)
-
-    # Check if the UTC offsets are different (indicating a DST change)
-    dst_change = current_day.replace(tzinfo=tz).dst() != next_day.replace(tzinfo=tz).dst()
-
-    return dst_change
+logger = get_logger(__name__)
+
+
+class UtilsCommonSettings(SettingsBaseModel):
+    pass
 
 
 class NumpyEncoder(json.JSONEncoder):
@@ -5,8 +5,9 @@ import matplotlib
 import matplotlib.pyplot as plt
 import numpy as np
 from matplotlib.backends.backend_pdf import PdfPages
+from numpy.typing import NDArray
 
-from akkudoktoreos.config import AppConfig, SetupIncomplete
+from akkudoktoreos.config.config import get_config
 
 matplotlib.use("Agg")
 
@@ -22,23 +23,22 @@ def visualisiere_ergebnisse(
     temperature: Optional[list[float]],
     start_hour: int,
     einspeiseverguetung_euro_pro_wh: np.ndarray,
-    config: AppConfig,
     filename: str = "visualization_results.pdf",
     extra_data: Optional[dict[str, Any]] = None,
 ) -> None:
     #####################
     # 24-hour visualization
     #####################
-    output_dir = config.working_dir / config.directories.output
-    if not output_dir.is_dir():
-        raise SetupIncomplete(f"Output path does not exist: {output_dir}.")
+    config = get_config()
+    output_dir = config.data_output_path
+    output_dir.mkdir(parents=True, exist_ok=True)
 
     output_file = output_dir.joinpath(filename)
     with PdfPages(output_file) as pdf:
         # Load and PV generation
         plt.figure(figsize=(14, 14))
         plt.subplot(3, 3, 1)
-        hours = np.arange(0, config.eos.prediction_hours)
+        hours: NDArray[np.int_] = np.arange(0, config.prediction_hours, dtype=np.int_)
 
         gesamtlast_array = np.array(gesamtlast)
         # Plot individual loads
@@ -100,7 +100,7 @@ def visualisiere_ergebnisse(
     #####################
 
     plt.figure(figsize=(14, 10))
-    hours = np.arange(start_hour, config.eos.prediction_hours)
+    hours = np.arange(start_hour, config.prediction_hours)
 
     # Energy flow, grid feed-in, and grid consumption
     plt.subplot(3, 2, 1)
@@ -184,7 +184,7 @@ def visualisiere_ergebnisse(
 
     # Plot for AC, DC charging, and Discharge status using bar charts
     ax1 = plt.subplot(3, 2, 5)
-    hours = np.arange(0, config.eos.prediction_hours)
+    hours = np.arange(0, config.prediction_hours)
     # Plot AC charging as bars (relative values between 0 and 1)
     plt.bar(hours, ac, width=0.4, label="AC Charging (relative)", color="blue", alpha=0.6)
 
@@ -206,13 +206,13 @@ def visualisiere_ergebnisse(
 
     # Configure the plot
     ax1.legend(loc="upper left")
-    ax1.set_xlim(0, config.eos.prediction_hours)
+    ax1.set_xlim(0, config.prediction_hours)
     ax1.set_xlabel("Hour")
     ax1.set_ylabel("Relative Power (0-1) / Discharge (0 or 1)")
     ax1.set_title("AC/DC Charging and Discharge Overview")
     ax1.grid(True)
 
-    hours = np.arange(start_hour, config.eos.prediction_hours)
+    hours = np.arange(start_hour, config.prediction_hours)
 
     pdf.savefig()  # Save the current figure state to the PDF
     plt.close()  # Close the current figure to free up memory
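Sketch (not part of the diff) of the new output-path handling: the directory now comes from the config singleton's computed `data_output_path` and is created on demand instead of raising `SetupIncomplete`:

from akkudoktoreos.config.config import get_config

config = get_config()
output_dir = config.data_output_path           # computed path, see the config tests below
output_dir.mkdir(parents=True, exist_ok=True)  # create rather than fail if missing
output_file = output_dir.joinpath("visualization_results.pdf")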
@@ -1,31 +1,41 @@
 import logging
-import os
+import shutil
 import subprocess
 import sys
-import time
+import tempfile
 from pathlib import Path
+from typing import Optional
 
+import pendulum
+import platformdirs
 import pytest
 from xprocess import ProcessStarter
 
-from akkudoktoreos.config import EOS_DIR, AppConfig, load_config
+from akkudoktoreos.config.config import get_config
+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)
 
 
-@pytest.fixture(name="tmp_config")
-def load_config_tmp(tmp_path: Path) -> AppConfig:
-    """Creates an AppConfig from default.config.json with a tmp output directory."""
-    config = load_config(tmp_path)
-    config.directories.output = str(tmp_path)
-    return config
-
-
-@pytest.fixture(autouse=True)
-def disable_debug_logging():
-    # Temporarily set logging level higher than DEBUG
-    logging.disable(logging.DEBUG)
-    yield
-    # Re-enable logging back to its original state after the test
-    logging.disable(logging.NOTSET)
+@pytest.fixture()
+def disable_debug_logging(scope="session", autouse=True):
+    """Automatically disable debug logging for all tests."""
+    original_levels = {}
+    root_logger = logging.getLogger()
+
+    original_levels[root_logger] = root_logger.level
+    root_logger.setLevel(logging.INFO)
+
+    for logger_name, logger in logging.root.manager.loggerDict.items():
+        if isinstance(logger, logging.Logger):
+            original_levels[logger] = logger.level
+            if logger.level <= logging.DEBUG:
+                logger.setLevel(logging.INFO)
+
+    yield
+
+    for logger, level in original_levels.items():
+        logger.setLevel(level)
 
 
 def pytest_addoption(parser):
@@ -39,6 +49,84 @@ def is_full_run(request):
     yield bool(request.config.getoption("--full-run"))
 
 
+@pytest.fixture
+def reset_config(disable_debug_logging):
+    """Fixture to reset EOS config to default values."""
+    config_eos = get_config()
+    config_eos.reset_settings()
+    config_eos.reset_to_defaults()
+    return config_eos
+
+
+@pytest.fixture
+def config_default_dirs():
+    """Fixture that provides a list of directories to be used as config dir."""
+    config_eos = get_config()
+    # Default config directory from platform user config directory
+    config_default_dir_user = Path(platformdirs.user_config_dir(config_eos.APP_NAME))
+    # Default config directory from current working directory
+    config_default_dir_cwd = Path.cwd()
+    # Default config directory from default config file
+    config_default_dir_default = Path(__file__).parent.parent.joinpath("src/akkudoktoreos/data")
+    return config_default_dir_user, config_default_dir_cwd, config_default_dir_default
+
+
+@pytest.fixture
+def stash_config_file(config_default_dirs):
+    """Fixture to temporarily stash away an existing config file during a test.
+
+    If the specified config file exists, it moves the file to a temporary directory.
+    The file is restored to its original location after the test.
+
+    Keep right most in fixture parameter list to assure application at last.
+
+    Returns:
+        Path: Path to the stashed config file.
+    """
+    config_eos = get_config()
+    config_default_dir_user, config_default_dir_cwd, _ = config_default_dirs
+
+    config_file_path_user = config_default_dir_user.joinpath(config_eos.CONFIG_FILE_NAME)
+    config_file_path_cwd = config_default_dir_cwd.joinpath(config_eos.CONFIG_FILE_NAME)
+
+    original_config_file_user = None
+    original_config_file_cwd = None
+    if config_file_path_user.exists():
+        original_config_file_user = config_file_path_user
+    if config_file_path_cwd.exists():
+        original_config_file_cwd = config_file_path_cwd
+
+    temp_dir = tempfile.TemporaryDirectory()
+    temp_file_user = None
+    temp_file_cwd = None
+
+    # If the file exists, move it to the temporary directory
+    if original_config_file_user:
+        temp_file_user = Path(temp_dir.name) / f"user.{original_config_file_user.name}"
+        shutil.move(original_config_file_user, temp_file_user)
+        assert not original_config_file_user.exists()
+        logger.debug(f"Stashed: '{original_config_file_user}'")
+    if original_config_file_cwd:
+        temp_file_cwd = Path(temp_dir.name) / f"cwd.{original_config_file_cwd.name}"
+        shutil.move(original_config_file_cwd, temp_file_cwd)
+        assert not original_config_file_cwd.exists()
+        logger.debug(f"Stashed: '{original_config_file_cwd}'")
+
+    # Yield the temporary file path to the test
+    yield temp_file_user, temp_file_cwd
+
+    # Cleanup after the test
+    if temp_file_user:
+        # Restore the file to its original location
+        shutil.move(temp_file_user, original_config_file_user)
+        assert original_config_file_user.exists()
+    if temp_file_cwd:
+        # Restore the file to its original location
+        shutil.move(temp_file_cwd, original_config_file_cwd)
+        assert original_config_file_cwd.exists()
+    temp_dir.cleanup()
+
+
 @pytest.fixture
 def server(xprocess, tmp_path: Path):
     """Fixture to start the server.
@@ -50,13 +138,13 @@ def server(xprocess, tmp_path: Path):
     # assure server to be installed
     try:
         subprocess.run(
-            [sys.executable, "-c", "import akkudoktoreos.server"],
+            [sys.executable, "-c", "import akkudoktoreos.server.fastapi_server"],
             check=True,
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
         )
     except subprocess.CalledProcessError:
-        project_dir = Path(__file__).parent.parent
+        project_dir = Path(__file__).parent.parent.parent
         subprocess.run(
             [sys.executable, "-m", "pip", "install", "-e", project_dir],
             check=True,
@@ -66,11 +154,15 @@ def server(xprocess, tmp_path: Path):
 
     # command to start server process
     args = [sys.executable, "-m", "akkudoktoreos.server.fastapi_server"]
-    env = {EOS_DIR: f"{tmp_path}", **os.environ.copy()}
+    config_eos = get_config()
+    settings = {
+        "data_folder_path": tmp_path,
+    }
+    config_eos.merge_settings_from_dict(settings)
 
     # startup pattern
     pattern = "Application startup complete."
-    # search the first 30 lines for the startup pattern, if not found
+    # search this number of lines for the startup pattern, if not found
     # a RuntimeError will be raised informing the user
     max_read_lines = 30
 
@@ -81,7 +173,7 @@ def server(xprocess, tmp_path: Path):
     terminate_on_interrupt = True
 
     # ensure process is running and return its logfile
-    logfile = xprocess.ensure("eos", Starter)
+    pid, logfile = xprocess.ensure("eos", Starter)
 
     # create url/port info to the server
     url = "http://127.0.0.1:8503"
@@ -92,26 +184,26 @@ def server(xprocess, tmp_path: Path):
 
 
 @pytest.fixture
-def other_timezone():
-    """Fixture to temporarily change the timezone.
+def set_other_timezone():
+    """Temporarily sets a timezone for Pendulum during a test.
 
-    Restores the original timezone after the test.
+    Resets to the original timezone after the test completes.
     """
-    original_tz = os.environ.get("TZ", None)
+    original_timezone = pendulum.local_timezone()
 
-    other_tz = "Atlantic/Canary"
-    if original_tz == other_tz:
-        other_tz = "Asia/Singapore"
+    default_other_timezone = "Atlantic/Canary"
+    if default_other_timezone == original_timezone:
+        default_other_timezone = "Asia/Singapore"
 
-    # Change the timezone to another
-    os.environ["TZ"] = other_tz
-    time.tzset()  # For Unix/Linux to apply the timezone change
+    def _set_timezone(other_timezone: Optional[str] = None) -> str:
+        if other_timezone is None:
+            other_timezone = default_other_timezone
+        pendulum.set_local_timezone(other_timezone)
+        assert pendulum.local_timezone() == other_timezone
+        return other_timezone
 
-    yield os.environ["TZ"]  # Yield control back to the test case
+    yield _set_timezone
 
-    # Restore the original timezone after the test
-    if original_tz:
-        os.environ["TZ"] = original_tz
-    else:
-        del os.environ["TZ"]
-    time.tzset()  # Re-apply the original timezone
+    # Restore the original timezone
+    pendulum.set_local_timezone(original_timezone)
+    assert pendulum.local_timezone() == original_timezone
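A hypothetical test (not part of the diff) showing the new fixture contract: it yields a setter, so each test can pick its own zone or fall back to the default:

import pendulum

def test_runs_in_other_timezone(set_other_timezone):
    tz_name = set_other_timezone()  # default: "Atlantic/Canary" (or the fallback)
    assert pendulum.local_timezone() == tz_name

    tz_name = set_other_timezone("Asia/Singapore")  # or pick an explicit zone
    assert tz_name == "Asia/Singapore"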
@@ -7,7 +7,7 @@ from time import sleep
 
 import pytest
 
-from akkudoktoreos.utils.cachefilestore import CacheFileStore, cache_in_file
+from akkudoktoreos.utils.cacheutil import CacheFileStore, cache_in_file
 from akkudoktoreos.utils.datetimeutil import to_datetime
 
 # -----------------------------
@@ -268,7 +268,7 @@ def test_cache_in_file_decorator_uses_cache(cache_store):
     assert result == result2
 
 
-def test_cache_in_file_decorator_forces_update(cache_store):
+def test_cache_in_file_decorator_forces_update_data(cache_store):
     """Test that the cache_in_file decorator reuses cached file on subsequent calls."""
     # Clear store to assure it is empty
     cache_store.clear(clear_all=True)
@@ -1,29 +1,33 @@
 import numpy as np
 import pytest
 
-from akkudoktoreos.config import AppConfig
-from akkudoktoreos.devices.battery import EAutoParameters, PVAkku, PVAkkuParameters
-from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
-from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
-from akkudoktoreos.prediction.ems import (
+from akkudoktoreos.config.config import get_config
+from akkudoktoreos.core.ems import (
     EnergieManagementSystem,
     EnergieManagementSystemParameters,
     SimulationResult,
+    get_ems,
 )
+from akkudoktoreos.devices.battery import EAutoParameters, PVAkku, PVAkkuParameters
+from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
+from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
 
-prediction_hours = 48
-optimization_hours = 24
 start_hour = 1
 
 
 # Example initialization of necessary components
 @pytest.fixture
-def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
+def create_ems_instance() -> EnergieManagementSystem:
     """Fixture to create an EnergieManagementSystem instance with given test parameters."""
+    # Assure configuration holds the correct values
+    config_eos = get_config()
+    config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
+    assert config_eos.prediction_hours is not None
+
     # Initialize the battery and the inverter
     akku = PVAkku(
         PVAkkuParameters(kapazitaet_wh=5000, start_soc_prozent=80, min_soc_prozent=10),
-        hours=prediction_hours,
+        hours=config_eos.prediction_hours,
     )
     akku.reset()
     wechselrichter = Wechselrichter(WechselrichterParameters(max_leistung_wh=10000), akku)
@@ -34,16 +38,16 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
             consumption_wh=2000,
             duration_h=2,
         ),
-        hours=prediction_hours,
+        hours=config_eos.prediction_hours,
     )
     home_appliance.set_starting_time(2)
 
     # Example initialization of electric car battery
     eauto = PVAkku(
         EAutoParameters(kapazitaet_wh=26400, start_soc_prozent=10, min_soc_prozent=10),
-        hours=prediction_hours,
+        hours=config_eos.prediction_hours,
     )
-    eauto.set_charge_per_hour(np.full(prediction_hours, 1))
+    eauto.set_charge_per_hour(np.full(config_eos.prediction_hours, 1))
 
     # Parameters based on previous example data
     pv_prognose_wh = [
@@ -203,8 +207,8 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
     ]
 
     # Initialize the energy management system with the respective parameters
-    ems = EnergieManagementSystem(
-        tmp_config.eos,
+    ems = get_ems()
+    ems.set_parameters(
         EnergieManagementSystemParameters(
             pv_prognose_wh=pv_prognose_wh,
             strompreis_euro_pro_wh=strompreis_euro_pro_wh,
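The fixtures in these test modules all follow the same configuration pattern, sketched here in isolation: fetch the singleton, merge the settings the test depends on, then read values back through the same object:

from akkudoktoreos.config.config import get_config

config_eos = get_config()  # singleton; the identical object everywhere
config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
assert config_eos.prediction_hours == 48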
@@ -1,28 +1,32 @@
 import numpy as np
 import pytest
 
-from akkudoktoreos.config import AppConfig
+from akkudoktoreos.config.config import get_config
+from akkudoktoreos.core.ems import (
+    EnergieManagementSystem,
+    EnergieManagementSystemParameters,
+    get_ems,
+)
 from akkudoktoreos.devices.battery import EAutoParameters, PVAkku, PVAkkuParameters
 from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
 from akkudoktoreos.devices.inverter import Wechselrichter, WechselrichterParameters
-from akkudoktoreos.prediction.ems import (
-    EnergieManagementSystem,
-    EnergieManagementSystemParameters,
-)
 
-prediction_hours = 48
-optimization_hours = 24
 start_hour = 0
 
 
 # Example initialization of necessary components
 @pytest.fixture
-def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
+def create_ems_instance() -> EnergieManagementSystem:
     """Fixture to create an EnergieManagementSystem instance with given test parameters."""
+    # Assure configuration holds the correct values
+    config_eos = get_config()
+    config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
+    assert config_eos.prediction_hours is not None
+
     # Initialize the battery and the inverter
     akku = PVAkku(
         PVAkkuParameters(kapazitaet_wh=5000, start_soc_prozent=80, min_soc_prozent=10),
-        hours=prediction_hours,
+        hours=config_eos.prediction_hours,
     )
     akku.reset()
     wechselrichter = Wechselrichter(WechselrichterParameters(max_leistung_wh=10000), akku)
@@ -33,27 +37,28 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
             consumption_wh=2000,
             duration_h=2,
         ),
-        hours=prediction_hours,
+        hours=config_eos.prediction_hours,
     )
     home_appliance.set_starting_time(2)
 
     # Example initialization of electric car battery
     eauto = PVAkku(
         EAutoParameters(kapazitaet_wh=26400, start_soc_prozent=100, min_soc_prozent=100),
-        hours=prediction_hours,
+        hours=config_eos.prediction_hours,
     )
 
     # Parameters based on previous example data
-    pv_prognose_wh = [0.0] * prediction_hours
+    pv_prognose_wh = [0.0] * config_eos.prediction_hours
     pv_prognose_wh[10] = 5000.0
     pv_prognose_wh[11] = 5000.0
 
-    strompreis_euro_pro_wh = [0.001] * prediction_hours
+    strompreis_euro_pro_wh = [0.001] * config_eos.prediction_hours
     strompreis_euro_pro_wh[0:10] = [0.00001] * 10
     strompreis_euro_pro_wh[11:15] = [0.00005] * 4
     strompreis_euro_pro_wh[20] = 0.00001
 
     einspeiseverguetung_euro_pro_wh = [0.00007] * len(strompreis_euro_pro_wh)
+    preis_euro_pro_wh_akku = 0.0001
 
     gesamtlast = [
         676.71,
@@ -107,13 +112,13 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
     ]
 
     # Initialize the energy management system with the respective parameters
-    ems = EnergieManagementSystem(
-        tmp_config.eos,
+    ems = get_ems()
+    ems.set_parameters(
         EnergieManagementSystemParameters(
             pv_prognose_wh=pv_prognose_wh,
             strompreis_euro_pro_wh=strompreis_euro_pro_wh,
             einspeiseverguetung_euro_pro_wh=einspeiseverguetung_euro_pro_wh,
-            preis_euro_pro_wh_akku=0,
+            preis_euro_pro_wh_akku=preis_euro_pro_wh_akku,
             gesamtlast=gesamtlast,
         ),
         wechselrichter=wechselrichter,
@@ -121,10 +126,10 @@ def create_ems_instance(tmp_config: AppConfig) -> EnergieManagementSystem:
         home_appliance=home_appliance,
     )
 
-    ac = np.full(prediction_hours, 0)
+    ac = np.full(config_eos.prediction_hours, 0.0)
     ac[20] = 1
     ems.set_akku_ac_charge_hours(ac)
-    dc = np.full(prediction_hours, 0)
+    dc = np.full(config_eos.prediction_hours, 0.0)
     dc[11] = 1
     ems.set_akku_dc_charge_hours(dc)
@@ -5,7 +5,7 @@ from unittest.mock import patch
 
 import pytest
 
-from akkudoktoreos.config import AppConfig
+from akkudoktoreos.config.config import get_config
 from akkudoktoreos.optimization.genetic import (
     OptimizationParameters,
     OptimizeResponse,
@@ -39,14 +39,13 @@ def compare_dict(actual: dict[str, Any], expected: dict[str, Any]):
 )
 @patch("akkudoktoreos.optimization.genetic.visualisiere_ergebnisse")
 def test_optimize(
-    visualisiere_ergebnisse_patch,
-    fn_in: str,
-    fn_out: str,
-    ngen: int,
-    is_full_run: bool,
-    tmp_config: AppConfig,
+    visualisiere_ergebnisse_patch, fn_in: str, fn_out: str, ngen: int, is_full_run: bool
 ):
     """Test optimierung_ems."""
+    # Assure configuration holds the correct values
+    config_eos = get_config()
+    config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
+
     # Load input and output data
     file = DIR_TESTDATA / fn_in
     with file.open("r") as f_in:
@@ -56,7 +55,7 @@ def test_optimize(
     with file.open("r") as f_out:
         expected_result = OptimizeResponse(**json.load(f_out))
 
-    opt_class = optimization_problem(tmp_config, fixed_seed=42)
+    opt_class = optimization_problem(fixed_seed=42)
     start_hour = 10
 
     if ngen > 10 and not is_full_run:
@@ -1,71 +1,112 @@
-import json
+import os
+import shutil
+import tempfile
 from pathlib import Path
 
 import pytest
-from pydantic import ValidationError
 
-from akkudoktoreos.config import (
-    CONFIG_FILE_NAME,
-    DEFAULT_CONFIG_FILE,
-    get_config_file,
-    load_config,
-)
+from akkudoktoreos.config.config import ConfigEOS, get_config
+from akkudoktoreos.utils.logutil import get_logger
+
+logger = get_logger(__name__)
+
+config_eos = get_config()
+
+DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
+
+FILE_TESTDATA_CONFIGEOS_1_JSON = DIR_TESTDATA.joinpath(config_eos.CONFIG_FILE_NAME)
+FILE_TESTDATA_CONFIGEOS_1_DIR = FILE_TESTDATA_CONFIGEOS_1_JSON.parent
 
 
-def test_config() -> None:
-    """Test the default config file."""
-    try:
-        load_config(Path.cwd())
-    except ValidationError as exc:
-        pytest.fail(f"Default configuration is not valid: {exc}")
+@pytest.fixture
+def reset_config_singleton():
+    """Fixture to reset the ConfigEOS singleton instance before a test."""
+    ConfigEOS.reset_instance()
+    yield
+    ConfigEOS.reset_instance()
 
 
-def test_config_copy(tmp_path: Path) -> None:
-    """Test if the config is copied to the provided path."""
-    assert DEFAULT_CONFIG_FILE == get_config_file(Path("does", "not", "exist"), False)
-
-    load_config(tmp_path, True)
-    expected_config = tmp_path.joinpath(CONFIG_FILE_NAME)
-
-    assert expected_config == get_config_file(tmp_path, False)
-    assert expected_config.is_file()
+def test_fixture_stash_config_file(stash_config_file, config_default_dirs):
+    """Assure fixture stash_config_file is working."""
+    config_default_dir_user, config_default_dir_cwd, _ = config_default_dirs
+
+    config_file_path_user = config_default_dir_user.joinpath(config_eos.CONFIG_FILE_NAME)
+    config_file_path_cwd = config_default_dir_cwd.joinpath(config_eos.CONFIG_FILE_NAME)
+
+    assert not config_file_path_user.exists()
+    assert not config_file_path_cwd.exists()
 
 
-def test_config_merge(tmp_path: Path) -> None:
-    """Test if config is merged and updated correctly."""
-    config_file = tmp_path.joinpath(CONFIG_FILE_NAME)
-    custom_config = {
-        "eos": {
-            "optimization_hours": 30,
-            "penalty": 21,
-            "does_not_exist": "nope",
-            "available_charging_rates_in_percentage": "False entry",
-        }
-    }
-    with config_file.open("w") as f_out:
-        json.dump(custom_config, f_out)
-
-    assert config_file.exists()
-
-    with pytest.raises(ValueError):
-        # custom configuration is broken but not updated.
-        load_config(tmp_path, True, False)
-
-    with config_file.open("r") as f_in:
-        # custom configuration is not changed.
-        assert json.load(f_in) == custom_config
-
-    config = load_config(tmp_path)
-
-    assert config.eos.optimization_hours == 30
-    assert config.eos.penalty == 21
+def test_config_constants():
+    """Test config constants are the way expected by the tests."""
+    assert config_eos.APP_NAME == "net.akkudoktor.eos"
+    assert config_eos.APP_AUTHOR == "akkudoktor"
+    assert config_eos.EOS_DIR == "EOS_DIR"
+    assert config_eos.ENCODING == "UTF-8"
+    assert config_eos.CONFIG_FILE_NAME == "EOS.config.json"
+
+
+def test_computed_paths(reset_config):
+    """Test computed paths for output and cache."""
+    config_eos.merge_settings_from_dict(
+        {
+            "data_folder_path": "/base/data",
+            "data_output_subpath": "output",
+            "data_cache_subpath": "cache",
+        }
+    )
+    assert config_eos.data_output_path == Path("/base/data/output")
+    assert config_eos.data_cache_path == Path("/base/data/cache")
 
 
-def test_setup(tmp_path: Path) -> None:
-    """Test setup."""
-    config = load_config(tmp_path, True)
-    config.run_setup()
-
-    assert tmp_path.joinpath(CONFIG_FILE_NAME).is_file()
-    assert tmp_path.joinpath(config.directories.cache).is_dir()
-    assert tmp_path.joinpath(config.directories.output).is_dir()
+def test_singleton_behavior(reset_config_singleton):
+    """Test that ConfigEOS behaves as a singleton."""
+    instance1 = ConfigEOS()
+    instance2 = ConfigEOS()
+    assert instance1 is instance2
+
+
+def test_default_config_path(reset_config, config_default_dirs, stash_config_file):
+    """Test that the default config file path is computed correctly."""
+    _, _, config_default_dir_default = config_default_dirs
+
+    expected_path = config_default_dir_default.joinpath("default.config.json")
+    assert config_eos.config_default_file_path == expected_path
+    assert config_eos.config_default_file_path.is_file()
+
+
+def test_config_folder_path(reset_config, config_default_dirs, stash_config_file, monkeypatch):
+    """Test that _config_folder_path identifies the correct config directory or None."""
+    config_default_dir_user, _, _ = config_default_dirs
+
+    # All config files are stashed away, no config folder path
+    assert config_eos._config_folder_path() is None
+
+    config_file_user = config_default_dir_user.joinpath(config_eos.CONFIG_FILE_NAME)
+    shutil.copy2(config_eos.config_default_file_path, config_file_user)
+    assert config_eos._config_folder_path() == config_default_dir_user
+
+    monkeypatch.setenv("EOS_DIR", str(FILE_TESTDATA_CONFIGEOS_1_DIR))
+    assert config_eos._config_folder_path() == FILE_TESTDATA_CONFIGEOS_1_DIR
+
+    # Cleanup after the test
+    os.remove(config_file_user)
+
+
+def test_config_copy(reset_config, stash_config_file, monkeypatch):
+    """Test if the config is copied to the provided path."""
+    temp_dir = tempfile.TemporaryDirectory()
+    temp_folder_path = Path(temp_dir.name)
+    temp_config_file_path = temp_folder_path.joinpath(config_eos.CONFIG_FILE_NAME).resolve()
+    monkeypatch.setenv(config_eos.EOS_DIR, str(temp_folder_path))
+    if temp_config_file_path.exists():
+        temp_config_file_path.unlink()
+    assert not temp_config_file_path.exists()
+    assert config_eos._config_folder_path() is None
+    assert config_eos._config_file_path() == temp_config_file_path
+
+    config_eos.from_config_file()
+    assert temp_config_file_path.exists()
+
+    # Cleanup after the test
+    temp_dir.cleanup()
94
tests/test_configabc.py
Normal file
94
tests/test_configabc.py
Normal file
@ -0,0 +1,94 @@
from typing import List, Literal, Optional, no_type_check

import pytest
from pydantic import Field, ValidationError

from akkudoktoreos.config.configabc import SettingsBaseModel


class SettingsModel(SettingsBaseModel):
    name: str = "Default Name"
    age: int = 18
    tags: List[str] = Field(default_factory=list)
    readonly_field: Literal["ReadOnly"] = "ReadOnly"  # Use Literal instead of const


def test_reset_to_defaults():
    """Test resetting to default values."""
    instance = SettingsModel(name="Custom Name", age=25, tags=["tag1", "tag2"])

    # Modify the instance
    instance.name = "Modified Name"
    instance.age = 30
    instance.tags.append("tag3")

    # Ensure the instance is modified
    assert instance.name == "Modified Name"
    assert instance.age == 30
    assert instance.tags == ["tag1", "tag2", "tag3"]

    # Reset to defaults
    instance.reset_to_defaults()

    # Verify default values
    assert instance.name == "Default Name"
    assert instance.age == 18
    assert instance.tags == []
    assert instance.readonly_field == "ReadOnly"


@no_type_check
def test_reset_to_defaults_readonly_field():
    """Ensure read-only fields remain unchanged."""
    instance = SettingsModel()

    # Attempt to modify readonly_field (should raise an error)
    with pytest.raises(ValidationError):
        instance.readonly_field = "New Value"

    # Reset to defaults
    instance.reset_to_defaults()

    # Ensure readonly_field is still at its default value
    assert instance.readonly_field == "ReadOnly"


def test_reset_to_defaults_with_default_factory():
    """Test reset with fields having default_factory."""

    class FactoryModel(SettingsBaseModel):
        items: List[int] = Field(default_factory=lambda: [1, 2, 3])
        value: Optional[int] = None

    instance = FactoryModel(items=[4, 5, 6], value=10)

    # Ensure instance has custom values
    assert instance.items == [4, 5, 6]
    assert instance.value == 10

    # Reset to defaults
    instance.reset_to_defaults()

    # Verify reset values
    assert instance.items == [1, 2, 3]
    assert instance.value is None


@no_type_check
def test_reset_to_defaults_error_handling():
    """Ensure reset_to_defaults skips fields that cannot be set."""

    class ReadOnlyModel(SettingsBaseModel):
        readonly_field: Literal["ReadOnly"] = "ReadOnly"

    instance = ReadOnlyModel()

    # Attempt to modify readonly_field (should raise an error)
    with pytest.raises(ValidationError):
        instance.readonly_field = "New Value"

    # Reset to defaults
    instance.reset_to_defaults()

    # Ensure readonly_field is unaffected
    assert instance.readonly_field == "ReadOnly"
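

# Editor's note: the following sketch is an illustration, not part of the EOS code under test.
def test_reset_to_defaults_sketch():
    """Hedged sketch of what a reset_to_defaults implementation can look like.

    The class and helper names here are assumptions; the actual SettingsBaseModel
    implementation may differ.
    """
    from pydantic import BaseModel, ConfigDict

    class Resettable(BaseModel):
        model_config = ConfigDict(validate_assignment=True)
        name: str = "Default Name"
        tags: List[str] = Field(default_factory=list)

        def reset_to_defaults(self) -> None:
            for field_name, field in self.model_fields.items():
                # default_factory takes precedence over a plain default.
                default = (
                    field.default_factory() if field.default_factory is not None else field.default
                )
                try:
                    setattr(self, field_name, default)
                except (ValidationError, TypeError):
                    # Fields that reject assignment (read-only constants) keep their value.
                    pass

    instance = Resettable(name="Custom", tags=["a"])
    instance.reset_to_defaults()
    assert instance.name == "Default Name" and instance.tags == []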
tests/test_dataabc.py (new file, 667 lines)
@ -0,0 +1,667 @@
from datetime import datetime, timezone
from typing import Any, ClassVar, List, Optional, Union

import numpy as np
import pandas as pd
import pendulum
import pytest
from pydantic import Field, ValidationError

from akkudoktoreos.config.configabc import SettingsBaseModel
from akkudoktoreos.core.dataabc import (
    DataBase,
    DataContainer,
    DataImportProvider,
    DataProvider,
    DataRecord,
    DataSequence,
)
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration

# Derived classes for testing
# ---------------------------


class DerivedConfig(SettingsBaseModel):
    env_var: Optional[int] = Field(default=None, description="Test config by environment var")
    instance_field: Optional[str] = Field(default=None, description="Test config by instance field")
    class_constant: Optional[int] = Field(default=None, description="Test config by class constant")


class DerivedBase(DataBase):
    instance_field: Optional[str] = Field(default=None, description="Field Value")
    class_constant: ClassVar[int] = 30


class DerivedRecord(DataRecord):
    data_value: Optional[float] = Field(default=None, description="Data Value")


class DerivedSequence(DataSequence):
    # overload
    records: List[DerivedRecord] = Field(
        default_factory=list, description="List of DerivedRecord records"
    )

    @classmethod
    def record_class(cls) -> Any:
        return DerivedRecord


class DerivedDataProvider(DataProvider):
    """A concrete subclass of DataProvider for testing purposes."""

    # overload
    records: List[DerivedRecord] = Field(
        default_factory=list, description="List of DerivedRecord records"
    )
    provider_enabled: ClassVar[bool] = False
    provider_updated: ClassVar[bool] = False

    @classmethod
    def record_class(cls) -> Any:
        return DerivedRecord

    # Implement abstract methods for test purposes
    def provider_id(self) -> str:
        return "DerivedDataProvider"

    def enabled(self) -> bool:
        return self.provider_enabled

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        # Simulate update logic
        DerivedDataProvider.provider_updated = True


class DerivedDataImportProvider(DataImportProvider):
    """A concrete subclass of DataImportProvider for testing purposes."""

    # overload
    records: List[DerivedRecord] = Field(
        default_factory=list, description="List of DerivedRecord records"
    )
    provider_enabled: ClassVar[bool] = False
    provider_updated: ClassVar[bool] = False

    @classmethod
    def record_class(cls) -> Any:
        return DerivedRecord

    # Implement abstract methods for test purposes
    def provider_id(self) -> str:
        return "DerivedDataImportProvider"

    def enabled(self) -> bool:
        return self.provider_enabled

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        # Simulate update logic
        DerivedDataImportProvider.provider_updated = True


class DerivedDataContainer(DataContainer):
    providers: List[Union[DerivedDataProvider, DataProvider]] = Field(
        default_factory=list, description="List of data providers"
    )


# Tests
# ----------


class TestDataBase:
    @pytest.fixture
    def base(self, reset_config, monkeypatch):
        # Provide default values for configuration
        derived = DerivedBase()
        derived.config.update()
        return derived

    def test_get_config_value_key_error(self, base):
        with pytest.raises(AttributeError):
            base.config.non_existent_key


class TestDataRecord:
    def create_test_record(self, date, value):
        """Helper function to create a test DataRecord."""
        return DerivedRecord(date_time=date, data_value=value)

    def test_getitem(self):
        record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 10.0)
        assert record["data_value"] == 10.0

    def test_setitem(self):
        record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 10.0)
        record["data_value"] = 20.0
        assert record.data_value == 20.0

    def test_delitem(self):
        record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 10.0)
        record.data_value = 20.0
        del record["data_value"]
        assert record.data_value is None

    def test_len(self):
        record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 10.0)
        record.date_time = None
        record.data_value = 20.0
        assert len(record) == 2

    def test_to_dict(self):
        record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 10.0)
        record.data_value = 20.0
        record_dict = record.to_dict()
        assert "data_value" in record_dict
        assert record_dict["data_value"] == 20.0
        record2 = DerivedRecord.from_dict(record_dict)
        assert record2 == record

    def test_to_json(self):
        record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 10.0)
        record.data_value = 20.0
        json_str = record.to_json()
        assert "data_value" in json_str
        assert "20.0" in json_str
        record2 = DerivedRecord.from_json(json_str)
        assert record2 == record


class TestDataSequence:
    @pytest.fixture
    def sequence(self):
        sequence0 = DerivedSequence()
        assert len(sequence0) == 0
        return sequence0

    @pytest.fixture
    def sequence2(self):
        sequence = DerivedSequence()
        record1 = self.create_test_record(datetime(1970, 1, 1), 1970)
        record2 = self.create_test_record(datetime(1971, 1, 1), 1971)
        sequence.append(record1)
        sequence.append(record2)
        assert len(sequence) == 2
        return sequence

    def create_test_record(self, date, value):
        """Helper function to create a test DataRecord."""
        return DerivedRecord(date_time=date, data_value=value)

    # Test cases
    def test_getitem(self, sequence):
        assert len(sequence) == 0
        record = self.create_test_record("2024-01-01 00:00:00", 0)
        sequence.insert_by_datetime(record)
        assert isinstance(sequence[0], DerivedRecord)

    def test_setitem(self, sequence2):
        new_record = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 1)
        sequence2[0] = new_record
        assert sequence2[0].date_time == datetime(2024, 1, 3, tzinfo=timezone.utc)

    def test_set_record_at_index(self, sequence2):
        record1 = self.create_test_record(datetime(2024, 1, 3, tzinfo=timezone.utc), 1)
        record2 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        sequence2[1] = record1
        assert sequence2[1].date_time == datetime(2024, 1, 3, tzinfo=timezone.utc)
        sequence2[0] = record2
        assert len(sequence2) == 2
        assert sequence2[0] == record2

    def test_insert_duplicate_date_record(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 5), 0.9)  # Duplicate date
        sequence.insert_by_datetime(record1)
        sequence.insert_by_datetime(record2)
        assert len(sequence) == 1
        assert sequence[0].data_value == 0.9  # Record should have merged with new value

    def test_sort_by_datetime_ascending(self, sequence):
        """Test sorting records in ascending order by date_time."""
        records = [
            self.create_test_record(pendulum.datetime(2024, 11, 1), 0.7),
            self.create_test_record(pendulum.datetime(2024, 10, 1), 0.8),
            self.create_test_record(pendulum.datetime(2024, 12, 1), 0.9),
        ]
        for i, record in enumerate(records):
            sequence.insert(i, record)
        sequence.sort_by_datetime()
        sorted_dates = [record.date_time for record in sequence.records]
        for i, expected_date in enumerate(
            [
                pendulum.datetime(2024, 10, 1),
                pendulum.datetime(2024, 11, 1),
                pendulum.datetime(2024, 12, 1),
            ]
        ):
            assert compare_datetimes(sorted_dates[i], expected_date).equal

    def test_sort_by_datetime_descending(self, sequence):
        """Test sorting records in descending order by date_time."""
        records = [
            self.create_test_record(pendulum.datetime(2024, 11, 1), 0.7),
            self.create_test_record(pendulum.datetime(2024, 10, 1), 0.8),
            self.create_test_record(pendulum.datetime(2024, 12, 1), 0.9),
        ]
        for i, record in enumerate(records):
            sequence.insert(i, record)
        sequence.sort_by_datetime(reverse=True)
        sorted_dates = [record.date_time for record in sequence.records]
        for i, expected_date in enumerate(
            [
                pendulum.datetime(2024, 12, 1),
                pendulum.datetime(2024, 11, 1),
                pendulum.datetime(2024, 10, 1),
            ]
        ):
            assert compare_datetimes(sorted_dates[i], expected_date).equal

    def test_sort_by_datetime_with_none(self, sequence):
        """Test sorting records when some date_time values are None."""
        records = [
            self.create_test_record(pendulum.datetime(2024, 11, 1), 0.7),
            self.create_test_record(pendulum.datetime(2024, 10, 1), 0.8),
            self.create_test_record(pendulum.datetime(2024, 12, 1), 0.9),
        ]
        for i, record in enumerate(records):
            sequence.insert(i, record)
        sequence.records[2].date_time = None
        assert sequence.records[2].date_time is None
        sequence.sort_by_datetime()
        sorted_dates = [record.date_time for record in sequence.records]
        for i, expected_date in enumerate(
            [
                None,  # None values should come first
                pendulum.datetime(2024, 10, 1),
                pendulum.datetime(2024, 11, 1),
            ]
        ):
            if expected_date is None:
                assert sorted_dates[i] is None
            else:
                assert compare_datetimes(sorted_dates[i], expected_date).equal

    def test_sort_by_datetime_error_on_uncomparable(self, sequence):
        """Test error is raised when date_time contains uncomparable values."""
        records = [
            self.create_test_record(pendulum.datetime(2024, 11, 1), 0.7),
            self.create_test_record(pendulum.datetime(2024, 12, 1), 0.9),
            self.create_test_record(pendulum.datetime(2024, 10, 1), 0.8),
        ]
        for i, record in enumerate(records):
            sequence.insert(i, record)
        with pytest.raises(
            ValidationError, match="Date string not_a_datetime does not match any known formats."
        ):
            sequence.records[2].date_time = "not_a_datetime"  # Invalid date_time
            sequence.sort_by_datetime()

    def test_key_to_series(self, sequence):
        record = self.create_test_record(datetime(2023, 11, 6), 0.8)
        sequence.append(record)
        series = sequence.key_to_series("data_value")
        assert isinstance(series, pd.Series)
        assert series[to_datetime(datetime(2023, 11, 6))] == 0.8

    def test_key_from_series(self, sequence):
        series = pd.Series(
            data=[0.8, 0.9], index=pd.to_datetime([datetime(2023, 11, 5), datetime(2023, 11, 6)])
        )
        sequence.key_from_series("data_value", series)
        assert len(sequence) == 2
        assert sequence[0].data_value == 0.8
        assert sequence[1].data_value == 0.9

    def test_key_to_array(self, sequence):
        interval = to_duration("1 day")
        start_datetime = to_datetime("2023-11-6")
        last_datetime = to_datetime("2023-11-8")
        end_datetime = to_datetime("2023-11-9")
        record = self.create_test_record(start_datetime, float(start_datetime.day))
        sequence.insert_by_datetime(record)
        record = self.create_test_record(last_datetime, float(last_datetime.day))
        sequence.insert_by_datetime(record)
        assert sequence[0].data_value == 6.0
        assert sequence[1].data_value == 8.0

        series = sequence.key_to_series(
            key="data_value", start_datetime=start_datetime, end_datetime=end_datetime
        )
        assert len(series) == 2
        assert series[to_datetime("2023-11-6")] == 6
        assert series[to_datetime("2023-11-8")] == 8

        array = sequence.key_to_array(
            key="data_value",
            start_datetime=start_datetime,
            end_datetime=end_datetime,
            interval=interval,
        )
        assert isinstance(array, np.ndarray)
        assert len(array) == 3
        assert array[0] == start_datetime.day
        assert array[1] == 7
        assert array[2] == last_datetime.day
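
    # Editor's note: the sketch below illustrates the gap filling exercised above;
    # it is not part of the EOS API under test.
    def test_key_to_array_interpolation_sketch(self):
        """Hedged sketch: the missing 2023-11-07 sample can be filled by pandas resampling.

        The actual key_to_array implementation may use a different mechanism.
        """
        series = pd.Series([6.0, 8.0], index=pd.to_datetime(["2023-11-06", "2023-11-08"]))
        resampled = series.resample("1D").interpolate()  # fills 2023-11-07 with 7.0
        assert resampled.tolist() == [6.0, 7.0, 8.0]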
    def test_to_datetimeindex(self, sequence2):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        sequence2.insert(0, record1)
        sequence2.insert(1, record2)
        dt_index = sequence2.to_datetimeindex()
        assert isinstance(dt_index, pd.DatetimeIndex)
        assert dt_index[0] == to_datetime(datetime(2023, 11, 5))
        assert dt_index[1] == to_datetime(datetime(2023, 11, 6))

    def test_delete_by_datetime_range(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        record3 = self.create_test_record(datetime(2023, 11, 7), 1.0)
        sequence.append(record1)
        sequence.append(record2)
        sequence.append(record3)
        assert len(sequence) == 3
        sequence.delete_by_datetime(
            start_datetime=datetime(2023, 11, 6), end_datetime=datetime(2023, 11, 7)
        )
        assert len(sequence) == 2
        assert sequence[0].date_time == to_datetime(datetime(2023, 11, 5))
        assert sequence[1].date_time == to_datetime(datetime(2023, 11, 7))

    def test_delete_by_datetime_start(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        sequence.append(record1)
        sequence.append(record2)
        assert len(sequence) == 2
        sequence.delete_by_datetime(start_datetime=datetime(2023, 11, 6))
        assert len(sequence) == 1
        assert sequence[0].date_time == to_datetime(datetime(2023, 11, 5))

    def test_delete_by_datetime_end(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        sequence.append(record1)
        sequence.append(record2)
        assert len(sequence) == 2
        sequence.delete_by_datetime(end_datetime=datetime(2023, 11, 6))
        assert len(sequence) == 1
        assert sequence[0].date_time == to_datetime(datetime(2023, 11, 6))

    def test_filter_by_datetime(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        sequence.append(record1)
        sequence.append(record2)
        filtered_sequence = sequence.filter_by_datetime(start_datetime=datetime(2023, 11, 6))
        assert len(filtered_sequence) == 1
        assert filtered_sequence[0].date_time == to_datetime(datetime(2023, 11, 6))

    def test_to_dict(self, sequence):
        record = self.create_test_record(datetime(2023, 11, 6), 0.8)
        sequence.append(record)
        data_dict = sequence.to_dict()
        assert isinstance(data_dict, dict)
        sequence_other = sequence.from_dict(data_dict)
        assert sequence_other == sequence

    def test_to_json(self, sequence):
        record = self.create_test_record(datetime(2023, 11, 6), 0.8)
        sequence.append(record)
        json_str = sequence.to_json()
        assert isinstance(json_str, str)
        assert "2023-11-06" in json_str
        assert ":0.8" in json_str

    def test_from_json(self, sequence, sequence2):
        json_str = sequence2.to_json()
        sequence = sequence.from_json(json_str)
        assert len(sequence) == len(sequence2)
        assert sequence[0].date_time == sequence2[0].date_time
        assert sequence[0].data_value == sequence2[0].data_value

    def test_key_to_dict(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        sequence.append(record1)
        sequence.append(record2)
        data_dict = sequence.key_to_dict("data_value")
        assert isinstance(data_dict, dict)
        assert data_dict[to_datetime(datetime(2023, 11, 5), as_string=True)] == 0.8
        assert data_dict[to_datetime(datetime(2023, 11, 6), as_string=True)] == 0.9

    def test_key_to_lists(self, sequence):
        record1 = self.create_test_record(datetime(2023, 11, 5), 0.8)
        record2 = self.create_test_record(datetime(2023, 11, 6), 0.9)
        sequence.append(record1)
        sequence.append(record2)
        dates, values = sequence.key_to_lists("data_value")
        assert dates == [to_datetime(datetime(2023, 11, 5)), to_datetime(datetime(2023, 11, 6))]
        assert values == [0.8, 0.9]


class TestDataProvider:
    # Fixtures and helper functions
    @pytest.fixture
    def provider(self):
        """Fixture to provide a DerivedDataProvider instance for testing."""
        DerivedDataProvider.provider_enabled = True
        DerivedDataProvider.provider_updated = False
        return DerivedDataProvider()

    @pytest.fixture
    def sample_start_datetime(self):
        """Fixture for a sample start datetime."""
        return to_datetime(datetime(2024, 11, 1, 12, 0))

    def create_test_record(self, date, value):
        """Helper function to create a test DataRecord."""
        return DerivedRecord(date_time=date, data_value=value)

    # Tests

    def test_singleton_behavior(self, provider):
        """Test that DataProvider enforces singleton behavior."""
        instance1 = provider
        instance2 = DerivedDataProvider()
        assert (
            instance1 is instance2
        ), "Singleton pattern is not enforced; instances are not the same."
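
    # Editor's note: the sketch below illustrates one way the singleton behavior asserted
    # above can be implemented; the actual EOS mixin may differ.
    def test_singleton_pattern_sketch(self):
        """Hedged sketch of per-subclass singleton caching via __new__ (illustration only)."""

        class SingletonMixin:
            _instances: dict = {}

            def __new__(cls, *args, **kwargs):
                # Cache exactly one instance per concrete subclass.
                if cls not in cls._instances:
                    cls._instances[cls] = super().__new__(cls)
                return cls._instances[cls]

        class MyProvider(SingletonMixin):
            pass

        assert MyProvider() is MyProvider()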
    def test_update_method_with_defaults(self, provider, sample_start_datetime, monkeypatch):
        """Test the `update` method with default parameters."""
        ems_eos = get_ems()

        ems_eos.set_start_datetime(sample_start_datetime)
        provider.update_data()

        assert provider.start_datetime == sample_start_datetime

    def test_update_method_force_enable(self, provider, monkeypatch):
        """Test that `update` executes when `force_enable` is True, even if `enabled` is False."""
        # Override enabled to return False for this test
        DerivedDataProvider.provider_enabled = False
        DerivedDataProvider.provider_updated = False
        provider.update_data(force_enable=True)
        assert provider.enabled() is False, "Provider should be disabled, but enabled() is True."
        assert (
            DerivedDataProvider.provider_updated is True
        ), "Provider should have been executed, but was not."

    def test_delete_by_datetime(self, provider, sample_start_datetime):
        """Test `delete_by_datetime` method for removing records by datetime range."""
        # Add records to the provider for deletion testing
        provider.records = [
            self.create_test_record(sample_start_datetime - to_duration("3 hours"), 1),
            self.create_test_record(sample_start_datetime - to_duration("1 hour"), 2),
            self.create_test_record(sample_start_datetime + to_duration("1 hour"), 3),
        ]

        provider.delete_by_datetime(
            start_datetime=sample_start_datetime - to_duration("2 hours"),
            end_datetime=sample_start_datetime + to_duration("2 hours"),
        )
        assert (
            len(provider.records) == 1
        ), "Only one record should remain after deletion by datetime."
        assert provider.records[0].date_time == sample_start_datetime - to_duration(
            "3 hours"
        ), "Unexpected record remains."


class TestDataImportProvider:
    # Fixtures and helper functions
    @pytest.fixture
    def provider(self):
        """Fixture to provide an instance of DerivedDataImportProvider for testing."""
        DerivedDataImportProvider.provider_enabled = True
        DerivedDataImportProvider.provider_updated = False
        return DerivedDataImportProvider()

    @pytest.mark.parametrize(
        "start_datetime, value_count, expected_mapping_count",
        [
            ("2024-11-10 00:00:00", 24, 24),  # No DST in Germany
            ("2024-08-10 00:00:00", 24, 24),  # DST in Germany
            ("2024-03-31 00:00:00", 24, 23),  # DST change in Germany (23 hours/day)
            ("2024-10-27 00:00:00", 24, 25),  # DST change in Germany (25 hours/day)
        ],
    )
    def test_import_datetimes(self, provider, start_datetime, value_count, expected_mapping_count):
        ems_eos = get_ems()
        ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))

        value_datetime_mapping = provider.import_datetimes(value_count)

        assert len(value_datetime_mapping) == expected_mapping_count

    @pytest.mark.parametrize(
        "start_datetime, value_count, expected_mapping_count",
        [
            ("2024-11-10 00:00:00", 24, 24),  # No DST in Germany
            ("2024-08-10 00:00:00", 24, 24),  # DST in Germany
            ("2024-03-31 00:00:00", 24, 23),  # DST change in Germany (23 hours/day)
            ("2024-10-27 00:00:00", 24, 25),  # DST change in Germany (25 hours/day)
        ],
    )
    def test_import_datetimes_utc(
        self, set_other_timezone, provider, start_datetime, value_count, expected_mapping_count
    ):
        original_tz = set_other_timezone("Etc/UTC")
        ems_eos = get_ems()
        ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
        assert ems_eos.start_datetime.timezone.name == "Europe/Berlin"

        value_datetime_mapping = provider.import_datetimes(value_count)

        assert len(value_datetime_mapping) == expected_mapping_count
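

# Editor's note: a 24-value import maps to 23 or 25 datetimes on DST-change days because
# the mapping follows local wall-clock hours. The sketch below illustrates DST-aware
# hourly slot generation with pendulum; import_datetimes itself may be implemented differently.
def test_hourly_slots_sketch():
    """Hedged sketch of hourly datetime generation across a DST transition."""

    def hourly_slots(day: pendulum.DateTime) -> list:
        # All full hours between the start of day's local calendar day and the next one.
        start = day.start_of("day")
        end = start.add(days=1)
        slots = []
        current = start
        while current < end:
            slots.append(current)
            current = current.add(hours=1)
        return slots

    # 2024-03-31 in Europe/Berlin skips 02:00, so only 23 hourly slots exist.
    assert len(hourly_slots(pendulum.datetime(2024, 3, 31, tz="Europe/Berlin"))) == 23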
class TestDataContainer:
    # Fixture and helpers
    @pytest.fixture
    def container(self):
        container = DerivedDataContainer()
        return container

    @pytest.fixture
    def container_with_providers(self):
        record1 = self.create_test_record(datetime(2023, 11, 5), 1)
        record2 = self.create_test_record(datetime(2023, 11, 6), 2)
        record3 = self.create_test_record(datetime(2023, 11, 7), 3)
        provider = DerivedDataProvider()
        provider.clear()
        assert len(provider) == 0
        provider.append(record1)
        provider.append(record2)
        provider.append(record3)
        assert len(provider) == 3
        container = DerivedDataContainer()
        container.providers.clear()
        assert len(container.providers) == 0
        container.providers.append(provider)
        assert len(container.providers) == 1
        return container

    def create_test_record(self, date, value):
        """Helper function to create a test DataRecord."""
        return DerivedRecord(date_time=date, data_value=value)

    def test_append_provider(self, container):
        assert len(container.providers) == 0
        container.providers.append(DerivedDataProvider())
        assert len(container.providers) == 1
        assert isinstance(container.providers[0], DerivedDataProvider)

    @pytest.mark.skip(reason="type check not implemented")
    def test_append_provider_invalid_type(self, container):
        with pytest.raises(ValueError, match="must be an instance of DataProvider"):
            container.providers.append("not_a_provider")

    def test_getitem_existing_key(self, container_with_providers):
        assert len(container_with_providers.providers) == 1
        # check all keys are available (don't care for position)
        for key in ["data_value", "date_time"]:
            assert key in list(container_with_providers.keys())
        series = container_with_providers["data_value"]
        assert isinstance(series, pd.Series)
        assert series.name == "data_value"
        assert series.tolist() == [1.0, 2.0, 3.0]

    def test_getitem_non_existing_key(self, container_with_providers):
        with pytest.raises(KeyError, match="No data found for key 'non_existent_key'"):
            container_with_providers["non_existent_key"]

    def test_setitem_existing_key(self, container_with_providers):
        new_series = container_with_providers["data_value"]
        new_series[:] = [4, 5, 6]
        container_with_providers["data_value"] = new_series
        series = container_with_providers["data_value"]
        assert series.name == "data_value"
        assert series.tolist() == [4, 5, 6]

    def test_setitem_invalid_value(self, container_with_providers):
        with pytest.raises(ValueError, match="Value must be an instance of pd.Series"):
            container_with_providers["test_key"] = "not_a_series"

    def test_setitem_non_existing_key(self, container_with_providers):
        new_series = pd.Series([4, 5, 6], name="non_existent_key")
        with pytest.raises(KeyError, match="Key 'non_existent_key' not found"):
            container_with_providers["non_existent_key"] = new_series

    def test_delitem_existing_key(self, container_with_providers):
        del container_with_providers["data_value"]
        series = container_with_providers["data_value"]
        assert series.name == "data_value"
        assert series.tolist() == [None, None, None]

    def test_delitem_non_existing_key(self, container_with_providers):
        with pytest.raises(KeyError, match="Key 'non_existent_key' not found"):
            del container_with_providers["non_existent_key"]

    def test_len(self, container_with_providers):
        assert len(container_with_providers) == 3

    def test_repr(self, container_with_providers):
        representation = repr(container_with_providers)
        assert representation.startswith("DerivedDataContainer(")
        assert "DerivedDataProvider" in representation

    def test_to_json(self, container_with_providers):
        json_str = container_with_providers.to_json()
        container_other = DerivedDataContainer.from_json(json_str)
        assert container_other == container_with_providers

    def test_from_json(self, container_with_providers):
        json_str = container_with_providers.to_json()
        container = DerivedDataContainer.from_json(json_str)
        assert isinstance(container, DerivedDataContainer)
        assert len(container.providers) == 1
        assert container.providers[0] == container_with_providers.providers[0]

    def test_provider_by_id(self, container_with_providers):
        provider = container_with_providers.provider_by_id("DerivedDataProvider")
        assert isinstance(provider, DerivedDataProvider)
tests/test_datetimeutil.py
@ -1,95 +1,379 @@
"""Test Module for pendulum.datetimeutil Module."""

import pendulum
import pytest
from pendulum.tz.timezone import Timezone

from akkudoktoreos.utils.datetimeutil import (
    compare_datetimes,
    hours_in_day,
    to_datetime,
    to_duration,
    to_timezone,
)

# -----------------------------
# to_datetime
# -----------------------------


# Test cases for valid to_datetime inputs
@pytest.mark.parametrize(
    "test_case, local_timezone, date_input, as_string, in_timezone, to_naiv, to_maxtime, expected_output",
    [
        # ---------------------------------------
        # from string to pendulum.datetime object
        # ---------------------------------------
        # - no timezone
        (
            "TC001",
            "Etc/UTC",
            "2024-01-01",
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 1, 1, 0, 0, 0, tz="Etc/UTC"),
        ),
        (
            "TC002",
            "Europe/Berlin",
            "2024-01-01",
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 1, 1, 0, 0, 0, tz="Europe/Berlin"),
        ),
        (
            "TC003",
            "Europe/Berlin",
            "2024-01-01",
            None,
            None,
            None,
            False,
            pendulum.datetime(2023, 12, 31, 23, 0, 0, tz="Etc/UTC"),
        ),
        (
            "TC004",
            "Europe/Paris",
            "2024-01-01 00:00:00",
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 1, 1, 0, 0, 0, tz="Europe/Paris"),
        ),
        (
            "TC005",
            "Etc/UTC",
            "2024-01-01 00:00:00",
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 1, 1, 1, 0, 0, tz="Europe/Berlin"),
        ),
        (
            "TC006",
            "Europe/Berlin",
            "2024-01-01 00:00:00",
            None,
            None,
            None,
            False,
            pendulum.datetime(2023, 12, 31, 23, 0, 0, tz="Etc/UTC"),
        ),
        (
            "TC007",
            "Atlantic/Canary",
            "2024-01-01 12:00:00",
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 1, 1, 12, 0, 0, tz="Atlantic/Canary"),
        ),
        (
            "TC008",
            "Etc/UTC",
            "2024-01-01 12:00:00",
            None,
            None,  # force local timezone
            None,
            False,
            pendulum.datetime(2024, 1, 1, 13, 0, 0, tz="Europe/Berlin"),
        ),
        (
            "TC009",
            "Europe/Berlin",
            "2024-01-01 12:00:00",
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 1, 1, 11, 0, 0, tz="Etc/UTC"),
        ),
        # - with timezone
        (
            "TC010",
            "Etc/UTC",
            "02/02/24",
            None,
            "Europe/Berlin",
            None,
            False,
            pendulum.datetime(2024, 2, 2, 0, 0, 0, tz="Europe/Berlin"),
        ),
        (
            "TC011",
            "Etc/UTC",
            "2024-03-03T10:20:30.000+01:00",  # No daylight saving time at this date
            None,
            "Europe/Berlin",
            None,
            None,
            pendulum.datetime(2024, 3, 3, 10, 20, 30, 0, tz="Europe/Berlin"),
        ),
        (
            "TC012",
            "Etc/UTC",
            "2024-04-04T10:20:30.000+02:00",
            None,
            "Europe/Berlin",
            False,
            None,
            pendulum.datetime(2024, 4, 4, 10, 20, 30, 0, tz="Europe/Berlin"),
        ),
        (
            "TC013",
            "Etc/UTC",
            "2024-05-05T10:20:30.000+02:00",
            None,
            "Europe/Berlin",
            True,
            None,
            pendulum.naive(2024, 5, 5, 10, 20, 30, 0),
        ),
        # - without local timezone as UTC
        (
            "TC014",
            "Atlantic/Canary",
            "02/02/24",
            None,
            "UTC",
            None,
            False,
            pendulum.datetime(2024, 2, 2, 0, 0, 0, tz="UTC"),
        ),
        (
            "TC015",
            "Atlantic/Canary",
            "2024-03-03T10:20:30.000Z",  # No daylight saving time at this date
            None,
            None,
            None,
            None,
            pendulum.datetime(2024, 3, 3, 10, 20, 30, 0, tz="UTC"),
        ),
        # ---------------------------------------
        # from pendulum.datetime to pendulum.datetime object
        # ---------------------------------------
        (
            "TC016",
            "Atlantic/Canary",
            pendulum.datetime(2024, 4, 4, 0, 0, 0),
            None,
            None,
            None,
            False,
            pendulum.datetime(2024, 4, 4, 0, 0, 0, tz="Etc/UTC"),
        ),
        (
            "TC017",
            "Atlantic/Canary",
            pendulum.datetime(2024, 4, 4, 1, 0, 0),
            None,
            "Europe/Berlin",
            None,
            False,
            pendulum.datetime(2024, 4, 4, 3, 0, 0, tz="Europe/Berlin"),
        ),
        (
            "TC018",
            "Atlantic/Canary",
            pendulum.datetime(2024, 4, 4, 1, 0, 0, tz="Etc/UTC"),
            None,
            "Europe/Berlin",
            None,
            False,
            pendulum.datetime(2024, 4, 4, 3, 0, 0, tz="Europe/Berlin"),
        ),
        (
            "TC019",
            "Atlantic/Canary",
            pendulum.datetime(2024, 4, 4, 2, 0, 0, tz="Europe/Berlin"),
            None,
            "Etc/UTC",
            None,
            False,
            pendulum.datetime(2024, 4, 4, 0, 0, 0, tz="Etc/UTC"),
        ),
        # ---------------------------------------
        # from string to UTC string
        # ---------------------------------------
        # - no timezone
        # local timezone UTC
        (
            "TC020",
            "Etc/UTC",
            "2023-11-06T00:00:00",
            "UTC",
            None,
            None,
            None,
            "2023-11-06T00:00:00Z",
        ),
        # local timezone "Europe/Berlin"
        (
            "TC021",
            "Europe/Berlin",
            "2023-11-06T00:00:00",
            "UTC",
            "Europe/Berlin",
            None,
            None,
            "2023-11-05T23:00:00Z",
        ),
        # - no microseconds
        (
            "TC022",
            "Atlantic/Canary",
            "2024-10-30T00:00:00+01:00",
            "UTC",
            None,
            None,
            None,
            "2024-10-29T23:00:00Z",
        ),
        (
            "TC023",
            "Atlantic/Canary",
            "2024-10-30T01:00:00+01:00",
            "utc",
            None,
            None,
            None,
            "2024-10-30T00:00:00Z",
        ),
        # - with microseconds
        (
            "TC024",
            "Atlantic/Canary",
            "2024-10-07T10:20:30.000+02:00",
            "UTC",
            None,
            None,
            None,
            "2024-10-07T08:20:30Z",
        ),
    ],
)
def test_to_datetime(
    set_other_timezone,
    test_case,
    local_timezone,
    date_input,
    as_string,
    in_timezone,
    to_naiv,
    to_maxtime,
    expected_output,
):
    """Test pendulum.datetime conversion with valid inputs."""
    set_other_timezone(local_timezone)
    result = to_datetime(
        date_input,
        as_string=as_string,
        in_timezone=in_timezone,
        to_naiv=to_naiv,
        to_maxtime=to_maxtime,
    )
    # if isinstance(date_input, str):
    #     print(f"Input: {date_input}")
    # else:
    #     print(f"Input: {date_input} tz={date_input.timezone}")
    if isinstance(expected_output, str):
        # print(f"Expected: {expected_output}")
        # print(f"Result: {result}")
        assert result == expected_output
    elif expected_output.timezone is None:
        # We expect an exception
        with pytest.raises(TypeError):
            assert compare_datetimes(result, expected_output).equal
    else:
        compare = compare_datetimes(result, expected_output)
        # print(f"---- Testcase: {test_case} ----")
        # print(f"Expected: {expected_output} tz={expected_output.timezone}")
        # print(f"Result: {result} tz={result.timezone}")
        # print(f"Compare: {compare}")
        assert compare.equal == True


# -----------------------------
# to_duration
# -----------------------------


# Test cases for valid duration inputs
@pytest.mark.parametrize(
    "input_value, expected_output",
    [
        # duration input
        (pendulum.duration(days=1), pendulum.duration(days=1)),
        # String input
        ("2 days", pendulum.duration(days=2)),
        ("5 hours", pendulum.duration(hours=5)),
        ("47 hours", pendulum.duration(hours=47)),
        ("48 hours", pendulum.duration(seconds=48 * 3600)),
        ("30 minutes", pendulum.duration(minutes=30)),
        ("45 seconds", pendulum.duration(seconds=45)),
        (
            "1 day 2 hours 30 minutes 15 seconds",
            pendulum.duration(days=1, hours=2, minutes=30, seconds=15),
        ),
        ("3 days 4 hours", pendulum.duration(days=3, hours=4)),
        # Integer/Float input
        (3600, pendulum.duration(seconds=3600)),  # 1 hour
        (86400, pendulum.duration(days=1)),  # 1 day
        (1800.5, pendulum.duration(seconds=1800.5)),  # 30 minutes and 0.5 seconds
        # Tuple/List input
        ((1, 2, 30, 15), pendulum.duration(days=1, hours=2, minutes=30, seconds=15)),
        ([0, 10, 0, 0], pendulum.duration(hours=10)),
    ],
)
def test_to_duration_valid(input_value, expected_output):
    """Test to_duration with valid inputs."""
    assert to_duration(input_value) == expected_output


def test_to_duration_summation():
    start_datetime = to_datetime("2028-01-11 00:00:00")
    index_datetime = start_datetime
    for i in range(48):
        expected_datetime = start_datetime + to_duration(f"{i} hours")
        assert index_datetime == expected_datetime
        index_datetime += to_duration("1 hour")
    assert index_datetime == to_datetime("2028-01-13 00:00:00")
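

# Editor's note: the sketch below illustrates how a to_duration-style string parser can
# work; the actual EOS parser may accept more formats than day/hour/minute/second.
def test_to_duration_parser_sketch():
    """Hedged sketch of duration string parsing with a regular expression (illustration only)."""
    import re

    pattern = re.compile(r"(\d+(?:\.\d+)?)\s*(day|hour|minute|second)s?")

    def parse_duration(text):
        # Map each "<amount> <unit>" group onto a pendulum.duration keyword argument.
        parts = {unit + "s": float(amount) for amount, unit in pattern.findall(text)}
        return pendulum.duration(**parts)

    assert parse_duration("3 days 4 hours") == pendulum.duration(days=3, hours=4)
    assert parse_duration("45 seconds") == pendulum.duration(seconds=45)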


# -----------------------------
@ -99,21 +383,196 @@ def test_to_timedelta_valid(input_value, expected_output):
def test_to_timezone_string():
    """Test to_timezone function returns correct timezone as a string."""
    location = (40.7128, -74.0060)  # New York City coordinates
    result = to_timezone(location=location, as_string=True)
    assert result == "America/New_York", "Expected timezone string 'America/New_York'"


def test_to_timezone_timezone():
    """Test to_timezone function returns correct timezone as a Timezone object."""
    location = (40.7128, -74.0060)  # New York City coordinates
    result = to_timezone(location=location)
    assert isinstance(result, Timezone), "Expected a Timezone object"
    assert result.name == "America/New_York", "Expected Timezone name 'America/New_York'"


def test_to_timezone_invalid_coordinates():
    """Test to_timezone function handles invalid coordinates gracefully."""
    location = (100.0, 200.0)  # Invalid coordinates outside Earth range
    with pytest.raises(ValueError, match="Invalid latitude/longitude"):
        to_timezone(location=location, as_string=True)
# -----------------------------
# hours_in_day
# -----------------------------


@pytest.mark.parametrize(
    "local_timezone, date, in_timezone, expected_hours",
    [
        ("Etc/UTC", "2024-11-10 00:00:00", "Europe/Berlin", 24),  # No DST in Germany
        ("Etc/UTC", "2024-08-10 00:00:00", "Europe/Berlin", 24),  # DST in Germany
        ("Etc/UTC", "2024-03-31 00:00:00", "Europe/Berlin", 23),  # DST change (23 hours/day)
        ("Etc/UTC", "2024-10-27 00:00:00", "Europe/Berlin", 25),  # DST change (25 hours/day)
        ("Europe/Berlin", "2024-11-10 00:00:00", "Europe/Berlin", 24),  # No DST in Germany
        ("Europe/Berlin", "2024-08-10 00:00:00", "Europe/Berlin", 24),  # DST in Germany
        ("Europe/Berlin", "2024-03-31 00:00:00", "Europe/Berlin", 23),  # DST change (23 hours/day)
        ("Europe/Berlin", "2024-10-27 00:00:00", "Europe/Berlin", 25),  # DST change (25 hours/day)
    ],
)
def test_hours_in_day(set_other_timezone, local_timezone, date, in_timezone, expected_hours):
    """Test the `hours_in_day` function."""
    set_other_timezone(local_timezone)
    date_input = to_datetime(date, in_timezone=in_timezone)
    assert date_input.timezone.name == in_timezone
    assert hours_in_day(date_input) == expected_hours
# -----------------------------
# compare_datetimes
# -----------------------------


@pytest.mark.parametrize(
    "dt1, dt2, equal, ge, gt, le, lt",
    [
        # Same time in the same timezone
        (
            pendulum.datetime(2024, 3, 15, 12, 0, 0, tz="UTC"),
            pendulum.datetime(2024, 3, 15, 12, 0, 0, tz="UTC"),
            True,
            True,
            False,
            True,
            False,
        ),
        (
            pendulum.datetime(2024, 4, 4, 0, 0, 0, tz="Europe/Berlin"),
            pendulum.datetime(2024, 4, 4, 0, 0, 0, tz="Europe/Berlin"),
            True,
            True,
            False,
            True,
            False,
        ),
        # Same instant in different timezones (converted to UTC)
        (
            pendulum.datetime(2024, 3, 15, 8, 0, 0, tz="Europe/Berlin"),
            pendulum.datetime(2024, 3, 15, 7, 0, 0, tz="UTC"),
            True,
            True,
            False,
            True,
            False,
        ),
        # Different times across timezones (converted to UTC)
        (
            pendulum.datetime(2024, 3, 15, 8, 0, 0, tz="America/New_York"),
            pendulum.datetime(2024, 3, 15, 12, 0, 0, tz="UTC"),
            True,
            True,
            False,
            True,
            False,
        ),
    ],
)
def test_compare_datetimes_equal(dt1, dt2, equal, ge, gt, le, lt):
    # requal = compare_datetimes(dt1, dt2).equal
    # rgt = compare_datetimes(dt1, dt2).gt
    # rge = compare_datetimes(dt1, dt2).ge
    # rlt = compare_datetimes(dt1, dt2).lt
    # rle = compare_datetimes(dt1, dt2).le
    # print(f"{dt1} vs. {dt2}: expected equal={equal}, ge={ge}, gt={gt}, le={le}, lt={lt}")
    # print(f"{dt1} vs. {dt2}: result equal={requal}, ge={rge}, gt={rgt}, le={rle}, lt={rlt}")
    assert compare_datetimes(dt1, dt2).equal == equal
    assert compare_datetimes(dt1, dt2).ge == ge
    assert compare_datetimes(dt1, dt2).gt == gt
    assert compare_datetimes(dt1, dt2).le == le
    assert compare_datetimes(dt1, dt2).lt == lt


@pytest.mark.parametrize(
    "dt1, dt2, equal, ge, gt, le, lt",
    [
        # Different times in the same timezone
        (
            pendulum.datetime(2024, 3, 15, 11, 0, 0, tz="UTC"),
            pendulum.datetime(2024, 3, 15, 12, 0, 0, tz="UTC"),
            False,
            False,
            False,
            True,
            True,
        ),
        # Different times across timezones (converted to UTC)
        (
            pendulum.datetime(2024, 3, 15, 6, 0, 0, tz="America/New_York"),
            pendulum.datetime(2024, 3, 15, 12, 0, 0, tz="UTC"),
            False,
            False,
            False,
            True,
            True,
        ),
        # DST changes: spring forward
        (
            pendulum.datetime(2024, 3, 10, 1, 59, 0, tz="America/New_York"),
            pendulum.datetime(2024, 3, 10, 3, 0, 0, tz="America/New_York"),
            False,
            False,
            False,
            True,
            True,
        ),
        # DST changes: fall back
        (
            pendulum.datetime(2024, 11, 3, 1, 0, 0, tz="America/New_York"),
            pendulum.datetime(2024, 11, 3, 1, 30, 0, tz="America/New_York"),
            False,
            False,
            False,
            True,
            True,
        ),
    ],
)
def test_compare_datetimes_lt(dt1, dt2, equal, ge, gt, le, lt):
    # requal = compare_datetimes(dt1, dt2).equal
    # rgt = compare_datetimes(dt1, dt2).gt
    # rge = compare_datetimes(dt1, dt2).ge
    # rlt = compare_datetimes(dt1, dt2).lt
    # rle = compare_datetimes(dt1, dt2).le
    # print(f"{dt1} vs. {dt2}: expected equal={equal}, ge={ge}, gt={gt}, le={le}, lt={lt}")
    # print(f"{dt1} vs. {dt2}: result equal={requal}, ge={rge}, gt={rgt}, le={rle}, lt={rlt}")
    assert compare_datetimes(dt1, dt2).equal == equal
    assert compare_datetimes(dt1, dt2).ge == ge
    assert compare_datetimes(dt1, dt2).gt == gt
    assert compare_datetimes(dt1, dt2).le == le
    assert compare_datetimes(dt1, dt2).lt == lt


@pytest.mark.parametrize(
    "dt1, dt2",
    [
        # Different times in the same timezone
        (
            pendulum.datetime(2024, 3, 15, 13, 0, 0, tz="UTC"),
            pendulum.datetime(2024, 3, 15, 12, 0, 0, tz="UTC"),
        ),
    ],
)
def test_compare_datetimes_gt(dt1, dt2):
    # requal = compare_datetimes(dt1, dt2).equal
    # rgt = compare_datetimes(dt1, dt2).gt
    # rge = compare_datetimes(dt1, dt2).ge
    # rlt = compare_datetimes(dt1, dt2).lt
    # rle = compare_datetimes(dt1, dt2).le
    # print(f"{dt1} vs. {dt2}: result equal={requal}, ge={rge}, gt={rgt}, le={rle}, lt={rlt}")
    assert compare_datetimes(dt1, dt2).equal == False
    assert compare_datetimes(dt1, dt2).ge
    assert compare_datetimes(dt1, dt2).gt
    assert compare_datetimes(dt1, dt2).le == False
    assert compare_datetimes(dt1, dt2).lt == False
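

# Editor's note: a hedged sketch of a compare_datetimes-style helper; the field names
# follow the assertions above, but the actual EOS implementation may differ.
def test_compare_datetimes_sketch():
    """Hedged sketch: compare two aware datetimes on the UTC timeline (illustration only)."""
    from dataclasses import dataclass

    @dataclass
    class Comparison:
        equal: bool
        ge: bool
        gt: bool
        le: bool
        lt: bool

    def compare(dt1, dt2):
        # Convert to UTC first so different timezones denoting the same instant compare equal.
        u1, u2 = dt1.in_timezone("UTC"), dt2.in_timezone("UTC")
        return Comparison(u1 == u2, u1 >= u2, u1 > u2, u1 <= u2, u1 < u2)

    result = compare(
        pendulum.datetime(2024, 3, 15, 8, 0, 0, tz="Europe/Berlin"),
        pendulum.datetime(2024, 3, 15, 7, 0, 0, tz="UTC"),
    )
    assert result.equal and result.ge and result.le
    assert not result.gt and not result.lt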
tests/test_elecpriceakkudoktor.py (new file, 141 lines)
@ -0,0 +1,141 @@
import json
from pathlib import Path
from unittest.mock import Mock, patch

import pytest

from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.elecpriceakkudoktor import (
    AkkudoktorElecPrice,
    AkkudoktorElecPriceValue,
    ElecPriceAkkudoktor,
)
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON = DIR_TESTDATA.joinpath(
    "elecpriceforecast_akkudoktor_1.json"
)

ems_eos = get_ems()


@pytest.fixture
def elecprice_provider(monkeypatch):
    """Fixture to create an ElecPriceProvider instance."""
    monkeypatch.setenv("elecprice_provider", "Akkudoktor")
    return ElecPriceAkkudoktor()


@pytest.fixture
def sample_akkudoktor_1_json():
    """Fixture that returns a sample forecast data report."""
    with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "r") as f_res:
        input_data = json.load(f_res)
    return input_data


@pytest.fixture
def cache_store():
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    return CacheFileStore()


# ------------------------------------------------
# General forecast
# ------------------------------------------------


def test_singleton_instance(elecprice_provider):
    """Test that ElecPriceForecast behaves as a singleton."""
    another_instance = ElecPriceAkkudoktor()
    assert elecprice_provider is another_instance


def test_invalid_provider(elecprice_provider, monkeypatch):
    """Test requesting an unsupported elecprice_provider."""
    monkeypatch.setenv("elecprice_provider", "<invalid>")
    elecprice_provider.config.update()
    assert not elecprice_provider.enabled()


# ------------------------------------------------
# Akkudoktor
# ------------------------------------------------


@patch("requests.get")
def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json):
    """Test requesting a forecast from Akkudoktor."""
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = json.dumps(sample_akkudoktor_1_json)
    mock_get.return_value = mock_response

    # Preset, as this is usually done by update()
    elecprice_provider.config.update()

    # Test function
    akkudoktor_data = elecprice_provider._request_forecast()

    assert isinstance(akkudoktor_data, AkkudoktorElecPrice)
    assert akkudoktor_data.values[0] == AkkudoktorElecPriceValue(
        start_timestamp=1733871600000,
        end_timestamp=1733875200000,
        start="2024-12-10T23:00:00.000Z",
        end="2024-12-11T00:00:00.000Z",
        marketprice=115.94,
        unit="Eur/MWh",
        marketpriceEurocentPerKWh=11.59,
    )


@patch("requests.get")
def test_update_data(mock_get, elecprice_provider, sample_akkudoktor_1_json, cache_store):
    """Test fetching a forecast from Akkudoktor."""
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = json.dumps(sample_akkudoktor_1_json)
    mock_get.return_value = mock_response

    cache_store.clear(clear_all=True)

    # Call the method
    ems_eos.set_start_datetime(to_datetime("2024-12-11 00:00:00", in_timezone="Europe/Berlin"))
    elecprice_provider.update_data(force_enable=True, force_update=True)

    # Assert: Verify the result is as expected
    mock_get.assert_called_once()
    assert len(elecprice_provider) == 25

    # Assert we get prediction_hours price values by resampling
    np_price_array = elecprice_provider.key_to_array(
        key="elecprice_marketprice",
        start_datetime=elecprice_provider.start_datetime,
        end_datetime=elecprice_provider.end_datetime,
    )
    assert len(np_price_array) == elecprice_provider.total_hours

    # with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_2_JSON, "w") as f_out:
    #     f_out.write(elecprice_provider.to_json())


# ------------------------------------------------
# Development Akkudoktor
# ------------------------------------------------


@pytest.mark.skip(reason="For development only")
def test_akkudoktor_development_forecast_data(elecprice_provider):
    """Fetch data from the real Akkudoktor server."""
    # Preset, as this is usually done by update_data()
    elecprice_provider.start_datetime = to_datetime("2024-10-26 00:00:00")

    akkudoktor_data = elecprice_provider._request_forecast()

    with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
        json.dump(akkudoktor_data, f_out, indent=4)
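The two mocked tests above share one pattern: patch requests.get so the provider's HTTP fetch returns canned JSON instead of hitting the Akkudoktor API. A standalone sketch of that pattern (fetch_raw and the URL are illustrative; the provider's real fetch lives in _request_forecast()):

import json
from unittest.mock import Mock, patch

import requests


def fetch_raw(url: str) -> dict:
    # Stand-in for the provider's HTTP fetch.
    response = requests.get(url)
    assert response.status_code == 200
    return json.loads(response.content)


with patch("requests.get") as mock_get:
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = json.dumps({"values": []})
    mock_get.return_value = mock_response

    assert fetch_raw("https://api.example.invalid/prices") == {"values": []}
    mock_get.assert_called_once()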
tests/test_elecpriceimport.py (new file, 110 lines)
@@ -0,0 +1,110 @@
import json
from pathlib import Path

import pytest

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImport
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_ELECPRICEIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.json")

config_eos = get_config()
ems_eos = get_ems()


@pytest.fixture
def elecprice_provider(reset_config, sample_import_1_json):
    """Fixture to create an ElecPriceProvider instance."""
    settings = {
        "elecprice_provider": "ElecPriceImport",
        "elecpriceimport_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
        "elecpriceimport_json": json.dumps(sample_import_1_json),
    }
    config_eos.merge_settings_from_dict(settings)
    provider = ElecPriceImport()
    assert provider.enabled()
    return provider


@pytest.fixture
def sample_import_1_json():
    """Fixture that returns a sample forecast data report."""
    with open(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON, "r") as f_res:
        input_data = json.load(f_res)
    return input_data


# ------------------------------------------------
# General forecast
# ------------------------------------------------


def test_singleton_instance(elecprice_provider):
    """Test that ElecPriceForecast behaves as a singleton."""
    another_instance = ElecPriceImport()
    assert elecprice_provider is another_instance


def test_invalid_provider(elecprice_provider):
    """Test requesting an unsupported elecprice_provider."""
    settings = {
        "elecprice_provider": "<invalid>",
        "elecpriceimport_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
    }
    config_eos.merge_settings_from_dict(settings)
    assert not elecprice_provider.enabled()


# ------------------------------------------------
# Import
# ------------------------------------------------


@pytest.mark.parametrize(
    "start_datetime, from_file",
    [
        ("2024-11-10 00:00:00", True),  # No DST in Germany
        ("2024-08-10 00:00:00", True),  # DST in Germany
        ("2024-03-31 00:00:00", True),  # DST change in Germany (23-hour day)
        ("2024-10-27 00:00:00", True),  # DST change in Germany (25-hour day)
        ("2024-11-10 00:00:00", False),  # No DST in Germany
        ("2024-08-10 00:00:00", False),  # DST in Germany
        ("2024-03-31 00:00:00", False),  # DST change in Germany (23-hour day)
        ("2024-10-27 00:00:00", False),  # DST change in Germany (25-hour day)
    ],
)
def test_import(elecprice_provider, sample_import_1_json, start_datetime, from_file):
    """Test fetching a forecast from import data."""
    ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
    if from_file:
        config_eos.elecpriceimport_json = None
        assert config_eos.elecpriceimport_json is None
    else:
        config_eos.elecpriceimport_file_path = None
        assert config_eos.elecpriceimport_file_path is None
    elecprice_provider.clear()

    # Call the method
    elecprice_provider.update_data()

    # Assert: Verify the result is as expected
    assert elecprice_provider.start_datetime is not None
    assert elecprice_provider.total_hours is not None
    assert compare_datetimes(elecprice_provider.start_datetime, ems_eos.start_datetime).equal
    values = sample_import_1_json["elecprice_marketprice"]
    value_datetime_mapping = elecprice_provider.import_datetimes(len(values))
    for i, mapping in enumerate(value_datetime_mapping):
        assert i < len(elecprice_provider.records)
        expected_datetime, expected_value_index = mapping
        expected_value = values[expected_value_index]
        result_datetime = elecprice_provider.records[i].date_time
        result_value = elecprice_provider.records[i]["elecprice_marketprice"]

        # print(f"{i}: Expected: {expected_datetime}:{expected_value}")
        # print(f"{i}: Result: {result_datetime}:{result_value}")
        assert compare_datetimes(result_datetime, expected_datetime).equal
        assert result_value == expected_value
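The parametrized dates above are chosen around the Europe/Berlin DST transitions: the local day has 23 hours on 2024-03-31 and 25 hours on 2024-10-27, which exercises the datetime-to-value mapping. A small sketch of why, using pendulum (hours_in_local_day is an illustrative helper):

import pendulum


def hours_in_local_day(year: int, month: int, day: int, tz: str = "Europe/Berlin") -> int:
    # Elapsed wall-clock time between two consecutive local midnights.
    start = pendulum.datetime(year, month, day, tz=tz)
    return (start.add(days=1) - start).in_hours()


assert hours_in_local_day(2024, 11, 10) == 24  # no DST change
assert hours_in_local_day(2024, 3, 31) == 23  # spring-forward day
assert hours_in_local_day(2024, 10, 27) == 25  # fall-back day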
tests/test_loadakkudoktor.py (new file, 99 lines)
@@ -0,0 +1,99 @@
from unittest.mock import patch

import numpy as np
import pendulum
import pytest

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.loadakkudoktor import (
    LoadAkkudoktor,
    LoadAkkudoktorCommonSettings,
)

config_eos = get_config()
ems_eos = get_ems()


@pytest.fixture
def load_provider(monkeypatch):
    """Fixture to create a LoadAkkudoktor instance."""
    settings = {
        "load0_provider": "LoadAkkudoktor",
        "load0_name": "Akkudoktor Profile",
        "loadakkudoktor_year_energy": "1000",
    }
    config_eos.merge_settings_from_dict(settings)
    return LoadAkkudoktor()


@pytest.fixture
def mock_load_profiles_file(tmp_path):
    """Fixture to create a mock load profiles file."""
    load_profiles_path = tmp_path / "load_profiles.npz"
    np.savez(
        load_profiles_path,
        yearly_profiles=np.random.rand(365, 24),  # Random load profiles
        yearly_profiles_std=np.random.rand(365, 24),  # Random standard deviation
    )
    return load_profiles_path


def test_loadakkudoktor_settings_validator():
    """Test the field validator for `loadakkudoktor_year_energy`."""
    settings = LoadAkkudoktorCommonSettings(loadakkudoktor_year_energy=1234)
    assert isinstance(settings.loadakkudoktor_year_energy, float)
    assert settings.loadakkudoktor_year_energy == 1234.0

    settings = LoadAkkudoktorCommonSettings(loadakkudoktor_year_energy=1234.56)
    assert isinstance(settings.loadakkudoktor_year_energy, float)
    assert settings.loadakkudoktor_year_energy == 1234.56


def test_loadakkudoktor_provider_id(load_provider):
    """Test the `provider_id` class method."""
    assert load_provider.provider_id() == "LoadAkkudoktor"


@patch("akkudoktoreos.prediction.loadakkudoktor.Path")
@patch("akkudoktoreos.prediction.loadakkudoktor.np.load")
def test_load_data_from_mock(mock_np_load, mock_path, mock_load_profiles_file, load_provider):
    """Test the `load_data` method."""
    # Mock path behavior to return the test file
    mock_path.return_value.parent.parent.joinpath.return_value = mock_load_profiles_file

    # Mock numpy load to return data similar to what would be in the file
    mock_np_load.return_value = {
        "yearly_profiles": np.ones((365, 24)),
        "yearly_profiles_std": np.zeros((365, 24)),
    }

    # Test data loading
    data_year_energy = load_provider.load_data()
    assert data_year_energy is not None
    assert data_year_energy.shape == (365, 2, 24)


def test_load_data_from_file(load_provider):
    """Test that `load_data` loads data from the profiles file."""
    data_year_energy = load_provider.load_data()
    assert data_year_energy is not None


@patch("akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor.load_data")
def test_update_data(mock_load_data, load_provider):
    """Test the `_update` method."""
    mock_load_data.return_value = np.random.rand(365, 2, 24)

    # Mock methods for updating values
    ems_eos.set_start_datetime(pendulum.datetime(2024, 1, 1))

    # Assure there are no prediction records
    load_provider.clear()
    assert len(load_provider) == 0

    # Execute the method
    load_provider._update_data()

    # Validate that update_value is called
    assert len(load_provider) > 0
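test_load_data_from_mock expects load_data() to turn the two (365, 24) arrays from the .npz file into a single (365, 2, 24) array. One plausible construction, stacking the mean and standard-deviation profiles per day (whether LoadAkkudoktor stacks exactly this way is an assumption):

import numpy as np

# The two arrays the mock_load_profiles_file fixture stores.
yearly_profiles = np.ones((365, 24))
yearly_profiles_std = np.zeros((365, 24))

# Stack mean and std along a new middle axis: day x {mean, std} x hour.
data_year_energy = np.stack((yearly_profiles, yearly_profiles_std), axis=1)
assert data_year_energy.shape == (365, 2, 24)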
@@ -16,4 +16,9 @@ def test_openapi_spec_current():
        new_spec = json.load(f_new)
    with open(old_spec_path) as f_old:
        old_spec = json.load(f_old)

    # Serialize to ensure the comparison is consistent
    new_spec = json.dumps(new_spec, indent=4, sort_keys=True)
    old_spec = json.dumps(old_spec, indent=4, sort_keys=True)

    assert new_spec == old_spec
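The serialization step added above makes the OpenAPI comparison canonical: json.dumps with sort_keys=True produces one stable string per spec, so a pytest failure shows a readable string diff instead of a deep dict diff. A minimal illustration (the spec fragments are made up):

import json

a = {"paths": {"/v1/prediction": {}}, "openapi": "3.1.0"}
b = {"openapi": "3.1.0", "paths": {"/v1/prediction": {}}}

# Dict equality already ignores key order; the canonical dump adds a
# stable, diffable text representation on top.
assert json.dumps(a, indent=4, sort_keys=True) == json.dumps(b, indent=4, sort_keys=True)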
tests/test_prediction.py (new file, 226 lines)
@@ -0,0 +1,226 @@
import pytest
from pydantic import ValidationError

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.elecpriceakkudoktor import ElecPriceAkkudoktor
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImport
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktor
from akkudoktoreos.prediction.loadimport import LoadImport
from akkudoktoreos.prediction.prediction import (
    Prediction,
    PredictionCommonSettings,
    get_prediction,
)
from akkudoktoreos.prediction.pvforecastakkudoktor import PVForecastAkkudoktor
from akkudoktoreos.prediction.pvforecastimport import PVForecastImport
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
from akkudoktoreos.prediction.weatherimport import WeatherImport
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime


@pytest.fixture
def sample_settings(reset_config):
    """Fixture that adds settings data to the global config."""
    settings = {
        "prediction_hours": 48,
        "prediction_historic_hours": 24,
        "latitude": 52.52,
        "longitude": 13.405,
        "pvforecast_provider": "PVForecastAkkudoktor",
        "pvforecast0_peakpower": 5.0,
        "pvforecast0_surface_azimuth": -10,
        "pvforecast0_surface_tilt": 7,
        "pvforecast0_userhorizon": [20, 27, 22, 20],
        "pvforecast0_inverter_paco": 10000,
        "pvforecast1_peakpower": 4.8,
        "pvforecast1_surface_azimuth": -90,
        "pvforecast1_surface_tilt": 7,
        "pvforecast1_userhorizon": [30, 30, 30, 50],
        "pvforecast1_inverter_paco": 10000,
        "pvforecast2_peakpower": 1.4,
        "pvforecast2_surface_azimuth": -40,
        "pvforecast2_surface_tilt": 60,
        "pvforecast2_userhorizon": [60, 30, 0, 30],
        "pvforecast2_inverter_paco": 2000,
        "pvforecast3_peakpower": 1.6,
        "pvforecast3_surface_azimuth": 5,
        "pvforecast3_surface_tilt": 45,
        "pvforecast3_userhorizon": [45, 25, 30, 60],
        "pvforecast3_inverter_paco": 1400,
        "pvforecast4_peakpower": None,
    }

    # Merge settings to config
    config = get_config()
    config.merge_settings_from_dict(settings)
    return config


@pytest.fixture
def prediction():
    """All EOS predictions."""
    return get_prediction()


@pytest.fixture
def forecast_providers():
    """Fixture for singleton forecast provider instances."""
    return [
        ElecPriceAkkudoktor(),
        ElecPriceImport(),
        LoadAkkudoktor(),
        LoadImport(),
        PVForecastAkkudoktor(),
        PVForecastImport(),
        WeatherBrightSky(),
        WeatherClearOutside(),
        WeatherImport(),
    ]


@pytest.mark.parametrize(
    "prediction_hours, prediction_historic_hours, latitude, longitude, expected_timezone",
    [
        (48, 24, 40.7128, -74.0060, "America/New_York"),  # Valid latitude/longitude
        (0, 0, None, None, None),  # No location
        (100, 50, 51.5074, -0.1278, "Europe/London"),  # Another valid location
    ],
)
def test_prediction_common_settings_valid(
    prediction_hours, prediction_historic_hours, latitude, longitude, expected_timezone
):
    """Test valid settings for PredictionCommonSettings."""
    settings = PredictionCommonSettings(
        prediction_hours=prediction_hours,
        prediction_historic_hours=prediction_historic_hours,
        latitude=latitude,
        longitude=longitude,
    )
    assert settings.prediction_hours == prediction_hours
    assert settings.prediction_historic_hours == prediction_historic_hours
    assert settings.latitude == latitude
    assert settings.longitude == longitude
    assert settings.timezone == expected_timezone


@pytest.mark.parametrize(
    "field_name, invalid_value, expected_error",
    [
        ("prediction_hours", -1, "Input should be greater than or equal to 0"),
        ("prediction_historic_hours", -5, "Input should be greater than or equal to 0"),
        ("latitude", -91.0, "Input should be greater than or equal to -90"),
        ("latitude", 91.0, "Input should be less than or equal to 90"),
        ("longitude", -181.0, "Input should be greater than or equal to -180"),
        ("longitude", 181.0, "Input should be less than or equal to 180"),
    ],
)
def test_prediction_common_settings_invalid(field_name, invalid_value, expected_error):
    """Test invalid settings for PredictionCommonSettings."""
    valid_data = {
        "prediction_hours": 48,
        "prediction_historic_hours": 24,
        "latitude": 40.7128,
        "longitude": -74.0060,
    }
    valid_data[field_name] = invalid_value

    with pytest.raises(ValidationError, match=expected_error):
        PredictionCommonSettings(**valid_data)


def test_prediction_common_settings_no_location():
    """Test that timezone is None when latitude and longitude are not provided."""
    settings = PredictionCommonSettings(
        prediction_hours=48, prediction_historic_hours=24, latitude=None, longitude=None
    )
    assert settings.timezone is None


def test_prediction_common_settings_with_location():
    """Test that timezone is correctly computed when latitude and longitude are provided."""
    settings = PredictionCommonSettings(
        prediction_hours=48, prediction_historic_hours=24, latitude=34.0522, longitude=-118.2437
    )
    assert settings.timezone == "America/Los_Angeles"


def test_prediction_common_settings_timezone_none_when_coordinates_missing():
    """Test that timezone is None when latitude or longitude is missing."""
    config_no_latitude = PredictionCommonSettings(longitude=-74.0060)
    config_no_longitude = PredictionCommonSettings(latitude=40.7128)
    config_no_coords = PredictionCommonSettings()

    assert config_no_latitude.timezone is None
    assert config_no_longitude.timezone is None
    assert config_no_coords.timezone is None


def test_initialization(prediction, forecast_providers):
    """Test that Prediction is initialized with the correct providers in sequence."""
    assert isinstance(prediction, Prediction)
    assert prediction.providers == forecast_providers


def test_provider_sequence(prediction):
    """Test that the provider sequence is maintained in the Prediction instance."""
    assert isinstance(prediction.providers[0], ElecPriceAkkudoktor)
    assert isinstance(prediction.providers[1], ElecPriceImport)
    assert isinstance(prediction.providers[2], LoadAkkudoktor)
    assert isinstance(prediction.providers[3], LoadImport)
    assert isinstance(prediction.providers[4], PVForecastAkkudoktor)
    assert isinstance(prediction.providers[5], PVForecastImport)
    assert isinstance(prediction.providers[6], WeatherBrightSky)
    assert isinstance(prediction.providers[7], WeatherClearOutside)
    assert isinstance(prediction.providers[8], WeatherImport)


def test_update_calls_providers(sample_settings, prediction):
    """Test that the update method calls the update method for each provider in sequence."""
    # Mark the `update_datetime` of each provider
    old_datetime = to_datetime("1970-01-01 00:00:00")
    for provider in prediction.providers:
        provider.update_datetime = old_datetime

    ems_eos = get_ems()
    ems_eos.set_start_datetime(to_datetime())
    prediction.update_data()

    # Verify each provider's `update` method was called
    for provider in prediction.providers:
        if provider.enabled():
            assert compare_datetimes(provider.update_datetime, old_datetime).gt


def test_provider_by_id(prediction, forecast_providers):
    """Test that the provider_by_id method returns the correct provider."""
    for provider in forecast_providers:
        assert prediction.provider_by_id(provider.provider_id()) == provider


def test_prediction_repr(prediction):
    """Test that the Prediction instance's representation is correct."""
    result = repr(prediction)
    assert "Prediction([" in result
    assert "ElecPriceAkkudoktor" in result
    assert "ElecPriceImport" in result
    assert "LoadAkkudoktor" in result
    assert "LoadImport" in result
    assert "PVForecastAkkudoktor" in result
    assert "PVForecastImport" in result
    assert "WeatherBrightSky" in result
    assert "WeatherClearOutside" in result
    assert "WeatherImport" in result


def test_empty_providers(prediction, forecast_providers):
    """Test behavior when Prediction does not have providers."""
    # Clear all prediction providers from prediction
    providers_bkup = prediction.providers.copy()
    prediction.providers.clear()
    assert prediction.providers == []
    prediction.update_data()  # Should not raise an error even with no providers

    # Clean up after the test
    prediction.providers = providers_bkup
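Several tests above assert that PredictionCommonSettings derives timezone from latitude/longitude and leaves it None when either coordinate is missing. A sketch of that derivation using a coordinate-to-timezone lookup; TimezoneFinder is one way to do it, and whether the settings model uses it internally is an assumption:

from typing import Optional

from timezonefinder import TimezoneFinder


def timezone_for(latitude: Optional[float], longitude: Optional[float]) -> Optional[str]:
    if latitude is None or longitude is None:
        return None  # matches the "no location -> timezone is None" tests
    return TimezoneFinder().timezone_at(lat=latitude, lng=longitude)


assert timezone_for(40.7128, -74.0060) == "America/New_York"
assert timezone_for(None, -74.0060) is None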
tests/test_predictionabc.py (new file, 437 lines)
@@ -0,0 +1,437 @@
import os
from datetime import datetime
from typing import Any, ClassVar, List, Optional, Union

import pandas as pd
import pendulum
import pytest
from pydantic import Field

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.prediction import PredictionCommonSettings
from akkudoktoreos.prediction.predictionabc import (
    PredictionBase,
    PredictionContainer,
    PredictionProvider,
    PredictionRecord,
    PredictionSequence,
)
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration

# Derived classes for testing
# ---------------------------


class DerivedConfig(PredictionCommonSettings):
    env_var: Optional[int] = Field(default=None, description="Test config by environment var")
    instance_field: Optional[str] = Field(default=None, description="Test config by instance field")
    class_constant: Optional[int] = Field(default=None, description="Test config by class constant")


class DerivedBase(PredictionBase):
    instance_field: Optional[str] = Field(default=None, description="Field Value")
    class_constant: ClassVar[int] = 30


class DerivedRecord(PredictionRecord):
    prediction_value: Optional[float] = Field(default=None, description="Prediction Value")


class DerivedSequence(PredictionSequence):
    # overload
    records: List[DerivedRecord] = Field(
        default_factory=list, description="List of DerivedRecord records"
    )

    @classmethod
    def record_class(cls) -> Any:
        return DerivedRecord


class DerivedPredictionProvider(PredictionProvider):
    """A concrete subclass of PredictionProvider for testing purposes."""

    # overload
    records: List[DerivedRecord] = Field(
        default_factory=list, description="List of DerivedRecord records"
    )
    provider_enabled: ClassVar[bool] = False
    provider_updated: ClassVar[bool] = False

    @classmethod
    def record_class(cls) -> Any:
        return DerivedRecord

    # Implement abstract methods for test purposes
    def provider_id(self) -> str:
        return "DerivedPredictionProvider"

    def enabled(self) -> bool:
        return self.provider_enabled

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        # Simulate update logic
        DerivedPredictionProvider.provider_updated = True


class DerivedPredictionContainer(PredictionContainer):
    providers: List[Union[DerivedPredictionProvider, PredictionProvider]] = Field(
        default_factory=list, description="List of prediction providers"
    )


# Tests
# ----------


class TestPredictionBase:
    @pytest.fixture
    def base(self, reset_config, monkeypatch):
        # Provide default values for configuration
        monkeypatch.setenv("latitude", "50.0")
        monkeypatch.setenv("longitude", "10.0")
        derived = DerivedBase()
        derived.config.update()
        return derived

    def test_config_value_from_env_variable(self, base, monkeypatch):
        # From Prediction Config
        monkeypatch.setenv("latitude", "2.5")
        base.config.update()
        assert base.config.latitude == 2.5

    def test_config_value_from_field_default(self, base, monkeypatch):
        assert base.config.model_fields["prediction_hours"].default == 48
        assert base.config.prediction_hours == 48
        monkeypatch.setenv("prediction_hours", "128")
        base.config.update()
        assert base.config.prediction_hours == 128
        monkeypatch.delenv("prediction_hours")
        base.config.update()
        assert base.config.prediction_hours == 48

    def test_get_config_value_key_error(self, base):
        with pytest.raises(AttributeError):
            base.config.non_existent_key


# TestPredictionRecord fully covered by TestDataRecord
# ----------------------------------------------------

# TestPredictionSequence fully covered by TestDataSequence
# ---------------------------------------------------------

# TestPredictionStartEndKeepMixin fully covered by TestPredictionContainer
# -------------------------------------------------------------------------


class TestPredictionProvider:
    # Fixtures and helper functions
    @pytest.fixture
    def provider(self):
        """Fixture to provide an instance of DerivedPredictionProvider for testing."""
        DerivedPredictionProvider.provider_enabled = True
        DerivedPredictionProvider.provider_updated = False
        return DerivedPredictionProvider()

    @pytest.fixture
    def sample_start_datetime(self):
        """Fixture for a sample start datetime."""
        return to_datetime(datetime(2024, 11, 1, 12, 0))

    def create_test_record(self, date, value):
        """Helper function to create a test PredictionRecord."""
        return DerivedRecord(date_time=date, prediction_value=value)

    # Tests

    def test_singleton_behavior(self, provider):
        """Test that PredictionProvider enforces singleton behavior."""
        instance1 = provider
        instance2 = DerivedPredictionProvider()
        assert (
            instance1 is instance2
        ), "Singleton pattern is not enforced; instances are not the same."

    def test_update_computed_fields(self, provider, sample_start_datetime):
        """Test that the computed fields `end_datetime` and `keep_datetime` are correctly calculated."""
        ems_eos = get_ems()
        ems_eos.set_start_datetime(sample_start_datetime)
        provider.config.prediction_hours = 24  # 24 hours into the future
        provider.config.prediction_historic_hours = 48  # 48 hours into the past

        expected_end_datetime = sample_start_datetime + to_duration(
            provider.config.prediction_hours * 3600
        )
        expected_keep_datetime = sample_start_datetime - to_duration(
            provider.config.prediction_historic_hours * 3600
        )

        assert (
            provider.end_datetime == expected_end_datetime
        ), "End datetime is not calculated correctly."
        assert (
            provider.keep_datetime == expected_keep_datetime
        ), "Keep datetime is not calculated correctly."

    def test_update_method_with_defaults(self, provider, sample_start_datetime, monkeypatch):
        """Test the `update` method with default parameters."""
        # EOS config supersedes
        config_eos = get_config()
        ems_eos = get_ems()
        # The following values are currently not set in the EOS config, so we can override them
        monkeypatch.setenv("prediction_historic_hours", "2")
        assert os.getenv("prediction_historic_hours") == "2"
        monkeypatch.setenv("latitude", "37.7749")
        assert os.getenv("latitude") == "37.7749"
        monkeypatch.setenv("longitude", "-122.4194")
        assert os.getenv("longitude") == "-122.4194"

        ems_eos.set_start_datetime(sample_start_datetime)
        provider.update_data()

        assert provider.config.prediction_hours == config_eos.prediction_hours
        assert provider.config.prediction_historic_hours == 2
        assert provider.config.latitude == 37.7749
        assert provider.config.longitude == -122.4194
        assert provider.start_datetime == sample_start_datetime
        assert provider.end_datetime == sample_start_datetime + to_duration(
            f"{provider.config.prediction_hours} hours"
        )
        assert provider.keep_datetime == sample_start_datetime - to_duration("2 hours")

    def test_update_method_force_enable(self, provider, monkeypatch):
        """Test that `update` executes when `force_enable` is True, even if `enabled` is False."""
        # Preset values that are needed by update
        monkeypatch.setenv("latitude", "37.7749")
        monkeypatch.setenv("longitude", "-122.4194")

        # Override enabled to return False for this test
        DerivedPredictionProvider.provider_enabled = False
        DerivedPredictionProvider.provider_updated = False
        provider.update_data(force_enable=True)
        assert provider.enabled() is False, "Provider should be disabled, but enabled() is True."
        assert (
            DerivedPredictionProvider.provider_updated is True
        ), "Provider should have been executed, but was not."

    def test_delete_by_datetime(self, provider, sample_start_datetime):
        """Test the `delete_by_datetime` method for removing records by datetime range."""
        # Add records to the provider for deletion testing
        provider.records = [
            self.create_test_record(sample_start_datetime - to_duration("3 hours"), 1),
            self.create_test_record(sample_start_datetime - to_duration("1 hour"), 2),
            self.create_test_record(sample_start_datetime + to_duration("1 hour"), 3),
        ]

        provider.delete_by_datetime(
            start_datetime=sample_start_datetime - to_duration("2 hours"),
            end_datetime=sample_start_datetime + to_duration("2 hours"),
        )
        assert (
            len(provider.records) == 1
        ), "Only one record should remain after deletion by datetime."
        assert provider.records[0].date_time == sample_start_datetime - to_duration(
            "3 hours"
        ), "Unexpected record remains."


class TestPredictionContainer:
    # Fixtures and helpers
    @pytest.fixture
    def container(self):
        container = DerivedPredictionContainer()
        return container

    @pytest.fixture
    def container_with_providers(self):
        record1 = self.create_test_record(datetime(2023, 11, 5), 1)
        record2 = self.create_test_record(datetime(2023, 11, 6), 2)
        record3 = self.create_test_record(datetime(2023, 11, 7), 3)
        provider = DerivedPredictionProvider()
        provider.clear()
        assert len(provider) == 0
        provider.append(record1)
        provider.append(record2)
        provider.append(record3)
        assert len(provider) == 3
        container = DerivedPredictionContainer()
        container.providers.clear()
        assert len(container.providers) == 0
        container.providers.append(provider)
        assert len(container.providers) == 1
        return container

    def create_test_record(self, date, value):
        """Helper function to create a test PredictionRecord."""
        return DerivedRecord(date_time=date, prediction_value=value)

    # Tests
    @pytest.mark.parametrize(
        "start, hours, end",
        [
            ("2024-11-10 00:00:00", 24, "2024-11-11 00:00:00"),  # No DST in Germany
            ("2024-08-10 00:00:00", 24, "2024-08-11 00:00:00"),  # DST in Germany
            ("2024-03-31 00:00:00", 24, "2024-04-01 00:00:00"),  # DST change (23-hour day)
            ("2024-10-27 00:00:00", 24, "2024-10-28 00:00:00"),  # DST change (25-hour day)
            ("2024-11-10 00:00:00", 48, "2024-11-12 00:00:00"),  # No DST in Germany
            ("2024-08-10 00:00:00", 48, "2024-08-12 00:00:00"),  # DST in Germany
            ("2024-03-31 00:00:00", 48, "2024-04-02 00:00:00"),  # DST change (47-hour span)
            ("2024-10-27 00:00:00", 48, "2024-10-29 00:00:00"),  # DST change (49-hour span)
        ],
    )
    def test_end_datetime(self, container, start, hours, end):
        """Test the end datetime calculation from the start datetime."""
        ems_eos = get_ems()
        ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
        settings = {
            "prediction_hours": hours,
        }
        container.config.merge_settings_from_dict(settings)
        expected = to_datetime(end, in_timezone="Europe/Berlin")
        assert compare_datetimes(container.end_datetime, expected).equal

    @pytest.mark.parametrize(
        "start, historic_hours, expected_keep",
        [
            # Standard case
            (
                pendulum.datetime(2024, 8, 10, 0, 0, tz="Europe/Berlin"),
                24,
                pendulum.datetime(2024, 8, 9, 0, 0, tz="Europe/Berlin"),
            ),
            # With DST, but it should not affect historical data
            (
                pendulum.datetime(2024, 4, 1, 0, 0, tz="Europe/Berlin"),
                24,
                pendulum.datetime(2024, 3, 30, 23, 0, tz="Europe/Berlin"),
            ),
        ],
    )
    def test_keep_datetime(self, container, start, historic_hours, expected_keep):
        """Test the `keep_datetime` property."""
        ems_eos = get_ems()
        ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
        settings = {
            "prediction_historic_hours": historic_hours,
        }
        container.config.merge_settings_from_dict(settings)
        expected = to_datetime(expected_keep, in_timezone="Europe/Berlin")
        assert compare_datetimes(container.keep_datetime, expected).equal

    @pytest.mark.parametrize(
        "start, prediction_hours, expected_hours",
        [
            ("2024-11-10 00:00:00", 24, 24),  # No DST in Germany
            ("2024-08-10 00:00:00", 24, 24),  # DST in Germany
            ("2024-03-31 00:00:00", 24, 23),  # DST change in Germany (23-hour day)
            ("2024-10-27 00:00:00", 24, 25),  # DST change in Germany (25-hour day)
        ],
    )
    def test_total_hours(self, container, start, prediction_hours, expected_hours):
        """Test the `total_hours` property."""
        ems_eos = get_ems()
        ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
        settings = {
            "prediction_hours": prediction_hours,
        }
        container.config.merge_settings_from_dict(settings)
        assert container.total_hours == expected_hours

    @pytest.mark.parametrize(
        "start, historic_hours, expected_hours",
        [
            ("2024-11-10 00:00:00", 24, 24),  # No DST in Germany
            ("2024-08-10 00:00:00", 24, 24),  # DST in Germany
            ("2024-04-01 00:00:00", 24, 24),  # DST change on 2024-03-31 in Germany (23-hour day)
            ("2024-10-28 00:00:00", 24, 24),  # DST change on 2024-10-27 in Germany (25-hour day)
        ],
    )
    def test_keep_hours(self, container, start, historic_hours, expected_hours):
        """Test the `keep_hours` property."""
        ems_eos = get_ems()
        ems_eos.set_start_datetime(to_datetime(start, in_timezone="Europe/Berlin"))
        settings = {
            "prediction_historic_hours": historic_hours,
        }
        container.config.merge_settings_from_dict(settings)
        assert container.keep_hours == expected_hours

    def test_append_provider(self, container):
        assert len(container.providers) == 0
        container.providers.append(DerivedPredictionProvider())
        assert len(container.providers) == 1
        assert isinstance(container.providers[0], DerivedPredictionProvider)

    @pytest.mark.skip(reason="type check not implemented")
    def test_append_provider_invalid_type(self, container):
        with pytest.raises(ValueError, match="must be an instance of PredictionProvider"):
            container.providers.append("not_a_provider")

    def test_getitem_existing_key(self, container_with_providers):
        assert len(container_with_providers.providers) == 1
        # Check that all keys are available (position does not matter)
        for key in ["prediction_value", "date_time"]:
            assert key in list(container_with_providers.keys())
        series = container_with_providers["prediction_value"]
        assert isinstance(series, pd.Series)
        assert series.name == "prediction_value"
        assert series.tolist() == [1.0, 2.0, 3.0]

    def test_getitem_non_existing_key(self, container_with_providers):
        with pytest.raises(KeyError, match="No data found for key 'non_existent_key'"):
            container_with_providers["non_existent_key"]

    def test_setitem_existing_key(self, container_with_providers):
        new_series = container_with_providers["prediction_value"]
        new_series[:] = [4, 5, 6]
        container_with_providers["prediction_value"] = new_series
        series = container_with_providers["prediction_value"]
        assert series.name == "prediction_value"
        assert series.tolist() == [4, 5, 6]

    def test_setitem_invalid_value(self, container_with_providers):
        with pytest.raises(ValueError, match="Value must be an instance of pd.Series"):
            container_with_providers["test_key"] = "not_a_series"

    def test_setitem_non_existing_key(self, container_with_providers):
        new_series = pd.Series([4, 5, 6], name="non_existent_key")
        with pytest.raises(KeyError, match="Key 'non_existent_key' not found"):
            container_with_providers["non_existent_key"] = new_series

    def test_delitem_existing_key(self, container_with_providers):
        del container_with_providers["prediction_value"]
        series = container_with_providers["prediction_value"]
        assert series.name == "prediction_value"
        assert series.tolist() == [None, None, None]

    def test_delitem_non_existing_key(self, container_with_providers):
        with pytest.raises(KeyError, match="Key 'non_existent_key' not found"):
            del container_with_providers["non_existent_key"]

    def test_len(self, container_with_providers):
        assert len(container_with_providers) == 3

    def test_repr(self, container_with_providers):
        representation = repr(container_with_providers)
        assert representation.startswith("DerivedPredictionContainer(")
        assert "DerivedPredictionProvider" in representation

    def test_to_json(self, container_with_providers):
        json_str = container_with_providers.to_json()
        container_other = DerivedPredictionContainer.from_json(json_str)
        assert container_other == container_with_providers

    def test_from_json(self, container_with_providers):
        json_str = container_with_providers.to_json()
        container = DerivedPredictionContainer.from_json(json_str)
        assert isinstance(container, DerivedPredictionContainer)
        assert len(container.providers) == 1
        assert container.providers[0] == container_with_providers.providers[0]

    def test_provider_by_id(self, container_with_providers):
        provider = container_with_providers.provider_by_id("DerivedPredictionProvider")
        assert isinstance(provider, DerivedPredictionProvider)
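test_singleton_behavior (and the test_singleton_instance tests in the provider modules) assert that constructing a provider twice yields the same object. A minimal sketch of one way to get that behavior, via a class-level instance cache in __new__; how PredictionProvider actually enforces it is not shown in this diff:

class SingletonSketch:
    _instance = None

    def __new__(cls):
        # Reuse the cached instance instead of allocating a new one.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance


assert SingletonSketch() is SingletonSketch()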
tests/test_pv_forecast.py (deleted file, 286 lines)
@@ -1,286 +0,0 @@
"""Test Module for PV Power Forecasting Module.

This test module is designed to verify the functionality of the `PVForecast` class
and its methods in the `prediction.pv_forecast` module. The tests include validation for
forecast data processing, updating AC power measurements, retrieving forecast data,
and caching behavior.

Fixtures:
    sample_forecast_data: Provides sample forecast data in JSON format for testing.
    pv_forecast_instance: Provides an instance of the `PVForecast` class with sample data loaded.

Test Cases:
    - test_generate_cache_filename: Verifies correct cache filename generation based on URL and date.
    - test_update_ac_power_measurement: Tests updating the AC power measurement for a matching date.
    - test_update_ac_power_measurement_no_match: Ensures no updates occur when there is no matching date.
    - test_get_temperature_forecast_for_date: Tests retrieving the temperature forecast for a specific date.
    - test_get_pv_forecast_for_date_range: Verifies retrieval of the AC power forecast for a specified date range.
    - test_get_forecast_dataframe: Ensures forecast data can be correctly converted into a Pandas DataFrame.
    - test_cache_loading: Tests loading forecast data from a cached file to ensure caching works as expected.

Usage:
    This test module uses `pytest` and requires the `akkudoktoreos.prediction.pv_forecast.py` module to be present.
    Run the tests using the command: `pytest test_pv_forecast.py`.
"""

import json
import sys
from datetime import datetime, timedelta
from pathlib import Path

import pytest

from akkudoktoreos.prediction.pv_forecast import PVForecast, validate_pv_forecast_data
from akkudoktoreos.utils.datetimeutil import to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_PV_FORECAST_INPUT_1 = DIR_TESTDATA.joinpath("pv_forecast_input_1.json")
FILE_TESTDATA_PV_FORECAST_RESULT_1 = DIR_TESTDATA.joinpath("pv_forecast_result_1.txt")


@pytest.fixture
def sample_forecast_data():
    """Fixture that returns sample forecast data."""
    with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r") as f_in:
        input_data = json.load(f_in)
    return input_data


@pytest.fixture
def sample_forecast_report():
    """Fixture that returns a sample forecast data report."""
    with open(FILE_TESTDATA_PV_FORECAST_RESULT_1, "r") as f_res:
        input_data = f_res.read()
    return input_data


@pytest.fixture
def sample_forecast_start(sample_forecast_data):
    """Fixture that returns the start date of the sample forecast data."""
    forecast_start_str = sample_forecast_data["values"][0][0]["datetime"]
    assert forecast_start_str == "2024-10-06T00:00:00.000+02:00"

    timezone_name = sample_forecast_data["meta"]["timezone"]
    assert timezone_name == "Europe/Berlin"

    forecast_start = to_datetime(forecast_start_str, to_timezone=timezone_name, to_naiv=True)
    assert forecast_start == datetime(2024, 10, 6)

    return forecast_start


@pytest.fixture
def pv_forecast_empty_instance():
    """Fixture that returns an empty instance of PVForecast."""
    empty_instance = PVForecast()
    assert empty_instance.get_forecast_start() is None

    return empty_instance


@pytest.fixture
def pv_forecast_instance(sample_forecast_data, sample_forecast_start):
    """Fixture that returns an instance of PVForecast with sample data loaded."""
    pv_forecast = PVForecast(
        data=sample_forecast_data,
        forecast_start=sample_forecast_start,
        prediction_hours=48,
    )
    return pv_forecast


def test_validate_pv_forecast_data(sample_forecast_data):
    """Test validation of PV forecast data on sample data."""
    ret = validate_pv_forecast_data({})
    assert ret is None

    ret = validate_pv_forecast_data(sample_forecast_data)
    assert ret == "Akkudoktor"


def test_process_data(sample_forecast_data, sample_forecast_start):
    """Test data processing using sample data."""
    pv_forecast_instance = PVForecast(forecast_start=sample_forecast_start)

    # Assure the start date is correctly set by the init function
    forecast_start = pv_forecast_instance.get_forecast_start()
    expected_start = sample_forecast_start
    assert forecast_start == expected_start

    # Assure the prediction hours are unset
    assert pv_forecast_instance.prediction_hours is None

    # Load forecast with sample data - throws exceptions on error
    pv_forecast_instance.process_data(data=sample_forecast_data)


def test_update_ac_power_measurement(pv_forecast_instance, sample_forecast_start):
    """Test updating the AC power measurement for a specific date."""
    forecast_start = pv_forecast_instance.get_forecast_start()
    assert forecast_start == sample_forecast_start

    updated = pv_forecast_instance.update_ac_power_measurement(1000, forecast_start)
    assert updated is True
    forecast_data = pv_forecast_instance.get_forecast_data()
    assert forecast_data[0].ac_power_measurement == 1000


def test_update_ac_power_measurement_no_match(pv_forecast_instance):
    """Test updating the AC power measurement where no date matches."""
    date_time = datetime(2023, 10, 2, 1, 0, 0)
    updated = pv_forecast_instance.update_ac_power_measurement(1000, date_time)
    assert not updated


def test_get_temperature_forecast_for_date(pv_forecast_instance, sample_forecast_start):
    """Test fetching the temperature forecast for a specific date."""
    forecast_temps = pv_forecast_instance.get_temperature_forecast_for_date(sample_forecast_start)
    assert len(forecast_temps) == 24
    assert forecast_temps[0] == 7.0
    assert forecast_temps[1] == 6.5
    assert forecast_temps[2] == 6.0

    # Assure the function bails out if there is no timezone name available for the system.
    tz_name = pv_forecast_instance._tz_name
    pv_forecast_instance._tz_name = None
    with pytest.raises(Exception) as exc_info:
        forecast_temps = pv_forecast_instance.get_temperature_forecast_for_date(
            sample_forecast_start
        )
    pv_forecast_instance._tz_name = tz_name
    assert (
        exc_info.value.args[0] == "Processing without PV system timezone info ist not implemented!"
    )


def test_get_temperature_for_date_range(pv_forecast_instance, sample_forecast_start):
    """Test fetching the temperature forecast for a specific date range."""
    end_date = sample_forecast_start + timedelta(hours=24)
    forecast_temps = pv_forecast_instance.get_temperature_for_date_range(
        sample_forecast_start, end_date
    )
    assert len(forecast_temps) == 48
    assert forecast_temps[0] == 7.0
    assert forecast_temps[1] == 6.5
    assert forecast_temps[2] == 6.0

    # Assure the function bails out if there is no timezone name available for the system.
    tz_name = pv_forecast_instance._tz_name
    pv_forecast_instance._tz_name = None
    with pytest.raises(Exception) as exc_info:
        forecast_temps = pv_forecast_instance.get_temperature_for_date_range(
            sample_forecast_start, end_date
        )
    pv_forecast_instance._tz_name = tz_name
    assert (
        exc_info.value.args[0] == "Processing without PV system timezone info ist not implemented!"
    )


def test_get_forecast_for_date_range(pv_forecast_instance, sample_forecast_start):
    """Test fetching the AC power forecast for a specific date range."""
    end_date = sample_forecast_start + timedelta(hours=24)
    forecast = pv_forecast_instance.get_pv_forecast_for_date_range(sample_forecast_start, end_date)
    assert len(forecast) == 48
    assert forecast[0] == 0.0
    assert forecast[1] == 0.0
    assert forecast[2] == 0.0

    # Assure the function bails out if there is no timezone name available for the system.
    tz_name = pv_forecast_instance._tz_name
    pv_forecast_instance._tz_name = None
    with pytest.raises(Exception) as exc_info:
        forecast = pv_forecast_instance.get_pv_forecast_for_date_range(
            sample_forecast_start, end_date
        )
    pv_forecast_instance._tz_name = tz_name
    assert (
        exc_info.value.args[0] == "Processing without PV system timezone info ist not implemented!"
    )


def test_get_forecast_dataframe(pv_forecast_instance):
    """Test converting forecast data to a DataFrame."""
    df = pv_forecast_instance.get_forecast_dataframe()
    assert len(df) == 288
    assert list(df.columns) == ["date_time", "dc_power", "ac_power", "windspeed_10m", "temperature"]
    assert df.iloc[0]["dc_power"] == 0.0
    assert df.iloc[1]["ac_power"] == 0.0
    assert df.iloc[2]["temperature"] == 6.0


def test_load_data_from_file(server, pv_forecast_empty_instance):
    """Test loading data from a file."""
    # Load from a valid file path
    filepath = FILE_TESTDATA_PV_FORECAST_INPUT_1
    data = pv_forecast_empty_instance.load_data_from_file(filepath)
    assert len(data) > 0


def test_load_data_from_url(server, pv_forecast_empty_instance):
    """Test loading data from a URL."""
    # Load from a valid address of our server
    url = f"{server}/gesamtlast_simple?year_energy=2000&"
    data = pv_forecast_empty_instance.load_data_from_url(url)
    assert len(data) > 0

    # Load from an invalid address of our server
    url = f"{server}/invalid?"
    data = pv_forecast_empty_instance.load_data_from_url(url)
    assert data == f"Failed to load data from `{url}`. Status Code: 404"


def test_load_data_from_url_with_caching(
    server, pv_forecast_empty_instance, sample_forecast_data, sample_forecast_start
):
    """Test loading data from a URL with caching."""
    # Load from a valid address of our server
    url = f"{server}/gesamtlast_simple?year_energy=2000&"
    data = pv_forecast_empty_instance.load_data_from_url_with_caching(url)
    assert len(data) > 0

    # Load from an invalid address of our server
    url = f"{server}/invalid?"
    data = pv_forecast_empty_instance.load_data_from_url_with_caching(url)
    assert data == f"Failed to load data from `{url}`. Status Code: 404"


def test_report_ac_power_and_measurement(pv_forecast_instance, sample_forecast_report):
    """Test reporting."""
    report = pv_forecast_instance.report_ac_power_and_measurement()
    assert report == sample_forecast_report


@pytest.mark.skipif(
    sys.platform.startswith("win"), reason="'other_timezone' fixture not supported on Windows"
)
def test_timezone_behaviour(
    pv_forecast_instance, sample_forecast_report, sample_forecast_start, other_timezone
):
    """Test PVForecast in another timezone."""
    current_time = datetime.now()

    # Test updating the AC power measurement for a specific date.
    date_time = pv_forecast_instance.get_forecast_start()
    assert date_time == sample_forecast_start
    updated = pv_forecast_instance.update_ac_power_measurement(1000, date_time)
    assert updated is True
    forecast_data = pv_forecast_instance.get_forecast_data()
    assert forecast_data[0].ac_power_measurement == 1000

    # Test fetching the temperature forecast for a specific date.
    forecast_temps = pv_forecast_instance.get_temperature_forecast_for_date(sample_forecast_start)
    assert len(forecast_temps) == 24
    assert forecast_temps[0] == 7.0
    assert forecast_temps[1] == 6.5
    assert forecast_temps[2] == 6.0

    # Test fetching the AC power forecast
    end_date = sample_forecast_start + timedelta(hours=24)
    forecast = pv_forecast_instance.get_pv_forecast_for_date_range(sample_forecast_start, end_date)
    assert len(forecast) == 48
    assert forecast[0] == 1000.0  # changed before
    assert forecast[1] == 0.0
    assert forecast[2] == 0.0
tests/test_pvforecastakkudoktor.py (new file, 307 lines)
@@ -0,0 +1,307 @@
import sys
from pathlib import Path
from unittest.mock import Mock, patch

import pytest

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.prediction import get_prediction
from akkudoktoreos.prediction.pvforecastakkudoktor import (
    AkkudoktorForecastHorizon,
    AkkudoktorForecastMeta,
    AkkudoktorForecastValue,
    PVForecastAkkudoktor,
    PVForecastAkkudoktorDataRecord,
)
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_PV_FORECAST_INPUT_1 = DIR_TESTDATA.joinpath("pv_forecast_input_1.json")
FILE_TESTDATA_PV_FORECAST_RESULT_1 = DIR_TESTDATA.joinpath("pv_forecast_result_1.txt")

config_eos = get_config()
ems_eos = get_ems()


@pytest.fixture
def sample_settings(reset_config):
    """Fixture that adds settings data to the global config."""
    settings = {
        "prediction_hours": 48,
        "prediction_historic_hours": 24,
        "latitude": 52.52,
        "longitude": 13.405,
        "pvforecast_provider": "PVForecastAkkudoktor",
        "pvforecast0_peakpower": 5.0,
        "pvforecast0_surface_azimuth": -10,
        "pvforecast0_surface_tilt": 7,
        "pvforecast0_userhorizon": [20, 27, 22, 20],
        "pvforecast0_inverter_paco": 10000,
        "pvforecast1_peakpower": 4.8,
        "pvforecast1_surface_azimuth": -90,
        "pvforecast1_surface_tilt": 7,
        "pvforecast1_userhorizon": [30, 30, 30, 50],
        "pvforecast1_inverter_paco": 10000,
        "pvforecast2_peakpower": 1.4,
        "pvforecast2_surface_azimuth": -40,
        "pvforecast2_surface_tilt": 60,
        "pvforecast2_userhorizon": [60, 30, 0, 30],
        "pvforecast2_inverter_paco": 2000,
        "pvforecast3_peakpower": 1.6,
        "pvforecast3_surface_azimuth": 5,
        "pvforecast3_surface_tilt": 45,
        "pvforecast3_userhorizon": [45, 25, 30, 60],
        "pvforecast3_inverter_paco": 1400,
        "pvforecast4_peakpower": None,
    }

    # Merge settings into config
    config_eos.merge_settings_from_dict(settings)
    return config_eos


@pytest.fixture
def sample_forecast_data():
    """Fixture that returns sample forecast data converted to pydantic model."""
    with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r") as f_in:
        input_data = f_in.read()
    return PVForecastAkkudoktor._validate_data(input_data)


@pytest.fixture
def sample_forecast_data_raw():
    """Fixture that returns raw sample forecast data."""
    with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r") as f_in:
        input_data = f_in.read()
    return input_data


@pytest.fixture
def sample_forecast_report():
    """Fixture that returns a sample forecast data report."""
    with open(FILE_TESTDATA_PV_FORECAST_RESULT_1, "r") as f_res:
        input_data = f_res.read()
    return input_data


@pytest.fixture
def sample_forecast_start(sample_forecast_data):
    """Fixture that returns the start date of the sample forecast data."""
    forecast_start = to_datetime(sample_forecast_data.values[0][0].datetime)
    expected_datetime = to_datetime("2024-10-06T00:00:00.000+02:00")
    assert compare_datetimes(to_datetime(forecast_start), expected_datetime).equal

    timezone_name = sample_forecast_data.meta.timezone
    assert timezone_name == "Europe/Berlin"
    return forecast_start


@pytest.fixture
def provider():
    """Fixture that returns the PVForecastAkkudoktor instance from the prediction."""
    prediction = get_prediction()
    provider = prediction.provider_by_id("PVForecastAkkudoktor")
    assert isinstance(provider, PVForecastAkkudoktor)
    return provider


@pytest.fixture
def provider_empty_instance():
    """Fixture that returns an empty instance of PVForecastAkkudoktor."""
    empty_instance = PVForecastAkkudoktor()
    empty_instance.clear()
    assert len(empty_instance) == 0
    return empty_instance


# Sample data for testing
sample_horizon = AkkudoktorForecastHorizon(altitude=30, azimuthFrom=90, azimuthTo=180)
sample_meta = AkkudoktorForecastMeta(
    lat=52.52,
    lon=13.405,
    power=[5000],
    azimuth=[180],
    tilt=[30],
    timezone="Europe/Berlin",
    albedo=0.25,
    past_days=5,
    inverterEfficiency=0.8,
    powerInverter=[10000],
    cellCoEff=-0.36,
    range=True,
    horizont=[[sample_horizon]],
    horizontString=["sample_horizon"],
)
sample_value = AkkudoktorForecastValue(
    datetime="2024-11-09T12:00:00",
    dcPower=500.0,
    power=480.0,
    sunTilt=30.0,
    sunAzimuth=180.0,
    temperature=15.0,
    relativehumidity_2m=50.0,
    windspeed_10m=10.0,
)
sample_config_data = {
    "prediction_hours": 48,
    "prediction_historic_hours": 24,
    "latitude": 52.52,
    "longitude": 13.405,
    "pvforecast_provider": "PVForecastAkkudoktor",
    "pvforecast0_peakpower": 5.0,
    "pvforecast0_surface_azimuth": 180,
    "pvforecast0_surface_tilt": 30,
    "pvforecast0_inverter_paco": 10000,
}


# Tests for AkkudoktorForecastHorizon
def test_akkudoktor_forecast_horizon():
    horizon = AkkudoktorForecastHorizon(altitude=30, azimuthFrom=90, azimuthTo=180)
    assert horizon.altitude == 30
    assert horizon.azimuthFrom == 90
    assert horizon.azimuthTo == 180


# Tests for AkkudoktorForecastMeta
def test_akkudoktor_forecast_meta():
    meta = sample_meta
    assert meta.lat == 52.52
    assert meta.lon == 13.405
    assert meta.power == [5000]
    assert meta.tilt == [30]
    assert meta.timezone == "Europe/Berlin"


# Tests for AkkudoktorForecastValue
def test_akkudoktor_forecast_value():
    value = sample_value
    assert value.dcPower == 500.0
    assert value.power == 480.0
    assert value.temperature == 15.0
    assert value.windspeed_10m == 10.0


# Tests for PVForecastAkkudoktorDataRecord
def test_pvforecast_akkudoktor_data_record():
    record = PVForecastAkkudoktorDataRecord(
        pvforecastakkudoktor_ac_power_measured=1000.0,
        pvforecastakkudoktor_wind_speed_10m=10.0,
        pvforecastakkudoktor_temp_air=15.0,
    )
    assert record.pvforecastakkudoktor_ac_power_measured == 1000.0
    assert record.pvforecastakkudoktor_wind_speed_10m == 10.0
    assert record.pvforecastakkudoktor_temp_air == 15.0
    assert (
        record.pvforecastakkudoktor_ac_power_any == 1000.0
    )  # Assuming AC power measured is preferred


def test_pvforecast_akkudoktor_validate_data(provider_empty_instance, sample_forecast_data_raw):
    """Test validation of PV forecast data on sample data."""
    with pytest.raises(
        ValueError,
        match="Field: meta\nError: Field required\nType: missing\nField: values\nError: Field required\nType: missing\n",
    ):
        ret = provider_empty_instance._validate_data("{}")
    data = provider_empty_instance._validate_data(sample_forecast_data_raw)
    # everything worked


@patch("requests.get")
def test_pvforecast_akkudoktor_update_with_sample_forecast(
    mock_get, sample_settings, sample_forecast_data_raw, sample_forecast_start, provider
):
    """Test data processing using sample forecast data."""
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = sample_forecast_data_raw
    mock_get.return_value = mock_response

    # Test that update properly inserts data records
    ems_eos.set_start_datetime(sample_forecast_start)
    provider.update_data(force_enable=True, force_update=True)
    assert compare_datetimes(provider.start_datetime, sample_forecast_start).equal
    assert compare_datetimes(provider[0].date_time, to_datetime(sample_forecast_start)).equal


# Report Generation Test
def test_report_ac_power_and_measurement(provider):
    # Set the configuration
    config = get_config()
    config.merge_settings_from_dict(sample_config_data)

    record = PVForecastAkkudoktorDataRecord(
        pvforecastakkudoktor_ac_power_measured=900.0,
        pvforecast_dc_power=450.0,
        pvforecast_ac_power=400.0,
    )
    provider.append(record)

    report = provider.report_ac_power_and_measurement()
    assert "DC: 450.0" in report
    assert "AC: 400.0" in report
    assert "AC sampled: 900.0" in report


@pytest.mark.skipif(
    sys.platform.startswith("win"), reason="'other_timezone' fixture not supported on Windows"
)
@patch("requests.get")
def test_timezone_behaviour(
    mock_get,
    sample_settings,
    sample_forecast_data_raw,
    sample_forecast_start,
    provider,
    set_other_timezone,
):
    """Test PVForecast in another timezone."""
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = sample_forecast_data_raw
    mock_get.return_value = mock_response

    # sample forecast start in other timezone
    other_timezone = set_other_timezone()
    other_start_datetime = to_datetime(sample_forecast_start, in_timezone=other_timezone)
    assert compare_datetimes(other_start_datetime, sample_forecast_start).equal
    expected_datetime = to_datetime("2024-10-06T00:00:00+0200", in_timezone=other_timezone)
    assert compare_datetimes(other_start_datetime, expected_datetime).equal

    provider.clear()
    assert len(provider) == 0
    ems_eos.set_start_datetime(other_start_datetime)
    provider.update_data(force_update=True)
    assert compare_datetimes(provider.start_datetime, other_start_datetime).equal
    # Check whether the first record starts at the requested sample start time
    assert compare_datetimes(provider[0].date_time, sample_forecast_start).equal

    # Test updating AC power measurement for a specific date.
    provider.update_value(sample_forecast_start, "pvforecastakkudoktor_ac_power_measured", 1000)
    # Check whether the first record was filled with the AC power measurement
    assert provider[0].pvforecastakkudoktor_ac_power_measured == 1000

    # Test fetching temperature forecast for a specific date.
    other_end_datetime = other_start_datetime + to_duration("24 hours")
    expected_end_datetime = to_datetime("2024-10-07T00:00:00+0200", in_timezone=other_timezone)
    assert compare_datetimes(other_end_datetime, expected_end_datetime).equal
    forecast_temps = provider.key_to_series(
        "pvforecastakkudoktor_temp_air", other_start_datetime, other_end_datetime
    )
    assert len(forecast_temps) == 24
    assert forecast_temps.iloc[0] == 7.0
    assert forecast_temps.iloc[1] == 6.5
    assert forecast_temps.iloc[2] == 6.0

    # Test fetching AC power forecast
    other_end_datetime = other_start_datetime + to_duration("48 hours")
    forecast_measured = provider.key_to_series(
        "pvforecastakkudoktor_ac_power_measured", other_start_datetime, other_end_datetime
    )
    assert len(forecast_measured) == 48
    assert forecast_measured.iloc[0] == 1000.0  # changed before
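All network-facing tests in this file stub the HTTP layer the same way: patch `requests.get` and hand back a canned payload. A minimal, self-contained sketch of that pattern (the `fetch_forecast` helper below is hypothetical, not part of EOS):

from unittest.mock import Mock, patch

import requests


def fetch_forecast(url: str) -> bytes:
    # Hypothetical helper: fetch raw forecast bytes over HTTP.
    response = requests.get(url)
    return response.content


@patch("requests.get")
def test_fetch_forecast_is_stubbed(mock_get):
    # The canned payload replaces the real network round trip.
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = b'{"meta": {}, "values": []}'
    mock_get.return_value = mock_response

    assert fetch_forecast("https://example.invalid/forecast") == b'{"meta": {}, "values": []}'
    mock_get.assert_called_once()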
tests/test_pvforecastimport.py (new file, 110 lines)
@@ -0,0 +1,110 @@
import json
from pathlib import Path

import pytest

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.pvforecastimport import PVForecastImport
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_PVFORECASTIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.json")

config_eos = get_config()
ems_eos = get_ems()


@pytest.fixture
def pvforecast_provider(reset_config, sample_import_1_json):
    """Fixture to create a PVForecastProvider instance."""
    settings = {
        "pvforecast_provider": "PVForecastImport",
        "pvforecastimport_file_path": str(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON),
        "pvforecastimport_json": json.dumps(sample_import_1_json),
    }
    config_eos.merge_settings_from_dict(settings)
    provider = PVForecastImport()
    assert provider.enabled() == True
    return provider


@pytest.fixture
def sample_import_1_json():
    """Fixture that returns sample import data."""
    with open(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON, "r") as f_res:
        input_data = json.load(f_res)
    return input_data


# ------------------------------------------------
# General forecast
# ------------------------------------------------


def test_singleton_instance(pvforecast_provider):
    """Test that PVForecastImport behaves as a singleton."""
    another_instance = PVForecastImport()
    assert pvforecast_provider is another_instance


def test_invalid_provider(pvforecast_provider):
    """Test requesting an unsupported pvforecast_provider."""
    settings = {
        "pvforecast_provider": "<invalid>",
        "pvforecastimport_file_path": str(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON),
    }
    config_eos.merge_settings_from_dict(settings)
    assert pvforecast_provider.enabled() == False


# ------------------------------------------------
# Import
# ------------------------------------------------


@pytest.mark.parametrize(
    "start_datetime, from_file",
    [
        ("2024-11-10 00:00:00", True),  # No DST in Germany
        ("2024-08-10 00:00:00", True),  # DST in Germany
        ("2024-03-31 00:00:00", True),  # DST change in Germany (23 hours/day)
        ("2024-10-27 00:00:00", True),  # DST change in Germany (25 hours/day)
        ("2024-11-10 00:00:00", False),  # No DST in Germany
        ("2024-08-10 00:00:00", False),  # DST in Germany
        ("2024-03-31 00:00:00", False),  # DST change in Germany (23 hours/day)
        ("2024-10-27 00:00:00", False),  # DST change in Germany (25 hours/day)
    ],
)
def test_import(pvforecast_provider, sample_import_1_json, start_datetime, from_file):
    """Test fetching forecast from import."""
    ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
    if from_file:
        config_eos.pvforecastimport_json = None
        assert config_eos.pvforecastimport_json is None
    else:
        config_eos.pvforecastimport_file_path = None
        assert config_eos.pvforecastimport_file_path is None
    pvforecast_provider.clear()

    # Call the method
    pvforecast_provider.update_data()

    # Assert: Verify the result is as expected
    assert pvforecast_provider.start_datetime is not None
    assert pvforecast_provider.total_hours is not None
    assert compare_datetimes(pvforecast_provider.start_datetime, ems_eos.start_datetime).equal
    values = sample_import_1_json["pvforecast_ac_power"]
    value_datetime_mapping = pvforecast_provider.import_datetimes(len(values))
    for i, mapping in enumerate(value_datetime_mapping):
        assert i < len(pvforecast_provider.records)
        expected_datetime, expected_value_index = mapping
        expected_value = values[expected_value_index]
        result_datetime = pvforecast_provider.records[i].date_time
        result_value = pvforecast_provider.records[i]["pvforecast_ac_power"]

        # print(f"{i}: Expected: {expected_datetime}:{expected_value}")
        # print(f"{i}: Result: {result_datetime}:{result_value}")
        assert compare_datetimes(result_datetime, expected_datetime).equal
        assert result_value == expected_value
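The 23-hour and 25-hour parametrize cases above are the two German DST transition days. A quick standard-library check of those day lengths (a sketch, assuming the tzdata database is available on the platform):

from datetime import datetime, timedelta
from zoneinfo import ZoneInfo


def local_day_hours(year: int, month: int, day: int, tz_name: str = "Europe/Berlin") -> float:
    """Length of the local calendar day in hours, DST-aware."""
    tz = ZoneInfo(tz_name)
    start = datetime(year, month, day, tzinfo=tz)
    end = start + timedelta(days=1)  # next local midnight
    return (end - start).total_seconds() / 3600.0  # difference is computed in UTC


assert local_day_hours(2024, 11, 10) == 24.0  # no DST change
assert local_day_hours(2024, 3, 31) == 23.0  # spring-forward day
assert local_day_hours(2024, 10, 27) == 25.0  # fall-back day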
@@ -1,24 +1,17 @@
 from http import HTTPStatus
-from pathlib import Path
 
 import requests
 
-from akkudoktoreos.config import CONFIG_FILE_NAME, load_config
+from akkudoktoreos.config.config import get_config
+
+config_eos = get_config()
 
 
-def test_fixture_setup(server, tmp_path: Path) -> None:
-    """Test if the fixture sets up the server with the env var."""
-    # validate correct path in server
-    config = load_config(tmp_path, False)
-    assert tmp_path.joinpath(CONFIG_FILE_NAME).is_file()
-    cache = tmp_path / config.directories.cache
-    assert cache.is_dir()
-
-
-def test_server(server, tmp_path: Path):
+def test_server(server):
     """Test the server."""
-    result = requests.get(f"{server}/gesamtlast_simple?year_energy=2000&")
-    assert result.status_code == HTTPStatus.OK
-    config = load_config(tmp_path, False)
-    assert len(result.json()) == config.eos.prediction_hours
+    # validate correct path in server
+    assert config_eos.data_folder_path is not None
+    assert config_eos.data_folder_path.is_dir()
+
+    result = requests.get(f"{server}/config?")
+    assert result.status_code == HTTPStatus.OK
@@ -4,7 +4,7 @@ from pathlib import Path
 import pytest
 from matplotlib.testing.compare import compare_images
 
-from akkudoktoreos.config import AppConfig
+from akkudoktoreos.config.config import get_config
 from akkudoktoreos.visualize import visualisiere_ergebnisse
 
 DIR_TESTDATA = Path(__file__).parent / "testdata"
@@ -15,11 +15,15 @@ DIR_IMAGEDATA = DIR_TESTDATA / "images"
     "fn_in, fn_out, fn_out_base",
     [("visualize_input_1.json", "visualize_output_1.pdf", "visualize_base_output_1.pdf")],
 )
-def test_visualisiere_ergebnisse(fn_in, fn_out, fn_out_base, tmp_config: AppConfig):
+def test_visualisiere_ergebnisse(fn_in, fn_out, fn_out_base):
     with open(DIR_TESTDATA / fn_in, "r") as f:
         input_data = json.load(f)
-    visualisiere_ergebnisse(config=tmp_config, **input_data)
-    output_file: Path = tmp_config.working_dir / tmp_config.directories.output / fn_out
+    visualisiere_ergebnisse(**input_data)
+
+    config = get_config()
+    output_dir = config.data_output_path
+    output_dir.mkdir(parents=True, exist_ok=True)
+    output_file = output_dir.joinpath(fn_out)
+
     assert output_file.is_file()
     assert (
tests/test_weatherbrightsky.py
Normal file
193
tests/test_weatherbrightsky.py
Normal file
@ -0,0 +1,193 @@
|
import json
from pathlib import Path
from unittest.mock import Mock, patch

import pandas as pd
import pytest

from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON = DIR_TESTDATA.joinpath("weatherforecast_brightsky_1.json")
FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON = DIR_TESTDATA.joinpath("weatherforecast_brightsky_2.json")

ems_eos = get_ems()


@pytest.fixture
def weather_provider(monkeypatch):
    """Fixture to create a WeatherProvider instance."""
    monkeypatch.setenv("weather_provider", "BrightSky")
    monkeypatch.setenv("latitude", "50.0")
    monkeypatch.setenv("longitude", "10.0")
    return WeatherBrightSky()


@pytest.fixture
def sample_brightsky_1_json():
    """Fixture that returns sample forecast data."""
    with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "r") as f_res:
        input_data = json.load(f_res)
    return input_data


@pytest.fixture
def sample_brightsky_2_json():
    """Fixture that returns sample forecast data."""
    with open(FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON, "r") as f_res:
        input_data = json.load(f_res)
    return input_data


@pytest.fixture
def cache_store():
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    return CacheFileStore()


# ------------------------------------------------
# General forecast
# ------------------------------------------------


def test_singleton_instance(weather_provider):
    """Test that WeatherForecast behaves as a singleton."""
    another_instance = WeatherBrightSky()
    assert weather_provider is another_instance


def test_invalid_provider(weather_provider, monkeypatch):
    """Test requesting an unsupported weather_provider."""
    monkeypatch.setenv("weather_provider", "<invalid>")
    weather_provider.config.update()
    assert weather_provider.enabled() == False


def test_invalid_coordinates(weather_provider, monkeypatch):
    """Test invalid coordinates raise ValueError."""
    monkeypatch.setenv("latitude", "1000")
    monkeypatch.setenv("longitude", "1000")
    with pytest.raises(
        ValueError,  # match="Latitude '1000' and/ or longitude `1000` out of valid range."
    ):
        weather_provider.config.update()


# ------------------------------------------------
# Irradiance calculation
# ------------------------------------------------


def test_irridiance_estimate_from_cloud_cover(weather_provider):
    """Test cloud cover to irradiance estimation."""
    cloud_cover_data = pd.Series(
        data=[20, 50, 80], index=pd.date_range("2023-10-22", periods=3, freq="h")
    )

    ghi, dni, dhi = weather_provider.estimate_irradiance_from_cloud_cover(
        50.0, 10.0, cloud_cover_data
    )

    assert ghi == [0, 0, 0]
    assert dhi == [0, 0, 0]
    assert dni == [0, 0, 0]


# ------------------------------------------------
# BrightSky
# ------------------------------------------------


@patch("requests.get")
def test_request_forecast(mock_get, weather_provider, sample_brightsky_1_json):
    """Test requesting forecast from BrightSky."""
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = json.dumps(sample_brightsky_1_json)
    mock_get.return_value = mock_response

    # Preset, as this is usually done by update()
    weather_provider.config.update()

    # Test function
    brightsky_data = weather_provider._request_forecast()

    assert isinstance(brightsky_data, dict)
    assert brightsky_data["weather"][0] == {
        "timestamp": "2024-10-26T00:00:00+02:00",
        "source_id": 46567,
        "precipitation": 0.0,
        "pressure_msl": 1022.9,
        "sunshine": 0.0,
        "temperature": 6.2,
        "wind_direction": 40,
        "wind_speed": 4.7,
        "cloud_cover": 100,
        "dew_point": 5.8,
        "relative_humidity": 97,
        "visibility": 140,
        "wind_gust_direction": 70,
        "wind_gust_speed": 11.9,
        "condition": "dry",
        "precipitation_probability": None,
        "precipitation_probability_6h": None,
        "solar": None,
        "fallback_source_ids": {
            "wind_gust_speed": 219419,
            "pressure_msl": 219419,
            "cloud_cover": 219419,
            "wind_gust_direction": 219419,
            "wind_direction": 219419,
            "wind_speed": 219419,
            "sunshine": 219419,
            "visibility": 219419,
        },
        "icon": "cloudy",
    }


@patch("requests.get")
def test_update_data(mock_get, weather_provider, sample_brightsky_1_json, cache_store):
    """Test fetching forecast from BrightSky."""
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = json.dumps(sample_brightsky_1_json)
    mock_get.return_value = mock_response

    cache_store.clear(clear_all=True)

    # Call the method
    ems_eos.set_start_datetime(to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin"))
    weather_provider.update_data(force_enable=True, force_update=True)

    # Assert: Verify the result is as expected
    mock_get.assert_called_once()
    assert len(weather_provider) == 338

    # with open(FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON, "w") as f_out:
    #     f_out.write(weather_provider.to_json())


# ------------------------------------------------
# Development BrightSky
# ------------------------------------------------


@pytest.mark.skip(reason="For development only")
def test_brightsky_development_forecast_data(weather_provider):
    """Fetch data from the real BrightSky server."""
    # Preset, as this is usually done by update_data()
    weather_provider.start_datetime = to_datetime("2024-10-26 00:00:00")
    weather_provider.latitude = 50.0
    weather_provider.longitude = 10.0

    brightsky_data = weather_provider._request_forecast()

    with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "w") as f_out:
        json.dump(brightsky_data, f_out, indent=4)
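The weather_provider fixture above configures the provider purely through environment variables. As a minimal standalone sketch of that pytest monkeypatch pattern (the `read_latitude` helper is hypothetical, not EOS API):

import os


def read_latitude() -> float:
    # Hypothetical config reader: pull a float setting from the environment.
    return float(os.environ["latitude"])


def test_read_latitude(monkeypatch):
    monkeypatch.setenv("latitude", "50.0")  # undone automatically after the test
    assert read_latitude() == 50.0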
tests/test_weatherclearoutside.py (new file, 569 lines)
@@ -0,0 +1,569 @@
import re
from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import Mock, patch

import numpy as np
import pandas as pd
import pvlib
import pytest
from bs4 import BeautifulSoup

from akkudoktoreos.config.config import get_config
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime

DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_HTML = DIR_TESTDATA.joinpath("weatherforecast_clearout_1.html")
FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA = DIR_TESTDATA.joinpath("weatherforecast_clearout_1.json")

config_eos = get_config()
ems_eos = get_ems()


@pytest.fixture
def weather_provider():
    """Fixture to create a WeatherProvider instance."""
    settings = {
        "weather_provider": "ClearOutside",
        "latitude": 50.0,
        "longitude": 10.0,
    }
    config_eos.merge_settings_from_dict(settings)
    return WeatherClearOutside()


@pytest.fixture
def sample_clearout_1_html():
    """Fixture that returns sample forecast data."""
    with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_HTML, "r") as f_res:
        input_data = f_res.read()
    return input_data


@pytest.fixture
def sample_clearout_1_data():
    """Fixture that returns sample forecast data."""
    with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "r") as f_in:
        json_str = f_in.read()
    data = WeatherClearOutside.from_json(json_str)
    return data


@pytest.fixture
def cache_store():
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    return CacheFileStore()


# ------------------------------------------------
# General WeatherProvider
# ------------------------------------------------


def test_singleton_instance(weather_provider):
    """Test that WeatherForecast behaves as a singleton."""
    another_instance = WeatherClearOutside()
    assert weather_provider is another_instance


def test_invalid_provider(weather_provider):
    """Test requesting an unsupported weather_provider."""
    settings = {
        "weather_provider": "<invalid>",
    }
    config_eos.merge_settings_from_dict(settings)
    assert weather_provider.enabled() == False


def test_invalid_coordinates(weather_provider):
    """Test invalid coordinates raise ValueError."""
    settings = {
        "weather_provider": "ClearOutside",
        "latitude": 1000.0,
        "longitude": 1000.0,
    }
    with pytest.raises(
        ValueError,  # match="Latitude '1000' and/ or longitude `1000` out of valid range."
    ):
        config_eos.merge_settings_from_dict(settings)


# ------------------------------------------------
# Irradiance calculation
# ------------------------------------------------


def test_irridiance_estimate_from_cloud_cover(weather_provider):
    """Test cloud cover to irradiance estimation."""
    cloud_cover_data = pd.Series(
        data=[20, 50, 80], index=pd.date_range("2023-10-22", periods=3, freq="h")
    )

    ghi, dni, dhi = weather_provider.estimate_irradiance_from_cloud_cover(
        50.0, 10.0, cloud_cover_data
    )

    assert ghi == [0, 0, 0]
    assert dhi == [0, 0, 0]
    assert dni == [0, 0, 0]


# ------------------------------------------------
# ClearOutside
# ------------------------------------------------


@patch("requests.get")
def test_request_forecast(mock_get, weather_provider, sample_clearout_1_html):
    """Test fetching forecast from ClearOutside."""
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = sample_clearout_1_html
    mock_get.return_value = mock_response

    # Preset, as this is usually done by update()
    config_eos.update()

    # Test function
    response = weather_provider._request_forecast()

    assert response.status_code == 200
    assert response.content == sample_clearout_1_html


@patch("requests.get")
def test_update_data(mock_get, weather_provider, sample_clearout_1_html, sample_clearout_1_data):
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = sample_clearout_1_html
    mock_get.return_value = mock_response

    expected_start = to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin")
    expected_end = to_datetime("2024-10-28 00:00:00", in_timezone="Europe/Berlin")
    expected_keep = to_datetime("2024-10-24 00:00:00", in_timezone="Europe/Berlin")

    # Call the method
    ems_eos.set_start_datetime(expected_start)
    weather_provider.update_data()

    # Check for correct prediction time window
    assert weather_provider.config.prediction_hours == 48
    assert weather_provider.config.prediction_historic_hours == 48
    assert compare_datetimes(weather_provider.start_datetime, expected_start).equal
    assert compare_datetimes(weather_provider.end_datetime, expected_end).equal
    assert compare_datetimes(weather_provider.keep_datetime, expected_keep).equal

    # Verify the data
    assert len(weather_provider) == 165  # 6 days, 24 hours per day - 7th day 21 hours

    # Check that specific values match the expected output
    # for i, record in enumerate(weather_data.records):
    #     # Compare datetime and specific values
    #     assert record.datetime == sample_clearout_1_data.records[i].datetime
    #     assert record.data['total_clouds'] == sample_clearout_1_data.records[i].data['total_clouds']
    #     # Check additional weather attributes as necessary


@pytest.mark.skip(reason="Test fixture to be improved")
@patch("requests.get")
def test_cache_forecast(mock_get, weather_provider, sample_clearout_1_html, cache_store):
    """Test that ClearOutside forecast data is cached with TTL.

    This can not be tested with mock_get. Mock objects are not picklable and therefore can not be
    cached to a file. Keep it for documentation.
    """
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = sample_clearout_1_html
    mock_get.return_value = mock_response

    cache_store.clear(clear_all=True)

    weather_provider.update_data()
    mock_get.assert_called_once()
    forecast_data_first = weather_provider.to_json()

    weather_provider.update_data()
    forecast_data_second = weather_provider.to_json()
    # Verify that the cache returns the same object without calling the method again
    assert forecast_data_first == forecast_data_second
    # A mock object is not picklable and therefore can not be cached to file
    assert mock_get.call_count == 2


# ------------------------------------------------
# Development ClearOutside
# ------------------------------------------------


@pytest.mark.skip(reason="For development only")
@patch("requests.get")
def test_development_forecast_data(mock_get, weather_provider, sample_clearout_1_html):
    # Mock response object
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = sample_clearout_1_html
    mock_get.return_value = mock_response

    # Fill the instance
    weather_provider.update_data(force_enable=True)

    with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "w") as f_out:
        f_out.write(weather_provider.to_json())


@pytest.mark.skip(reason="For development only")
def test_clearoutsides_development_scraper(weather_provider, sample_clearout_1_html):
    """Test scraping from ClearOutside."""
    soup = BeautifulSoup(sample_clearout_1_html, "html.parser")

    # Sample was created for the location
    lat = 50.0
    lon = 10.0

    # Find generation data
    p_generated = soup.find("h2", string=lambda text: text and text.startswith("Generated:"))
    assert p_generated is not None

    # Extract forecast start and end dates
    forecast_pattern = r"Forecast: (\d{2}/\d{2}/\d{2}) to (\d{2}/\d{2}/\d{2})"
    forecast_match = re.search(forecast_pattern, p_generated.get_text())
    if forecast_match:
        forecast_start_date = forecast_match.group(1)
        forecast_end_date = forecast_match.group(2)
    else:
        assert False
    assert forecast_start_date == "26/10/24"
    assert forecast_end_date == "01/11/24"

    # Extract timezone offset
    timezone_pattern = r"Timezone: UTC([+-]\d+)\.(\d+)"
    timezone_match = re.search(timezone_pattern, p_generated.get_text())
    if timezone_match:
        hours = int(timezone_match.group(1))
        assert hours == 2
        # Convert the decimal part to minutes (e.g., .50 -> 30 minutes)
        minutes = int(timezone_match.group(2)) * 6  # Multiply by 6 to convert to minutes
        assert minutes == 0

        # Create the timezone object using timedelta for the offset
        forecast_timezone = timezone(timedelta(hours=hours, minutes=minutes))
    else:
        assert False

    forecast_start_datetime = to_datetime(
        forecast_start_date, in_timezone=forecast_timezone, to_naiv=False, to_maxtime=False
    )
    assert forecast_start_datetime == datetime(2024, 10, 26, 0, 0)

    # Find all paragraphs with id 'day_<x>'. There should be seven.
    p_days = soup.find_all(id=re.compile(r"day_[0-9]"))
    assert len(p_days) == 7
    p_day = p_days[0]

    # Within the day_x paragraph find the detail labels
    p_detail_labels = p_day.find_all(class_="fc_detail_label")
    detail_names = [p.get_text() for p in p_detail_labels]

    assert detail_names == [
        "Total Clouds (% Sky Obscured)",
        "Low Clouds (% Sky Obscured)",
        "Medium Clouds (% Sky Obscured)",
        "High Clouds (% Sky Obscured)",
        "ISS Passover",
        "Visibility (miles)",
        "Fog (%)",
        "Precipitation Type",
        "Precipitation Probability (%)",
        "Precipitation Amount (mm)",
        "Wind Speed/Direction (mph)",
        "Chance of Frost",
        "Temperature (°C)",
        "Feels Like (°C)",
        "Dew Point (°C)",
        "Relative Humidity (%)",
        "Pressure (mb)",
        "Ozone (du)",
    ]

    # Find all the paragraphs that are associated to the details.
    # Beware: there is one ul paragraph before that is not associated to a detail.
    p_detail_tables = p_day.find_all("ul")
    assert len(p_detail_tables) == len(detail_names) + 1
    p_detail_tables.pop(0)

    # Create clearout data
    clearout_data = {}
    # Add data values
    for i, detail_name in enumerate(detail_names):
        p_detail_values = p_detail_tables[i].find_all("li")
        detail_data = []
        for p_detail_value in p_detail_values:
            if (
                detail_name in ("Precipitation Type", "Chance of Frost")
                and hasattr(p_detail_value, "title")
                and p_detail_value.title
            ):
                value_str = p_detail_value.title.string
            else:
                value_str = p_detail_value.get_text()
            try:
                value = float(value_str)
            except ValueError:
                value = value_str
            detail_data.append(value)
        assert len(detail_data) == 24
        clearout_data[detail_name] = detail_data

    assert clearout_data["Temperature (°C)"] == [
        14.0, 14.0, 13.0, 12.0, 11.0, 11.0, 10.0, 10.0, 9.0, 9.0, 9.0, 9.0,
        9.0, 10.0, 9.0, 9.0, 10.0, 11.0, 13.0, 14.0, 15.0, 16.0, 16.0, 16.0,
    ]
    assert clearout_data["Relative Humidity (%)"] == [
        59.0, 68.0, 75.0, 81.0, 84.0, 85.0, 85.0, 91.0, 91.0, 93.0, 93.0, 93.0,
        93.0, 93.0, 95.0, 95.0, 93.0, 87.0, 81.0, 76.0, 70.0, 66.0, 66.0, 69.0,
    ]
    assert clearout_data["Wind Speed/Direction (mph)"] == [
        7.0, 6.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 3.0, 3.0, 3.0, 2.0,
        1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 4.0, 5.0, 6.0, 6.0, 5.0, 5.0,
    ]

    # Add datetimes of the scraped data
    clearout_data["DateTime"] = [forecast_start_datetime + timedelta(hours=i) for i in range(24)]
    detail_names.append("DateTime")

    assert len(clearout_data["DateTime"]) == 24
    assert clearout_data["DateTime"][0] == to_datetime(
        "2024-10-26 00:00:00", in_timezone=forecast_timezone
    )
    assert clearout_data["DateTime"][23] == to_datetime(
        "2024-10-26 23:00:00", in_timezone=forecast_timezone
    )

    # Converting the cloud cover into Global Horizontal Irradiance (GHI) with a PVLib method
    offset = 35  # The default
    offset_fraction = offset / 100.0  # Adjust percentage to scaling factor
    cloud_cover = pd.Series(clearout_data["Total Clouds (% Sky Obscured)"])

    # Convert datetime list to a pandas DatetimeIndex
    cloud_cover_times = pd.DatetimeIndex(clearout_data["DateTime"])

    # Create a location object
    location = pvlib.location.Location(latitude=lat, longitude=lon)

    # Get solar position and clear-sky GHI using the Ineichen model
    solpos = location.get_solarposition(cloud_cover_times)
    clear_sky = location.get_clearsky(cloud_cover_times, model="ineichen")

    # Convert cloud cover percentage to a scaling factor
    cloud_cover_fraction = np.array(cloud_cover) / 100.0

    # Calculate adjusted GHI with proportional offset adjustment
    adjusted_ghi = clear_sky["ghi"] * (
        offset_fraction + (1 - offset_fraction) * (1 - cloud_cover_fraction)
    )
    adjusted_ghi.fillna(0.0, inplace=True)

    # Apply the DISC model to estimate Direct Normal Irradiance (DNI) from adjusted GHI
    disc_output = pvlib.irradiance.disc(adjusted_ghi, solpos["zenith"], cloud_cover_times)
    adjusted_dni = disc_output["dni"]
    adjusted_dni.fillna(0.0, inplace=True)

    # Calculate Diffuse Horizontal Irradiance (DHI) as DHI = GHI - DNI * cos(zenith)
    zenith_rad = np.radians(solpos["zenith"])
    adjusted_dhi = adjusted_ghi - adjusted_dni * np.cos(zenith_rad)
    adjusted_dhi.fillna(0.0, inplace=True)

    # Add GHI, DNI, DHI to clearout data
    clearout_data["Global Horizontal Irradiance (W/m2)"] = adjusted_ghi.to_list()
    detail_names.append("Global Horizontal Irradiance (W/m2)")
    clearout_data["Direct Normal Irradiance (W/m2)"] = adjusted_dni.to_list()
    detail_names.append("Direct Normal Irradiance (W/m2)")
    clearout_data["Diffuse Horizontal Irradiance (W/m2)"] = adjusted_dhi.to_list()
    detail_names.append("Diffuse Horizontal Irradiance (W/m2)")

    assert clearout_data["Global Horizontal Irradiance (W/m2)"] == [
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        24.291000436601216, 85.88494154645998, 136.09269403109946,
        139.26925350542064, 146.7174434892616, 149.0167479382964,
        138.97458866666065, 103.47132353697396, 46.81279774519421,
        0.12972168074047014,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
    ]
    assert clearout_data["Direct Normal Irradiance (W/m2)"] == [
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        10.19687368654253, 0.0, 0.0,
        2.9434862632289804, 9.621272744657047, 9.384995789935898,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
    ]
    assert clearout_data["Diffuse Horizontal Irradiance (W/m2)"] == [
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        24.291000436601216, 85.88494154645998, 132.32210426501337,
        139.26925350542064, 146.7174434892616, 147.721968406295,
        135.32240392326145, 100.82522311704261, 46.81279774519421,
        0.12972168074047014,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
    ]

    # Precipitable Water (PWAT) with a PVLib method
    clearout_data["Preciptable Water (cm)"] = pvlib.atmosphere.gueymard94_pw(
        pd.Series(data=clearout_data["Temperature (°C)"]),
        pd.Series(data=clearout_data["Relative Humidity (%)"]),
    ).to_list()
    detail_names.append("Preciptable Water (cm)")

    assert clearout_data["Preciptable Water (cm)"] == [
        1.5345406562673334, 1.7686231292572652, 1.8354895631381385, 1.8651290310892348,
        1.8197998755611786, 1.8414641597940502, 1.7325709431177607, 1.8548700685143087,
        1.7453005409540279, 1.783658794601369, 1.783658794601369, 1.783658794601369,
        1.783658794601369, 1.8956364436464912, 1.8220170482487101, 1.8220170482487101,
        1.8956364436464912, 1.8847927282597918, 1.9823287281891897, 1.9766964385816497,
        1.9346943880237457, 1.9381315133101413, 1.9381315133101413, 2.026228400278784,
    ]
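The scraper walk-through above folds cloud cover into irradiance with a proportional offset: with offset fraction f and cloud fraction c, clear-sky GHI is scaled by f + (1 - f)(1 - c), so a fully overcast sky still passes f of the clear-sky value. A sketch of just that adjustment, isolated from pvlib (offset=35 mirrors the default used above):

import numpy as np


def adjust_ghi(clear_sky_ghi: np.ndarray, cloud_cover_pct: np.ndarray, offset: float = 35.0) -> np.ndarray:
    """Scale clear-sky GHI by cloud cover; full clouds still pass `offset` percent."""
    f = offset / 100.0
    c = np.asarray(cloud_cover_pct) / 100.0
    return np.asarray(clear_sky_ghi) * (f + (1.0 - f) * (1.0 - c))


# A fully overcast sky keeps 35 % of the clear-sky value:
assert np.isclose(adjust_ghi(np.array([100.0]), np.array([100.0]))[0], 35.0)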
tests/test_weatherimport.py (new file, 110 lines)
@@ -0,0 +1,110 @@
|
|||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from akkudoktoreos.config.config import get_config
|
||||||
|
from akkudoktoreos.core.ems import get_ems
|
||||||
|
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
||||||
|
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
|
||||||
|
|
||||||
|
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
|
||||||
|
|
||||||
|
FILE_TESTDATA_WEATHERIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.json")
|
||||||
|
|
||||||
|
config_eos = get_config()
|
||||||
|
ems_eos = get_ems()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def weather_provider(reset_config, sample_import_1_json):
|
||||||
|
"""Fixture to create a WeatherProvider instance."""
|
||||||
|
settings = {
|
||||||
|
"weather_provider": "WeatherImport",
|
||||||
|
"weatherimport_file_path": str(FILE_TESTDATA_WEATHERIMPORT_1_JSON),
|
||||||
|
"weatherimport_json": json.dumps(sample_import_1_json),
|
||||||
|
}
|
||||||
|
config_eos.merge_settings_from_dict(settings)
|
||||||
    provider = WeatherImport()
    assert provider.enabled() == True
    return provider


@pytest.fixture
def sample_import_1_json():
    """Fixture that returns sample forecast data report."""
    with open(FILE_TESTDATA_WEATHERIMPORT_1_JSON, "r") as f_res:
        input_data = json.load(f_res)
    return input_data


# ------------------------------------------------
# General forecast
# ------------------------------------------------


def test_singleton_instance(weather_provider):
    """Test that WeatherForecast behaves as a singleton."""
    another_instance = WeatherImport()
    assert weather_provider is another_instance


def test_invalid_provider(weather_provider, monkeypatch):
    """Test requesting an unsupported weather_provider."""
    settings = {
        "weather_provider": "<invalid>",
        "weatherimport_file_path": str(FILE_TESTDATA_WEATHERIMPORT_1_JSON),
    }
    config_eos.merge_settings_from_dict(settings)
    assert weather_provider.enabled() == False


# ------------------------------------------------
# Import
# ------------------------------------------------


@pytest.mark.parametrize(
    "start_datetime, from_file",
    [
        ("2024-11-10 00:00:00", True),  # No DST in Germany
        ("2024-08-10 00:00:00", True),  # DST in Germany
        ("2024-03-31 00:00:00", True),  # DST change in Germany (23 hours/day)
        ("2024-10-27 00:00:00", True),  # DST change in Germany (25 hours/day)
        ("2024-11-10 00:00:00", False),  # No DST in Germany
        ("2024-08-10 00:00:00", False),  # DST in Germany
        ("2024-03-31 00:00:00", False),  # DST change in Germany (23 hours/day)
        ("2024-10-27 00:00:00", False),  # DST change in Germany (25 hours/day)
    ],
)
def test_import(weather_provider, sample_import_1_json, start_datetime, from_file):
    """Test fetching forecast from Import."""
    ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
    if from_file:
        config_eos.weatherimport_json = None
        assert config_eos.weatherimport_json is None
    else:
        config_eos.weatherimport_file_path = None
        assert config_eos.weatherimport_file_path is None
    weather_provider.clear()

    # Call the method
    weather_provider.update_data()

    # Assert: Verify the result is as expected
    assert weather_provider.start_datetime is not None
    assert weather_provider.total_hours is not None
    assert compare_datetimes(weather_provider.start_datetime, ems_eos.start_datetime).equal
    values = sample_import_1_json["weather_temp_air"]
    value_datetime_mapping = weather_provider.import_datetimes(len(values))
    for i, mapping in enumerate(value_datetime_mapping):
        assert i < len(weather_provider.records)
        expected_datetime, expected_value_index = mapping
        expected_value = values[expected_value_index]
        result_datetime = weather_provider.records[i].date_time
        result_value = weather_provider.records[i]["weather_temp_air"]

        # print(f"{i}: Expected: {expected_datetime}:{expected_value}")
        # print(f"{i}: Result: {result_datetime}:{result_value}")
        assert compare_datetimes(result_datetime, expected_datetime).equal
        assert result_value == expected_value
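For context: the parametrized dates above hinge on Europe/Berlin having one 23-hour and one 25-hour calendar day in 2024. A quick stand-alone check of that premise, using only the Python standard library (illustrative only, not part of the EOS code):

from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo

def hours_in_local_day(day: str, tz: str = "Europe/Berlin") -> int:
    """Count the real (UTC) hours between local midnight and the next one."""
    start = datetime.fromisoformat(day).replace(tzinfo=ZoneInfo(tz))
    end = start + timedelta(days=1)  # wall-clock arithmetic; offset is recomputed
    return int((end.astimezone(timezone.utc) - start.astimezone(timezone.utc))
               / timedelta(hours=1))

assert hours_in_local_day("2024-03-31") == 23  # DST change, spring (23 hours/day)
assert hours_in_local_day("2024-10-27") == 25  # DST change, autumn (25 hours/day)
assert hours_in_local_day("2024-11-10") == 24  # no DST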
122 tests/testdata/EOS.config.json vendored Normal file
@ -0,0 +1,122 @@
{
    "config_file_path": null,
    "config_folder_path": null,
    "data_cache_path": null,
    "data_cache_subpath": null,
    "data_folder_path": null,
    "data_output_path": null,
    "data_output_subpath": null,
    "elecprice_provider": null,
    "elecpriceimport_file_path": null,
    "latitude": null,
    "load0_import_file_path": null,
    "load0_name": null,
    "load0_provider": null,
    "load1_import_file_path": null,
    "load1_name": null,
    "load1_provider": null,
    "load2_import_file_path": null,
    "load2_name": null,
    "load2_provider": null,
    "load3_import_file_path": null,
    "load3_name": null,
    "load3_provider": null,
    "load4_import_file_path": null,
    "load4_name": null,
    "load4_provider": null,
    "loadakkudoktor_year_energy": null,
    "longitude": null,
    "optimization_ev_available_charge_rates_percent": [],
    "optimization_hours": 24,
    "optimization_penalty": null,
    "prediction_historic_hours": 48,
    "prediction_hours": 48,
    "pvforecast0_albedo": null,
    "pvforecast0_inverter_model": null,
    "pvforecast0_inverter_paco": null,
    "pvforecast0_loss": null,
    "pvforecast0_module_model": null,
    "pvforecast0_modules_per_string": null,
    "pvforecast0_mountingplace": "free",
    "pvforecast0_optimal_surface_tilt": false,
    "pvforecast0_optimalangles": false,
    "pvforecast0_peakpower": null,
    "pvforecast0_pvtechchoice": "crystSi",
    "pvforecast0_strings_per_inverter": null,
    "pvforecast0_surface_azimuth": 180,
    "pvforecast0_surface_tilt": 0,
    "pvforecast0_trackingtype": 0,
    "pvforecast0_userhorizon": null,
    "pvforecast1_albedo": null,
    "pvforecast1_inverter_model": null,
    "pvforecast1_inverter_paco": null,
    "pvforecast1_loss": 0,
    "pvforecast1_module_model": null,
    "pvforecast1_modules_per_string": null,
    "pvforecast1_mountingplace": "free",
    "pvforecast1_optimal_surface_tilt": false,
    "pvforecast1_optimalangles": false,
    "pvforecast1_peakpower": null,
    "pvforecast1_pvtechchoice": "crystSi",
    "pvforecast1_strings_per_inverter": null,
    "pvforecast1_surface_azimuth": 180,
    "pvforecast1_surface_tilt": 0,
    "pvforecast1_trackingtype": 0,
    "pvforecast1_userhorizon": null,
    "pvforecast2_albedo": null,
    "pvforecast2_inverter_model": null,
    "pvforecast2_inverter_paco": null,
    "pvforecast2_loss": 0,
    "pvforecast2_module_model": null,
    "pvforecast2_modules_per_string": null,
    "pvforecast2_mountingplace": "free",
    "pvforecast2_optimal_surface_tilt": false,
    "pvforecast2_optimalangles": false,
    "pvforecast2_peakpower": null,
    "pvforecast2_pvtechchoice": "crystSi",
    "pvforecast2_strings_per_inverter": null,
    "pvforecast2_surface_azimuth": 180,
    "pvforecast2_surface_tilt": 0,
    "pvforecast2_trackingtype": 0,
    "pvforecast2_userhorizon": null,
    "pvforecast3_albedo": null,
    "pvforecast3_inverter_model": null,
    "pvforecast3_inverter_paco": null,
    "pvforecast3_loss": 0,
    "pvforecast3_module_model": null,
    "pvforecast3_modules_per_string": null,
    "pvforecast3_mountingplace": "free",
    "pvforecast3_optimal_surface_tilt": false,
    "pvforecast3_optimalangles": false,
    "pvforecast3_peakpower": null,
    "pvforecast3_pvtechchoice": "crystSi",
    "pvforecast3_strings_per_inverter": null,
    "pvforecast3_surface_azimuth": 180,
    "pvforecast3_surface_tilt": 0,
    "pvforecast3_trackingtype": 0,
    "pvforecast3_userhorizon": null,
    "pvforecast4_albedo": null,
    "pvforecast4_inverter_model": null,
    "pvforecast4_inverter_paco": null,
    "pvforecast4_loss": 0,
    "pvforecast4_module_model": null,
    "pvforecast4_modules_per_string": null,
    "pvforecast4_mountingplace": "free",
    "pvforecast4_optimal_surface_tilt": false,
    "pvforecast4_optimalangles": false,
    "pvforecast4_peakpower": null,
    "pvforecast4_pvtechchoice": "crystSi",
    "pvforecast4_strings_per_inverter": null,
    "pvforecast4_surface_azimuth": 180,
    "pvforecast4_surface_tilt": 0,
    "pvforecast4_trackingtype": 0,
    "pvforecast4_userhorizon": null,
    "pvforecast_provider": null,
    "pvforecastimport_file_path": null,
    "server_fastapi_host": "0.0.0.0",
    "server_fastapi_port": 8503,
    "server_fasthtml_host": "0.0.0.0",
    "server_fasthtml_port": 8504,
    "weather_provider": null,
    "weatherimport_file_path": null
}
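Usage note: the tests above apply settings with config_eos.merge_settings_from_dict(), so a configuration file like this can be merged the same way. A minimal sketch, assuming the same config_eos singleton the tests use is in scope (the attribute access is an assumption; the key comes from the file above):

import json

with open("tests/testdata/EOS.config.json", "r") as f_cfg:
    settings = json.load(f_cfg)
config_eos.merge_settings_from_dict(settings)  # same call as in test_invalid_provider
assert config_eos.prediction_hours == 48  # assumed attribute, mirrors the JSON key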
1 tests/testdata/elecpriceforecast_akkudoktor_1.json vendored Normal file
File diff suppressed because one or more lines are too long
30 tests/testdata/import_input_1.json vendored Normal file
@ -0,0 +1,30 @@
{
    "load0_mean": [
        676.71, 876.19, 527.13, 468.88, 531.38, 517.95, 483.15, 472.28, 1011.68, 995.00,
        1053.07, 1063.91, 1320.56, 1132.03, 1163.67, 1176.82, 1216.22, 1103.78, 1129.12,
        1178.71, 1050.98, 988.56, 912.38, 704.61, 516.37, 868.05, 694.34, 608.79, 556.31,
        488.89, 506.91, 804.89, 1141.98, 1056.97, 992.46, 1155.99, 827.01, 1257.98, 1232.67,
        871.26, 860.88, 1158.03, 1222.72, 1221.04, 949.99, 987.01, 733.99, 592.97
    ],
    "elecprice_marketprice": [
        0.3384, 0.3318, 0.3284, 0.3283, 0.3289, 0.3334, 0.3290,
        0.3302, 0.3042, 0.2430, 0.2280, 0.2212, 0.2093, 0.1879,
        0.1838, 0.2004, 0.2198, 0.2270, 0.2997, 0.3195, 0.3081,
        0.2969, 0.2921, 0.2780, 0.3384, 0.3318, 0.3284, 0.3283,
        0.3289, 0.3334, 0.3290, 0.3302, 0.3042, 0.2430, 0.2280,
        0.2212, 0.2093, 0.1879, 0.1838, 0.2004, 0.2198, 0.2270,
        0.2997, 0.3195, 0.3081, 0.2969, 0.2921, 0.2780
    ],
    "pvforecast_ac_power": [
        0, 0, 0, 0, 0, 0, 0, 8.05, 352.91, 728.51, 930.28, 1043.25, 1106.74, 1161.69,
        6018.82, 5519.07, 3969.88, 3017.96, 1943.07, 1007.17, 319.67, 7.88, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 5.04, 335.59, 705.32, 1121.12, 1604.79, 2157.38, 1433.25, 5718.49,
        4553.96, 3027.55, 2574.46, 1720.4, 963.4, 383.3, 0, 0, 0
    ],
    "weather_temp_air": [
        18.3, 17.8, 16.9, 16.2, 15.6, 15.1, 14.6, 14.2, 14.3, 14.8, 15.7, 16.7,
        17.4, 18.0, 18.6, 19.2, 19.1, 18.7, 18.5, 17.7, 16.2, 14.6, 13.6, 13.0,
        12.6, 12.2, 11.7, 11.6, 11.3, 11.0, 10.7, 10.2, 11.4, 14.4, 16.4, 18.3,
        19.5, 20.7, 21.9, 22.7, 23.1, 23.1, 22.8, 21.8, 20.2, 19.1, 18.0, 17.4
    ]
}
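Each series in this file holds 48 hourly values, one per hour of the 48-hour window set by "prediction_hours" in EOS.config.json above. A small stand-alone sanity check (illustrative only):

import json

with open("tests/testdata/import_input_1.json", "r") as f_in:
    import_data = json.load(f_in)

for key, values in import_data.items():
    # the keys double as the record field names used by the import tests
    assert len(values) == 48, f"{key}: expected 48 values, got {len(values)}"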
10592 tests/testdata/weatherforecast_brightsky_1.json vendored Normal file
File diff suppressed because it is too large
Load Diff
340 tests/testdata/weatherforecast_brightsky_2.json vendored Normal file
@ -0,0 +1,340 @@
[
"{\"date_time\":\"2024-10-25T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":140.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.5700713856497344,\"wind_speed\":4.7,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":6.2,\"feels_like\":null,\"dew_point\":5.8,\"relative_humidity\":97.0,\"pressure\":1022.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6249985327562004,\"wind_speed\":6.1,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":6.6,\"feels_like\":null,\"dew_point\":6.3,\"relative_humidity\":98.0,\"pressure\":1023.1,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":740.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7334253361156824,\"wind_speed\":3.6,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":7.5,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":99.0,\"pressure\":1023.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":580.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8455721653466037,\"wind_speed\":2.9,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":8.7,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":98.0,\"pressure\":1022.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":140.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9376328848315296,\"wind_speed\":4.7,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":9.5,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":98.0,\"pressure\":1022.6,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T04:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":250.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9532243896939583,\"wind_speed\":4.7,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":9.8,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":97.0,\"pressure\":1022.6,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2280.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9807797336643354,\"wind_speed\":6.1,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.5,\"relative_humidity\":96.0,\"pressure\":1022.6,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2650.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9601466114386656,\"wind_speed\":7.6,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":95.0,\"pressure\":1022.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2290.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9395134892129953,\"wind_speed\":8.3,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":94.0,\"pressure\":1022.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2530.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9721291734030872,\"wind_speed\":7.6,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":9.5,\"relative_humidity\":95.0,\"pressure\":1022.4,\"ozone\":null,\"ghi\":22.22705922303379,\"dni\":0.0,\"dhi\":22.22705922303379}",
"{\"date_time\":\"2024-10-26T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":1660.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9601466114386656,\"wind_speed\":6.8,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":95.0,\"pressure\":1022.4,\"ozone\":null,\"ghi\":68.16265202099999,\"dni\":0.0,\"dhi\":68.16265202099999}",
"{\"date_time\":\"2024-10-26T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4420.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9395134892129953,\"wind_speed\":7.2,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":94.0,\"pressure\":1022.1,\"ozone\":null,\"ghi\":108.0100746278567,\"dni\":0.0,\"dhi\":108.0100746278567}",
"{\"date_time\":\"2024-10-26T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2010.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9513699189462126,\"wind_speed\":7.6,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":94.0,\"pressure\":1021.8,\"ozone\":null,\"ghi\":134.2816493853918,\"dni\":0.0,\"dhi\":134.2816493853918}",
"{\"date_time\":\"2024-10-26T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4340.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8683329011187144,\"wind_speed\":7.9,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":8.8,\"relative_humidity\":90.0,\"pressure\":1021.2,\"ozone\":null,\"ghi\":144.04237088707308,\"dni\":0.0,\"dhi\":144.04237088707308}",
"{\"date_time\":\"2024-10-26T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9060.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9164012929980223,\"wind_speed\":7.6,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":10.9,\"feels_like\":null,\"dew_point\":9.2,\"relative_humidity\":89.0,\"pressure\":1020.8,\"ozone\":null,\"ghi\":136.35519419190516,\"dni\":0.0,\"dhi\":136.35519419190516}",
"{\"date_time\":\"2024-10-26T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13650.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.979008294319185,\"wind_speed\":6.8,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":11.8,\"feels_like\":null,\"dew_point\":9.7,\"relative_humidity\":87.0,\"pressure\":1020.0,\"ozone\":null,\"ghi\":111.94730962791996,\"dni\":0.0,\"dhi\":111.94730962791996}",
"{\"date_time\":\"2024-10-26T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17560.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9946143405085994,\"wind_speed\":7.2,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":12.9,\"feels_like\":null,\"dew_point\":9.8,\"relative_humidity\":82.0,\"pressure\":1019.8,\"ozone\":null,\"ghi\":73.45834328182735,\"dni\":0.0,\"dhi\":73.45834328182735}",
"{\"date_time\":\"2024-10-26T16:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17960.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9099689648264042,\"wind_speed\":8.3,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":12.8,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":79.0,\"pressure\":1019.2,\"ozone\":null,\"ghi\":34.07062080450064,\"dni\":0.0,\"dhi\":34.07062080450064}",
"{\"date_time\":\"2024-10-26T17:00:00+01:00\",\"total_clouds\":62.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14910.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.843603353612786,\"wind_speed\":6.8,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":9.9,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":91.0,\"pressure\":1018.8,\"ozone\":null,\"ghi\":0.11256372587508819,\"dni\":0.0,\"dhi\":0.11256372587508819}",
"{\"date_time\":\"2024-10-26T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6400.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7277309224807302,\"wind_speed\":5.4,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":8.3,\"feels_like\":null,\"dew_point\":7.5,\"relative_humidity\":94.0,\"pressure\":1018.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":720.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6911147392871873,\"wind_speed\":5.4,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":7.6,\"feels_like\":null,\"dew_point\":6.9,\"relative_humidity\":96.0,\"pressure\":1018.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T20:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5730.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6378354583834347,\"wind_speed\":4.7,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":6.9,\"feels_like\":null,\"dew_point\":6.4,\"relative_humidity\":97.0,\"pressure\":1018.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T21:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.618159751885058,\"wind_speed\":4.7,\"wind_direction\":340.0,\"frost_chance\":null,\"temp_air\":6.7,\"feels_like\":null,\"dew_point\":6.3,\"relative_humidity\":97.0,\"pressure\":1018.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T22:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7087305178214287,\"wind_speed\":7.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":7.6,\"feels_like\":null,\"dew_point\":7.1,\"relative_humidity\":97.0,\"pressure\":1019.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-26T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.764491154873937,\"wind_speed\":10.4,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":8.3,\"feels_like\":null,\"dew_point\":7.6,\"relative_humidity\":96.0,\"pressure\":1019.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3610.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.830030033835469,\"wind_speed\":8.3,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":8.9,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":96.0,\"pressure\":1019.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5770.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8928153879508465,\"wind_speed\":9.0,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":9.8,\"feels_like\":null,\"dew_point\":8.9,\"relative_humidity\":94.0,\"pressure\":1018.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5980.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.945096824920762,\"wind_speed\":7.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":10.6,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":92.0,\"pressure\":1018.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.947556370182495,\"wind_speed\":6.5,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":10.8,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":91.0,\"pressure\":1018.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9903598068898019,\"wind_speed\":4.7,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":10.8,\"feels_like\":null,\"dew_point\":9.7,\"relative_humidity\":93.0,\"pressure\":1018.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4830.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.987381538505997,\"wind_speed\":4.3,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":10.6,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":94.0,\"pressure\":1018.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5360.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9963174492677347,\"wind_speed\":3.6,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":9.8,\"relative_humidity\":95.0,\"pressure\":1018.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4660.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0085238952986137,\"wind_speed\":2.2,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.6,\"feels_like\":null,\"dew_point\":9.8,\"relative_humidity\":95.0,\"pressure\":1019.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5570.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.024064286986675,\"wind_speed\":3.2,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":10.9,\"feels_like\":null,\"dew_point\":10.0,\"relative_humidity\":94.0,\"pressure\":1020.5,\"ozone\":null,\"ghi\":20.901591088639343,\"dni\":0.0,\"dhi\":20.901591088639343}",
"{\"date_time\":\"2024-10-27T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7670.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.020123074823889,\"wind_speed\":7.2,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":10.1,\"relative_humidity\":91.0,\"pressure\":1021.1,\"ozone\":null,\"ghi\":66.41841804602629,\"dni\":0.0,\"dhi\":66.41841804602629}",
"{\"date_time\":\"2024-10-27T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8790.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0744781114397735,\"wind_speed\":11.2,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":12.2,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":89.0,\"pressure\":1021.4,\"ozone\":null,\"ghi\":106.12345605852113,\"dni\":0.0,\"dhi\":106.12345605852113}",
"{\"date_time\":\"2024-10-27T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8040.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.067588035893061,\"wind_speed\":10.8,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":12.9,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":85.0,\"pressure\":1022.0,\"ozone\":null,\"ghi\":132.31929512932624,\"dni\":0.0,\"dhi\":132.31929512932624}",
"{\"date_time\":\"2024-10-27T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0813625709358203,\"wind_speed\":11.5,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":13.4,\"feels_like\":null,\"dew_point\":10.6,\"relative_humidity\":83.0,\"pressure\":1022.6,\"ozone\":null,\"ghi\":142.03807516868267,\"dni\":0.0,\"dhi\":142.03807516868267}",
"{\"date_time\":\"2024-10-27T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9560.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.119817892495729,\"wind_speed\":10.8,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":13.9,\"feels_like\":null,\"dew_point\":10.9,\"relative_humidity\":82.0,\"pressure\":1022.7,\"ozone\":null,\"ghi\":134.33853283469773,\"dni\":0.0,\"dhi\":134.33853283469773}",
"{\"date_time\":\"2024-10-27T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12400.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.119593668836106,\"wind_speed\":10.4,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":14.1,\"feels_like\":null,\"dew_point\":11.0,\"relative_humidity\":81.0,\"pressure\":1022.9,\"ozone\":null,\"ghi\":109.95561941571053,\"dni\":0.0,\"dhi\":109.95561941571053}",
"{\"date_time\":\"2024-10-27T15:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1843956516771077,\"wind_speed\":11.9,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":14.8,\"feels_like\":null,\"dew_point\":11.4,\"relative_humidity\":80.0,\"pressure\":1023.0,\"ozone\":null,\"ghi\":88.84019629738314,\"dni\":0.0,\"dhi\":88.84019629738314}",
"{\"date_time\":\"2024-10-27T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13590.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.183440809341084,\"wind_speed\":13.3,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":15.0,\"feels_like\":null,\"dew_point\":11.4,\"relative_humidity\":79.0,\"pressure\":1023.8,\"ozone\":null,\"ghi\":25.90303659201319,\"dni\":0.0,\"dhi\":25.90303659201319}",
"{\"date_time\":\"2024-10-27T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14720.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1291678932402403,\"wind_speed\":8.6,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":13.0,\"feels_like\":null,\"dew_point\":11.0,\"relative_humidity\":87.0,\"pressure\":1024.4,\"ozone\":null,\"ghi\":0.027781191847857583,\"dni\":0.0,\"dhi\":0.027781191847857583}",
"{\"date_time\":\"2024-10-27T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0574147959693145,\"wind_speed\":9.0,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":11.7,\"feels_like\":null,\"dew_point\":10.3,\"relative_humidity\":91.0,\"pressure\":1025.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12740.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0324781084809054,\"wind_speed\":7.2,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.5,\"feels_like\":null,\"dew_point\":10.0,\"relative_humidity\":91.0,\"pressure\":1025.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12320.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9995340646535906,\"wind_speed\":7.6,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":10.7,\"feels_like\":null,\"dew_point\":9.8,\"relative_humidity\":94.0,\"pressure\":1026.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10820.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1311188481658605,\"wind_speed\":9.0,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":10.8,\"relative_humidity\":96.0,\"pressure\":1026.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8040.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1609860685094553,\"wind_speed\":9.4,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.8,\"feels_like\":null,\"dew_point\":11.0,\"relative_humidity\":95.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-27T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4860.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.174201406952952,\"wind_speed\":8.6,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":11.2,\"relative_humidity\":95.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3910.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.174201406952952,\"wind_speed\":11.9,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":11.1,\"relative_humidity\":95.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T01:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7410.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1284287457539426,\"wind_speed\":12.6,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":10.9,\"relative_humidity\":93.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4020.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.174201406952952,\"wind_speed\":5.8,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":11.1,\"relative_humidity\":95.0,\"pressure\":1027.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3500.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.125241657374896,\"wind_speed\":6.8,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":11.7,\"feels_like\":null,\"dew_point\":10.7,\"relative_humidity\":94.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2770.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1089196934974663,\"wind_speed\":5.0,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":10.6,\"relative_humidity\":95.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0519715153105804,\"wind_speed\":2.9,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":11.3,\"feels_like\":null,\"dew_point\":10.3,\"relative_humidity\":93.0,\"pressure\":1026.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3330.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.045596885784406,\"wind_speed\":5.8,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":10.9,\"feels_like\":null,\"dew_point\":10.2,\"relative_humidity\":95.0,\"pressure\":1026.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0545649619507635,\"wind_speed\":8.3,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":10.8,\"feels_like\":null,\"dew_point\":10.1,\"relative_humidity\":96.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.020805703639267,\"wind_speed\":6.5,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":10.7,\"feels_like\":null,\"dew_point\":9.9,\"relative_humidity\":95.0,\"pressure\":1027.09,\"ozone\":null,\"ghi\":19.602868340686165,\"dni\":0.0,\"dhi\":19.602868340686165}",
"{\"date_time\":\"2024-10-28T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":1960.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0581070021227617,\"wind_speed\":8.3,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":11.0,\"feels_like\":null,\"dew_point\":10.3,\"relative_humidity\":95.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":64.68374862563667,\"dni\":0.0,\"dhi\":64.68374862563667}",
"{\"date_time\":\"2024-10-28T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4930.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.017567894096528,\"wind_speed\":9.4,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":11.2,\"feels_like\":null,\"dew_point\":10.0,\"relative_humidity\":92.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":104.24512318045389,\"dni\":0.0,\"dhi\":104.24512318045389}",
"{\"date_time\":\"2024-10-28T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5410.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0574147959693145,\"wind_speed\":10.8,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":11.7,\"feels_like\":null,\"dew_point\":10.3,\"relative_humidity\":91.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":130.36720205010565,\"dni\":0.0,\"dhi\":130.36720205010565}",
"{\"date_time\":\"2024-10-28T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5520.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0744781114397735,\"wind_speed\":10.1,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":12.2,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":89.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":140.04739059314485,\"dni\":0.0,\"dhi\":140.04739059314485}",
"{\"date_time\":\"2024-10-28T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6840.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1175533778696294,\"wind_speed\":9.4,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":13.1,\"feels_like\":null,\"dew_point\":10.8,\"relative_humidity\":86.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":132.3396471104131,\"dni\":0.0,\"dhi\":132.3396471104131}",
"{\"date_time\":\"2024-10-28T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":11920.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.158848465791691,\"wind_speed\":11.9,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":14.2,\"feels_like\":null,\"dew_point\":11.2,\"relative_humidity\":82.0,\"pressure\":1026.0,\"ozone\":null,\"ghi\":107.98652507155819,\"dni\":0.0,\"dhi\":107.98652507155819}",
"{\"date_time\":\"2024-10-28T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13760.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1586051188053688,\"wind_speed\":7.6,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":14.4,\"feels_like\":null,\"dew_point\":11.2,\"relative_humidity\":81.0,\"pressure\":1025.8,\"ozone\":null,\"ghi\":69.69629401139778,\"dni\":0.0,\"dhi\":69.69629401139778}",
"{\"date_time\":\"2024-10-28T16:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":21080.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.172012702785343,\"wind_speed\":7.2,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":14.3,\"feels_like\":null,\"dew_point\":11.4,\"relative_humidity\":82.0,\"pressure\":1025.4,\"ozone\":null,\"ghi\":30.299076442472742,\"dni\":0.0,\"dhi\":30.299076442472742}",
"{\"date_time\":\"2024-10-28T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":16720.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0723655900991496,\"wind_speed\":8.6,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":12.0,\"feels_like\":null,\"dew_point\":10.5,\"relative_humidity\":90.0,\"pressure\":1025.2,\"ozone\":null,\"ghi\":0.007313988512718714,\"dni\":0.0,\"dhi\":0.007313988512718714}",
"{\"date_time\":\"2024-10-28T18:00:00+01:00\",\"total_clouds\":75.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9542897134936768,\"wind_speed\":3.6,\"wind_direction\":280.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":9.5,\"relative_humidity\":93.0,\"pressure\":1025.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T19:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7810.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8443259335596536,\"wind_speed\":3.2,\"wind_direction\":330.0,\"frost_chance\":null,\"temp_air\":9.2,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":95.0,\"pressure\":1026.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8378811294487585,\"wind_speed\":2.5,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":8.8,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":97.0,\"pressure\":1026.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.925871372365559,\"wind_speed\":2.5,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":9.4,\"feels_like\":null,\"dew_point\":9.1,\"relative_humidity\":98.0,\"pressure\":1026.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":60.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.801251387267144,\"wind_speed\":2.9,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":8.3,\"feels_like\":null,\"dew_point\":8.0,\"relative_humidity\":98.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-28T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7720765939942027,\"wind_speed\":3.2,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":8.2,\"feels_like\":null,\"dew_point\":7.8,\"relative_humidity\":97.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":80.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8455721653466037,\"wind_speed\":3.6,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":8.7,\"feels_like\":null,\"dew_point\":8.4,\"relative_humidity\":98.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":160.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.779508453411574,\"wind_speed\":2.2,\"wind_direction\":330.0,\"frost_chance\":null,\"temp_air\":8.1,\"feels_like\":null,\"dew_point\":7.8,\"relative_humidity\":98.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7580400895133157,\"wind_speed\":2.5,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":7.9,\"feels_like\":null,\"dew_point\":7.5,\"relative_humidity\":98.0,\"pressure\":1026.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8196315034637476,\"wind_speed\":1.4,\"wind_direction\":0.0,\"frost_chance\":null,\"temp_air\":8.3,\"feels_like\":null,\"dew_point\":8.1,\"relative_humidity\":99.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7976667029361821,\"wind_speed\":1.8,\"wind_direction\":320.0,\"frost_chance\":null,\"temp_air\":8.1,\"feels_like\":null,\"dew_point\":8.0,\"relative_humidity\":99.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.786788492758286,\"wind_speed\":1.4,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":8.0,\"feels_like\":null,\"dew_point\":7.9,\"relative_humidity\":99.0,\"pressure\":1026.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7022172735352934,\"wind_speed\":2.2,\"wind_direction\":240.0,\"frost_chance\":null,\"temp_air\":7.2,\"feels_like\":null,\"dew_point\":7.1,\"relative_humidity\":99.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.722955851908333,\"wind_speed\":2.9,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":7.4,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":99.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":130.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7652386971092027,\"wind_speed\":3.2,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":7.8,\"feels_like\":null,\"dew_point\":7.6,\"relative_humidity\":99.0,\"pressure\":1027.5,\"ozone\":null,\"ghi\":18.332702353547074,\"dni\":0.0,\"dhi\":18.332702353547074}",
"{\"date_time\":\"2024-10-29T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":170.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8644045343807525,\"wind_speed\":4.3,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":8.7,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":99.0,\"pressure\":1027.59,\"ozone\":null,\"ghi\":62.95962823548937,\"dni\":0.0,\"dhi\":62.95962823548937}",
"{\"date_time\":\"2024-10-29T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":340.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9141831337652606,\"wind_speed\":2.5,\"wind_direction\":340.0,\"frost_chance\":null,\"temp_air\":9.3,\"feels_like\":null,\"dew_point\":9.0,\"relative_humidity\":98.0,\"pressure\":1027.59,\"ozone\":null,\"ghi\":102.37603626073972,\"dni\":0.0,\"dhi\":102.37603626073972}",
"{\"date_time\":\"2024-10-29T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":270.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.013647682316836,\"wind_speed\":3.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":9.9,\"relative_humidity\":97.0,\"pressure\":1027.5,\"ozone\":null,\"ghi\":128.42638331065623,\"dni\":0.0,\"dhi\":128.42638331065623}",
"{\"date_time\":\"2024-10-29T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":320.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1313720084431926,\"wind_speed\":5.0,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":12.1,\"feels_like\":null,\"dew_point\":10.9,\"relative_humidity\":92.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":138.07137517499854,\"dni\":0.0,\"dhi\":138.07137517499854}",
"{\"date_time\":\"2024-10-29T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":11100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.291852348361256,\"wind_speed\":6.1,\"wind_direction\":160.0,\"frost_chance\":null,\"temp_air\":14.4,\"feels_like\":null,\"dew_point\":12.1,\"relative_humidity\":86.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":130.35961341605747,\"dni\":0.0,\"dhi\":130.35961341605747}",
"{\"date_time\":\"2024-10-29T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":16900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.3255616039808618,\"wind_speed\":5.0,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":16.9,\"feels_like\":null,\"dew_point\":12.4,\"relative_humidity\":75.0,\"pressure\":1025.9,\"ozone\":null,\"ghi\":106.04108723155429,\"dni\":0.0,\"dhi\":106.04108723155429}",
"{\"date_time\":\"2024-10-29T15:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":34440.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.1836631339806405,\"wind_speed\":6.5,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":17.0,\"feels_like\":null,\"dew_point\":11.6,\"relative_humidity\":70.0,\"pressure\":1025.7,\"ozone\":null,\"ghi\":84.24287206656912,\"dni\":0.0,\"dhi\":84.24287206656912}",
"{\"date_time\":\"2024-10-29T16:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":58950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.2475055388051737,\"wind_speed\":4.3,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":15.9,\"feels_like\":null,\"dew_point\":11.9,\"relative_humidity\":77.0,\"pressure\":1025.4,\"ozone\":null,\"ghi\":49.81579897492729,\"dni\":14.538665837772964,\"dhi\":47.79892049551232}",
"{\"date_time\":\"2024-10-29T17:00:00+01:00\",\"total_clouds\":50.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":47830.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0652816544204247,\"wind_speed\":10.4,\"wind_direction\":250.0,\"frost_chance\":null,\"temp_air\":12.5,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":87.0,\"pressure\":1025.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T18:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":41010.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.020123074823889,\"wind_speed\":6.1,\"wind_direction\":260.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":10.0,\"relative_humidity\":91.0,\"pressure\":1025.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T19:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":18250.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9662391817133793,\"wind_speed\":5.4,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":10.6,\"feels_like\":null,\"dew_point\":9.5,\"relative_humidity\":93.0,\"pressure\":1026.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T20:00:00+01:00\",\"total_clouds\":12.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7840.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0394979799019253,\"wind_speed\":6.5,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":11.2,\"feels_like\":null,\"dew_point\":10.2,\"relative_humidity\":93.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7370.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7702426892100076,\"wind_speed\":5.0,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":8.7,\"feels_like\":null,\"dew_point\":7.8,\"relative_humidity\":94.0,\"pressure\":1026.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3850.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8220170482487101,\"wind_speed\":4.3,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":9.0,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":95.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-29T23:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7260.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8443259335596536,\"wind_speed\":4.3,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":9.2,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":95.0,\"pressure\":1026.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T00:00:00+01:00\",\"total_clouds\":50.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":180.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.729577309288815,\"wind_speed\":3.6,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":7.8,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":97.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":70.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6378354583834347,\"wind_speed\":2.2,\"wind_direction\":240.0,\"frost_chance\":null,\"temp_air\":6.9,\"feels_like\":null,\"dew_point\":6.5,\"relative_humidity\":97.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7263462963556704,\"wind_speed\":2.9,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":7.6,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":98.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6952547605843942,\"wind_speed\":1.8,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":7.3,\"feels_like\":null,\"dew_point\":7.1,\"relative_humidity\":98.0,\"pressure\":1027.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8268830865919616,\"wind_speed\":2.2,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":8.2,\"feels_like\":null,\"dew_point\":8.2,\"relative_humidity\":100.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":150.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8492109088214057,\"wind_speed\":1.4,\"wind_direction\":290.0,\"frost_chance\":null,\"temp_air\":8.4,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":100.0,\"pressure\":1027.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7840.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8872184723928271,\"wind_speed\":2.5,\"wind_direction\":150.0,\"frost_chance\":null,\"temp_air\":8.9,\"feels_like\":null,\"dew_point\":8.8,\"relative_humidity\":99.0,\"pressure\":1027.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2810.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8872184723928271,\"wind_speed\":2.5,\"wind_direction\":290.0,\"frost_chance\":null,\"temp_air\":8.9,\"feels_like\":null,\"dew_point\":8.7,\"relative_humidity\":99.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T08:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":460.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.921981762341113,\"wind_speed\":2.2,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":9.2,\"feels_like\":null,\"dew_point\":9.0,\"relative_humidity\":99.0,\"pressure\":1028.3,\"ozone\":null,\"ghi\":21.219671542362477,\"dni\":0.0,\"dhi\":21.219671542362477}",
"{\"date_time\":\"2024-10-30T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":1620.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9813912463341716,\"wind_speed\":2.5,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":9.7,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":99.0,\"pressure\":1028.4,\"ozone\":null,\"ghi\":61.24705643100921,\"dni\":0.0,\"dhi\":61.24705643100921}",
"{\"date_time\":\"2024-10-30T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13580.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0886168395153506,\"wind_speed\":1.8,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":10.4,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":100.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":100.51716584295312,\"dni\":0.0,\"dhi\":100.51716584295312}",
"{\"date_time\":\"2024-10-30T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":22490.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0364427178898907,\"wind_speed\":5.4,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.0,\"feels_like\":null,\"dew_point\":10.2,\"relative_humidity\":94.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":126.49785855652105,\"dni\":0.0,\"dhi\":126.49785855652105}",
"{\"date_time\":\"2024-10-30T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17130.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.108204921394897,\"wind_speed\":5.4,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":12.1,\"feels_like\":null,\"dew_point\":10.7,\"relative_humidity\":91.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":136.11108832250144,\"dni\":0.0,\"dhi\":136.11108832250144}",
"{\"date_time\":\"2024-10-30T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":27620.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0744781114397735,\"wind_speed\":5.8,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":12.2,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":89.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":128.39950312276366,\"dni\":0.0,\"dhi\":128.39950312276366}",
"{\"date_time\":\"2024-10-30T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0792067212034273,\"wind_speed\":10.8,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":12.8,\"feels_like\":null,\"dew_point\":10.6,\"relative_humidity\":86.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":104.12035323578755,\"dni\":0.0,\"dhi\":104.12035323578755}",
"{\"date_time\":\"2024-10-30T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17420.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.067588035893061,\"wind_speed\":10.4,\"wind_direction\":160.0,\"frost_chance\":null,\"temp_air\":12.9,\"feels_like\":null,\"dew_point\":10.5,\"relative_humidity\":85.0,\"pressure\":1027.7,\"ozone\":null,\"ghi\":66.05384442265539,\"dni\":0.0,\"dhi\":66.05384442265539}",
"{\"date_time\":\"2024-10-30T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12780.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.116237166149368,\"wind_speed\":9.0,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":12.9,\"feels_like\":null,\"dew_point\":10.7,\"relative_humidity\":87.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":21.554012703673507,\"dni\":0.0,\"dhi\":21.554012703673507}",
"{\"date_time\":\"2024-10-30T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9610.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.077906151217825,\"wind_speed\":5.0,\"wind_direction\":150.0,\"frost_chance\":null,\"temp_air\":12.6,\"feels_like\":null,\"dew_point\":10.5,\"relative_humidity\":87.0,\"pressure\":1027.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10940.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0723655900991496,\"wind_speed\":3.2,\"wind_direction\":160.0,\"frost_chance\":null,\"temp_air\":12.0,\"feels_like\":null,\"dew_point\":10.4,\"relative_humidity\":90.0,\"pressure\":1028.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7710.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.04490856047528,\"wind_speed\":4.0,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.6,\"feels_like\":null,\"dew_point\":10.1,\"relative_humidity\":91.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3170.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0324781084809054,\"wind_speed\":3.2,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.5,\"feels_like\":null,\"dew_point\":10.1,\"relative_humidity\":91.0,\"pressure\":1028.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.020123074823889,\"wind_speed\":6.8,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":10.0,\"relative_humidity\":91.0,\"pressure\":1028.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2590.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0423222294922834,\"wind_speed\":4.3,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":10.2,\"relative_humidity\":92.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-30T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2940.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0394979799019253,\"wind_speed\":2.5,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":11.2,\"feels_like\":null,\"dew_point\":10.2,\"relative_humidity\":93.0,\"pressure\":1028.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":1970.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9424136607492761,\"wind_speed\":3.2,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":10.4,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":93.0,\"pressure\":1028.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2510.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9395134892129953,\"wind_speed\":4.7,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":94.0,\"pressure\":1028.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2060.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0085238952986137,\"wind_speed\":6.1,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.6,\"feels_like\":null,\"dew_point\":9.9,\"relative_humidity\":95.0,\"pressure\":1028.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":240.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0050721659347364,\"wind_speed\":5.8,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.4,\"feels_like\":null,\"dew_point\":9.8,\"relative_humidity\":96.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2970.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0173313171547624,\"wind_speed\":6.8,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":9.9,\"relative_humidity\":96.0,\"pressure\":1028.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3290.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9963174492677347,\"wind_speed\":3.6,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":9.8,\"relative_humidity\":95.0,\"pressure\":1029.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T06:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5990.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":2.0173313171547624,\"wind_speed\":4.3,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":10.0,\"relative_humidity\":96.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T07:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":170.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9753035813807054,\"wind_speed\":5.4,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":9.7,\"relative_humidity\":94.0,\"pressure\":1029.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5700.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9513699189462126,\"wind_speed\":7.6,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":94.0,\"pressure\":1029.2,\"ozone\":null,\"ghi\":15.885486574503958,\"dni\":0.0,\"dhi\":15.885486574503958}",
"{\"date_time\":\"2024-10-31T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8690.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9835068514961423,\"wind_speed\":5.0,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":11.1,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":91.0,\"pressure\":1029.5,\"ozone\":null,\"ghi\":59.547045720693454,\"dni\":0.0,\"dhi\":59.547045720693454}",
"{\"date_time\":\"2024-10-31T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":15600.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9682244315574093,\"wind_speed\":2.2,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":9.7,\"relative_humidity\":86.0,\"pressure\":1029.59,\"ozone\":null,\"ghi\":98.66949090664382,\"dni\":0.0,\"dhi\":98.66949090664382}",
"{\"date_time\":\"2024-10-31T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25190.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9682244315574093,\"wind_speed\":2.2,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":86.0,\"pressure\":1029.3,\"ozone\":null,\"ghi\":124.5826522461644,\"dni\":0.0,\"dhi\":124.5826522461644}",
"{\"date_time\":\"2024-10-31T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":33140.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9459652102522924,\"wind_speed\":7.6,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":12.9,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":80.0,\"pressure\":1028.9,\"ozone\":null,\"ghi\":134.16758927009357,\"dni\":0.0,\"dhi\":134.16758927009357}",
"{\"date_time\":\"2024-10-31T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":36550.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.827634823721784,\"wind_speed\":9.0,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":13.6,\"feels_like\":null,\"dew_point\":8.7,\"relative_humidity\":72.0,\"pressure\":1028.4,\"ozone\":null,\"ghi\":126.46038107187944,\"dni\":0.0,\"dhi\":126.46038107187944}",
"{\"date_time\":\"2024-10-31T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":57100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8500141411416122,\"wind_speed\":9.0,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":13.8,\"feels_like\":null,\"dew_point\":8.8,\"relative_humidity\":72.0,\"pressure\":1027.9,\"ozone\":null,\"ghi\":102.22535560233246,\"dni\":0.0,\"dhi\":102.22535560233246}",
"{\"date_time\":\"2024-10-31T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":52570.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8317476150435485,\"wind_speed\":5.4,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":14.1,\"feels_like\":null,\"dew_point\":8.7,\"relative_humidity\":70.0,\"pressure\":1027.7,\"ozone\":null,\"ghi\":64.2799263126679,\"dni\":0.0,\"dhi\":64.2799263126679}",
"{\"date_time\":\"2024-10-31T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":45660.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.861303515362104,\"wind_speed\":5.8,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":13.9,\"feels_like\":null,\"dew_point\":8.9,\"relative_humidity\":72.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":20.199874599546952,\"dni\":0.0,\"dhi\":20.199874599546952}",
"{\"date_time\":\"2024-10-31T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":51810.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.848666949739678,\"wind_speed\":3.6,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":12.9,\"feels_like\":null,\"dew_point\":8.7,\"relative_humidity\":76.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":44110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8876079019771812,\"wind_speed\":2.9,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":11.6,\"feels_like\":null,\"dew_point\":9.0,\"relative_humidity\":84.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":48080.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9313264561503112,\"wind_speed\":2.2,\"wind_direction\":110.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":9.2,\"relative_humidity\":87.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T20:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":30700.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9443700269600115,\"wind_speed\":3.6,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.7,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":86.0,\"pressure\":1027.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T21:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25400.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9313264561503112,\"wind_speed\":2.9,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":87.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25670.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9298475508749398,\"wind_speed\":4.3,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":11.2,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":88.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-10-31T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":24450.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9281212967255343,\"wind_speed\":2.9,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":11.0,\"feels_like\":null,\"dew_point\":9.3,\"relative_humidity\":89.0,\"pressure\":1027.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":24100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.947556370182495,\"wind_speed\":2.9,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":10.8,\"feels_like\":null,\"dew_point\":9.4,\"relative_humidity\":91.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20630.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9753035813807054,\"wind_speed\":5.0,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":9.5,\"relative_humidity\":94.0,\"pressure\":1026.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":24020.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9807797336643354,\"wind_speed\":6.5,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":96.0,\"pressure\":1026.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8730.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9807797336643354,\"wind_speed\":9.4,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":96.0,\"pressure\":1026.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5560.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.9687457153823724,\"wind_speed\":9.0,\"wind_direction\":250.0,\"frost_chance\":null,\"temp_air\":10.1,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":96.0,\"pressure\":1025.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":5030.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.1,\"preciptable_water\":1.9807797336643354,\"wind_speed\":8.3,\"wind_direction\":250.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":9.6,\"relative_humidity\":96.0,\"pressure\":1026.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7340.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.1,\"preciptable_water\":1.9449002411739278,\"wind_speed\":7.9,\"wind_direction\":270.0,\"frost_chance\":null,\"temp_air\":9.9,\"feels_like\":null,\"dew_point\":9.2,\"relative_humidity\":96.0,\"pressure\":1025.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6340.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8669161262727356,\"wind_speed\":3.2,\"wind_direction\":260.0,\"frost_chance\":null,\"temp_air\":9.4,\"feels_like\":null,\"dew_point\":8.7,\"relative_humidity\":95.0,\"pressure\":1025.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12030.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8360532099381073,\"wind_speed\":5.8,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":9.3,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":94.0,\"pressure\":1025.7,\"ozone\":null,\"ghi\":14.712237798164347,\"dni\":0.0,\"dhi\":14.712237798164347}",
"{\"date_time\":\"2024-11-01T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":29210.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8301128504372512,\"wind_speed\":11.9,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":9.6,\"feels_like\":null,\"dew_point\":8.4,\"relative_humidity\":92.0,\"pressure\":1026.0,\"ozone\":null,\"ghi\":57.8606202914984,\"dni\":0.0,\"dhi\":57.8606202914984}",
"{\"date_time\":\"2024-11-01T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":24570.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8548700685143087,\"wind_speed\":12.2,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":10.0,\"feels_like\":null,\"dew_point\":8.6,\"relative_humidity\":91.0,\"pressure\":1025.9,\"ozone\":null,\"ghi\":96.83399790992624,\"dni\":0.0,\"dhi\":96.83399790992624}",
"{\"date_time\":\"2024-11-01T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":19290.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8475736466618398,\"wind_speed\":11.9,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":8.6,\"relative_humidity\":89.0,\"pressure\":1026.3,\"ozone\":null,\"ghi\":122.68179272930055,\"dni\":0.0,\"dhi\":122.68179272930055}",
"{\"date_time\":\"2024-11-01T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":38750.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8492203740585325,\"wind_speed\":12.6,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":8.6,\"relative_humidity\":88.0,\"pressure\":1025.8,\"ozone\":null,\"ghi\":132.24193611040087,\"dni\":0.0,\"dhi\":132.24193611040087}",
"{\"date_time\":\"2024-11-01T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":47950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.850632591753855,\"wind_speed\":13.3,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":10.7,\"feels_like\":null,\"dew_point\":8.6,\"relative_humidity\":87.0,\"pressure\":1025.2,\"ozone\":null,\"ghi\":124.54330452319319,\"dni\":0.0,\"dhi\":124.54330452319319}",
"{\"date_time\":\"2024-11-01T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":39370.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8087382990093692,\"wind_speed\":10.4,\"wind_direction\":240.0,\"frost_chance\":null,\"temp_air\":10.9,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":84.0,\"pressure\":1024.7,\"ozone\":null,\"ghi\":100.35711080592202,\"dni\":0.0,\"dhi\":100.35711080592202}",
"{\"date_time\":\"2024-11-01T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":45050.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8201971218479545,\"wind_speed\":10.1,\"wind_direction\":240.0,\"frost_chance\":null,\"temp_air\":11.2,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":83.0,\"pressure\":1024.3,\"ozone\":null,\"ghi\":62.53879690593976,\"dni\":0.0,\"dhi\":62.53879690593976}",
"{\"date_time\":\"2024-11-01T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":50820.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8313294168900875,\"wind_speed\":9.0,\"wind_direction\":220.0,\"frost_chance\":null,\"temp_air\":11.3,\"feels_like\":null,\"dew_point\":8.5,\"relative_humidity\":83.0,\"pressure\":1024.0,\"ozone\":null,\"ghi\":18.895062599949565,\"dni\":0.0,\"dhi\":18.895062599949565}",
"{\"date_time\":\"2024-11-01T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":44670.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7977443417069183,\"wind_speed\":9.0,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":10.8,\"feels_like\":null,\"dew_point\":8.2,\"relative_humidity\":84.0,\"pressure\":1024.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":49650.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7548156137872095,\"wind_speed\":5.4,\"wind_direction\":240.0,\"frost_chance\":null,\"temp_air\":10.6,\"feels_like\":null,\"dew_point\":7.9,\"relative_humidity\":83.0,\"pressure\":1024.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":34320.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.785295883291216,\"wind_speed\":9.0,\"wind_direction\":270.0,\"frost_chance\":null,\"temp_air\":10.3,\"feels_like\":null,\"dew_point\":8.0,\"relative_humidity\":86.0,\"pressure\":1025.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":28820.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7021447330910096,\"wind_speed\":13.3,\"wind_direction\":300.0,\"frost_chance\":null,\"temp_air\":10.1,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":83.0,\"pressure\":1025.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":27910.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6511793809783977,\"wind_speed\":12.2,\"wind_direction\":290.0,\"frost_chance\":null,\"temp_air\":9.8,\"feels_like\":null,\"dew_point\":6.9,\"relative_humidity\":82.0,\"pressure\":1025.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":27860.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.621195919413435,\"wind_speed\":10.4,\"wind_direction\":280.0,\"frost_chance\":null,\"temp_air\":9.3,\"feels_like\":null,\"dew_point\":6.6,\"relative_humidity\":83.0,\"pressure\":1026.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-01T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":26550.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.620878591918624,\"wind_speed\":7.9,\"wind_direction\":300.0,\"frost_chance\":null,\"temp_air\":9.1,\"feels_like\":null,\"dew_point\":6.5,\"relative_humidity\":84.0,\"pressure\":1026.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":33360.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.601582418205307,\"wind_speed\":6.1,\"wind_direction\":330.0,\"frost_chance\":null,\"temp_air\":9.1,\"feels_like\":null,\"dew_point\":6.4,\"relative_humidity\":83.0,\"pressure\":1027.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":30030.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.639401905310941,\"wind_speed\":4.0,\"wind_direction\":230.0,\"frost_chance\":null,\"temp_air\":8.9,\"feels_like\":null,\"dew_point\":6.7,\"relative_humidity\":86.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":34110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6572484750051133,\"wind_speed\":4.7,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":8.7,\"feels_like\":null,\"dew_point\":6.8,\"relative_humidity\":88.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":27390.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6572484750051133,\"wind_speed\":5.8,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":8.7,\"feels_like\":null,\"dew_point\":6.8,\"relative_humidity\":88.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25060.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6484088480622887,\"wind_speed\":2.9,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":8.8,\"feels_like\":null,\"dew_point\":6.8,\"relative_humidity\":87.0,\"pressure\":1027.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23760.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.70525053247823,\"wind_speed\":4.3,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":8.8,\"feels_like\":null,\"dew_point\":7.2,\"relative_humidity\":90.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":21300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6863033043395828,\"wind_speed\":5.8,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":8.8,\"feels_like\":null,\"dew_point\":7.1,\"relative_humidity\":89.0,\"pressure\":1028.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":18210.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.70525053247823,\"wind_speed\":4.0,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":8.8,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":90.0,\"pressure\":1028.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":18570.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7173594604852087,\"wind_speed\":1.8,\"wind_direction\":260.0,\"frost_chance\":null,\"temp_air\":9.1,\"feels_like\":null,\"dew_point\":7.4,\"relative_humidity\":89.0,\"pressure\":1029.4,\"ozone\":null,\"ghi\":13.575130473514132,\"dni\":0.0,\"dhi\":13.575130473514132}",
"{\"date_time\":\"2024-11-02T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":19450.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7306501955221831,\"wind_speed\":2.5,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":9.6,\"feels_like\":null,\"dew_point\":7.6,\"relative_humidity\":87.0,\"pressure\":1030.0,\"ozone\":null,\"ghi\":56.188815268002756,\"dni\":0.0,\"dhi\":56.188815268002756}",
"{\"date_time\":\"2024-11-02T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20370.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7861787703974465,\"wind_speed\":1.8,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":10.5,\"feels_like\":null,\"dew_point\":8.2,\"relative_humidity\":85.0,\"pressure\":1030.5,\"ozone\":null,\"ghi\":95.01168041969699,\"dni\":0.0,\"dhi\":95.01168041969699}",
"{\"date_time\":\"2024-11-02T11:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":32720.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7981315281399455,\"wind_speed\":1.8,\"wind_direction\":150.0,\"frost_chance\":null,\"temp_air\":11.4,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":81.0,\"pressure\":1030.4,\"ozone\":null,\"ghi\":149.95999291205823,\"dni\":12.813701117903975,\"dhi\":144.84157493139483}",
"{\"date_time\":\"2024-11-02T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":63890.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7119416707379567,\"wind_speed\":4.0,\"wind_direction\":110.0,\"frost_chance\":null,\"temp_air\":12.3,\"feels_like\":null,\"dew_point\":7.6,\"relative_humidity\":73.0,\"pressure\":1030.4,\"ozone\":null,\"ghi\":130.33518534986166,\"dni\":0.0,\"dhi\":130.33518534986166}",
"{\"date_time\":\"2024-11-02T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":75000.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7341189545032323,\"wind_speed\":5.0,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":13.2,\"feels_like\":null,\"dew_point\":7.8,\"relative_humidity\":70.0,\"pressure\":1030.09,\"ozone\":null,\"ghi\":122.64932256772778,\"dni\":0.0,\"dhi\":122.64932256772778}",
"{\"date_time\":\"2024-11-02T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":70510.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.7674144274726324,\"wind_speed\":4.0,\"wind_direction\":110.0,\"frost_chance\":null,\"temp_air\":12.6,\"feels_like\":null,\"dew_point\":8.2,\"relative_humidity\":74.0,\"pressure\":1029.8,\"ozone\":null,\"ghi\":98.5166184540087,\"dni\":0.0,\"dhi\":98.5166184540087}",
"{\"date_time\":\"2024-11-02T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":73670.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.815182384971893,\"wind_speed\":5.0,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":12.6,\"feels_like\":null,\"dew_point\":8.4,\"relative_humidity\":76.0,\"pressure\":1029.7,\"ozone\":null,\"ghi\":60.83135423410029,\"dni\":0.0,\"dhi\":60.83135423410029}",
"{\"date_time\":\"2024-11-02T16:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":75000.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.850307043871555,\"wind_speed\":7.6,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":12.7,\"feels_like\":null,\"dew_point\":8.7,\"relative_humidity\":77.0,\"pressure\":1030.0,\"ozone\":null,\"ghi\":21.89924464521845,\"dni\":0.0,\"dhi\":21.89924464521845}",
"{\"date_time\":\"2024-11-02T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":75000.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8087382990093692,\"wind_speed\":9.7,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":10.9,\"feels_like\":null,\"dew_point\":8.3,\"relative_humidity\":84.0,\"pressure\":1030.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":31130.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8833512151215333,\"wind_speed\":10.4,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":10.8,\"feels_like\":null,\"dew_point\":9.0,\"relative_humidity\":88.0,\"pressure\":1030.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":75000.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.8745229585567942,\"wind_speed\":11.5,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":11.1,\"feels_like\":null,\"dew_point\":8.9,\"relative_humidity\":86.0,\"pressure\":1031.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T20:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":75000.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.6919160225049534,\"wind_speed\":11.5,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":10.2,\"feels_like\":null,\"dew_point\":7.3,\"relative_humidity\":82.0,\"pressure\":1031.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":73550.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.5255496443180916,\"wind_speed\":8.6,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":8.3,\"feels_like\":null,\"dew_point\":5.6,\"relative_humidity\":83.0,\"pressure\":1032.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T22:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":64570.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4958879070461668,\"wind_speed\":9.0,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":7.2,\"feels_like\":null,\"dew_point\":5.2,\"relative_humidity\":87.0,\"pressure\":1032.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-02T23:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":66050.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.390501273322162,\"wind_speed\":5.0,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":5.8,\"feels_like\":null,\"dew_point\":4.0,\"relative_humidity\":88.0,\"pressure\":1033.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T00:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":68320.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4252928707378512,\"wind_speed\":5.8,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":6.4,\"feels_like\":null,\"dew_point\":4.4,\"relative_humidity\":87.0,\"pressure\":1033.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T01:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":62300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3403838896380875,\"wind_speed\":7.2,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":3.4,\"relative_humidity\":89.0,\"pressure\":1033.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T02:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":52300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3623270721453318,\"wind_speed\":6.8,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":4.9,\"feels_like\":null,\"dew_point\":3.6,\"relative_humidity\":91.0,\"pressure\":1033.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T03:00:00+01:00\",\"total_clouds\":75.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":46680.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2794609296319466,\"wind_speed\":9.0,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":3.3,\"feels_like\":null,\"dew_point\":2.5,\"relative_humidity\":94.0,\"pressure\":1033.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T04:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25080.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.199274182505457,\"wind_speed\":7.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.2,\"feels_like\":null,\"dew_point\":1.3,\"relative_humidity\":94.0,\"pressure\":1033.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T05:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":31420.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2392275984186982,\"wind_speed\":7.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.4,\"feels_like\":null,\"dew_point\":1.8,\"relative_humidity\":96.0,\"pressure\":1032.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T06:00:00+01:00\",\"total_clouds\":12.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7340.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2105500551096946,\"wind_speed\":5.8,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.0,\"feels_like\":null,\"dew_point\":1.4,\"relative_humidity\":96.0,\"pressure\":1032.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T07:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14380.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1434707306695828,\"wind_speed\":6.1,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":1.2,\"feels_like\":null,\"dew_point\":0.5,\"relative_humidity\":95.0,\"pressure\":1032.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T08:00:00+01:00\",\"total_clouds\":25.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":190.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1488384034632042,\"wind_speed\":6.1,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":1.1,\"feels_like\":null,\"dew_point\":0.5,\"relative_humidity\":96.0,\"pressure\":1033.09,\"ozone\":null,\"ghi\":29.853526900099773,\"dni\":0.0,\"dhi\":29.853526900099773}",
"{\"date_time\":\"2024-11-03T09:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14830.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1982804081056098,\"wind_speed\":6.5,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":3.5,\"feels_like\":null,\"dew_point\":1.6,\"relative_humidity\":87.0,\"pressure\":1033.5,\"ozone\":null,\"ghi\":155.80764565813612,\"dni\":359.7647285960036,\"dhi\":73.0052545725479}",
"{\"date_time\":\"2024-11-03T10:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":29490.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3676770322583323,\"wind_speed\":3.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":6.9,\"feels_like\":null,\"dew_point\":3.9,\"relative_humidity\":81.0,\"pressure\":1033.09,\"ozone\":null,\"ghi\":266.29582531703034,\"dni\":542.6987046512027,\"dhi\":86.93886764577462}",
"{\"date_time\":\"2024-11-03T11:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":35910.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.413965797496583,\"wind_speed\":3.2,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":8.5,\"feels_like\":null,\"dew_point\":4.5,\"relative_humidity\":76.0,\"pressure\":1032.59,\"ozone\":null,\"ghi\":258.0721216984891,\"dni\":250.78797925254707,\"dhi\":159.12427101947398}",
"{\"date_time\":\"2024-11-03T12:00:00+01:00\",\"total_clouds\":62.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":40750.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.441145619265592,\"wind_speed\":5.0,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":10.4,\"feels_like\":null,\"dew_point\":5.0,\"relative_humidity\":69.0,\"pressure\":1032.0,\"ozone\":null,\"ghi\":219.09625671981206,\"dni\":99.04311158893928,\"dhi\":177.70064914620303}",
"{\"date_time\":\"2024-11-03T13:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":45690.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.510497819567314,\"wind_speed\":4.3,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":5.8,\"relative_humidity\":66.0,\"pressure\":1031.3,\"ozone\":null,\"ghi\":262.09146259237264,\"dni\":252.71731147093138,\"dhi\":161.2289221818615}",
"{\"date_time\":\"2024-11-03T14:00:00+01:00\",\"total_clouds\":25.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20640.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4989629850536337,\"wind_speed\":4.7,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":12.8,\"feels_like\":null,\"dew_point\":5.8,\"relative_humidity\":62.0,\"pressure\":1030.7,\"ozone\":null,\"ghi\":231.40091702563853,\"dni\":315.5806670947059,\"dhi\":124.32565938039977}",
"{\"date_time\":\"2024-11-03T15:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":39980.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.5138875813494541,\"wind_speed\":2.9,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":12.7,\"feels_like\":null,\"dew_point\":5.8,\"relative_humidity\":63.0,\"pressure\":1030.3,\"ozone\":null,\"ghi\":128.3738640423681,\"dni\":143.98607922934983,\"dhi\":93.44607846301753}",
"{\"date_time\":\"2024-11-03T16:00:00+01:00\",\"total_clouds\":25.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":52750.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.510497819567314,\"wind_speed\":3.2,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":11.9,\"feels_like\":null,\"dew_point\":5.8,\"relative_humidity\":66.0,\"pressure\":1029.8,\"ozone\":null,\"ghi\":39.33002179401603,\"dni\":3.9206199292686486,\"dhi\":38.87702761430884}",
"{\"date_time\":\"2024-11-03T17:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":49460.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4883850499964026,\"wind_speed\":4.0,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":8.5,\"feels_like\":null,\"dew_point\":5.3,\"relative_humidity\":80.0,\"pressure\":1029.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":26380.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.398891798445423,\"wind_speed\":3.2,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":5.9,\"feels_like\":null,\"dew_point\":4.1,\"relative_humidity\":88.0,\"pressure\":1030.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":32120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3609257229104745,\"wind_speed\":5.0,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":4.7,\"feels_like\":null,\"dew_point\":3.5,\"relative_humidity\":92.0,\"pressure\":1030.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T20:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":24750.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2733596216558292,\"wind_speed\":5.8,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":3.4,\"feels_like\":null,\"dew_point\":2.4,\"relative_humidity\":93.0,\"pressure\":1030.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T21:00:00+01:00\",\"total_clouds\":25.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":16220.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.226318977601837,\"wind_speed\":5.8,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":2.4,\"feels_like\":null,\"dew_point\":1.6,\"relative_humidity\":95.0,\"pressure\":1031.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T22:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8920.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1965035965509712,\"wind_speed\":6.1,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":1.8,\"feels_like\":null,\"dew_point\":1.2,\"relative_humidity\":96.0,\"pressure\":1031.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-03T23:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6530.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1368713367604626,\"wind_speed\":7.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":1.1,\"feels_like\":null,\"dew_point\":0.4,\"relative_humidity\":95.0,\"pressure\":1031.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T00:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":690.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1356387287723233,\"wind_speed\":7.2,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":0.9,\"feels_like\":null,\"dew_point\":0.4,\"relative_humidity\":96.0,\"pressure\":1031.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T01:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":290.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.090861721609437,\"wind_speed\":7.2,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":0.2,\"feels_like\":null,\"dew_point\":-0.3,\"relative_humidity\":96.0,\"pressure\":1031.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T02:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":480.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.0835031721133945,\"wind_speed\":7.2,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":-0.1,\"feels_like\":null,\"dew_point\":-0.5,\"relative_humidity\":97.0,\"pressure\":1031.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1009339806416942,\"wind_speed\":6.8,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":0.0,\"feels_like\":null,\"dew_point\":-0.3,\"relative_humidity\":98.0,\"pressure\":1031.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1135880074763005,\"wind_speed\":6.8,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":0.2,\"feels_like\":null,\"dew_point\":-0.1,\"relative_humidity\":98.0,\"pressure\":1031.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":160.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1264206880151613,\"wind_speed\":4.7,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":0.4,\"feels_like\":null,\"dew_point\":0.2,\"relative_humidity\":98.0,\"pressure\":1031.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1460092992885946,\"wind_speed\":4.3,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":0.7,\"feels_like\":null,\"dew_point\":0.5,\"relative_humidity\":98.0,\"pressure\":1031.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.157703271730315,\"wind_speed\":5.4,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":0.7,\"feels_like\":null,\"dew_point\":0.6,\"relative_humidity\":99.0,\"pressure\":1031.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":90.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1541137262196406,\"wind_speed\":5.0,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":1.0,\"feels_like\":null,\"dew_point\":0.6,\"relative_humidity\":97.0,\"pressure\":1032.0,\"ozone\":null,\"ghi\":11.417075555130713,\"dni\":0.0,\"dhi\":11.417075555130713}",
"{\"date_time\":\"2024-11-04T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.22143075481245,\"wind_speed\":5.0,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":1.8,\"feels_like\":null,\"dew_point\":1.6,\"relative_humidity\":98.0,\"pressure\":1032.2,\"ozone\":null,\"ghi\":52.89325642044978,\"dni\":0.0,\"dhi\":52.89325642044978}",
"{\"date_time\":\"2024-11-04T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":150.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3316895356787095,\"wind_speed\":5.0,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":3.1,\"feels_like\":null,\"dew_point\":2.9,\"relative_humidity\":99.0,\"pressure\":1032.0,\"ozone\":null,\"ghi\":91.41057965841121,\"dni\":0.0,\"dhi\":91.41057965841121}",
"{\"date_time\":\"2024-11-04T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3555118553110441,\"wind_speed\":4.0,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":3.4,\"feels_like\":null,\"dew_point\":3.2,\"relative_humidity\":99.0,\"pressure\":1031.7,\"ozone\":null,\"ghi\":117.07563013335894,\"dni\":0.0,\"dhi\":117.07563013335894}",
"{\"date_time\":\"2024-11-04T12:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":350.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.411639131909973,\"wind_speed\":3.6,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":4.6,\"feels_like\":null,\"dew_point\":4.0,\"relative_humidity\":96.0,\"pressure\":1030.9,\"ozone\":null,\"ghi\":157.14326578481194,\"dni\":14.639827403702798,\"dhi\":151.09589042551917}",
"{\"date_time\":\"2024-11-04T13:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":21410.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4587699149332078,\"wind_speed\":4.3,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":5.5,\"feels_like\":null,\"dew_point\":4.6,\"relative_humidity\":94.0,\"pressure\":1029.9,\"ozone\":null,\"ghi\":339.81370340762766,\"dni\":626.3444200231016,\"dhi\":92.86155820300033}",
"{\"date_time\":\"2024-11-04T14:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":39060.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.5252929600670635,\"wind_speed\":2.9,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":8.1,\"feels_like\":null,\"dew_point\":5.5,\"relative_humidity\":84.0,\"pressure\":1029.0,\"ozone\":null,\"ghi\":271.20800751093515,\"dni\":549.6301264862547,\"dhi\":87.32285840607426}",
"{\"date_time\":\"2024-11-04T15:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":36810.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.5619693663437841,\"wind_speed\":5.4,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":9.5,\"feels_like\":null,\"dew_point\":6.0,\"relative_humidity\":79.0,\"pressure\":1028.5,\"ozone\":null,\"ghi\":164.34557743320204,\"dni\":378.3659028909279,\"dhi\":74.29470308711969}",
"{\"date_time\":\"2024-11-04T16:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":61290.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.5325727324790135,\"wind_speed\":5.4,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":7.6,\"feels_like\":null,\"dew_point\":5.6,\"relative_humidity\":87.0,\"pressure\":1028.09,\"ozone\":null,\"ghi\":43.66802704534112,\"dni\":44.64467562847794,\"dhi\":38.705191642431814}",
"{\"date_time\":\"2024-11-04T17:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":45940.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3447696273196648,\"wind_speed\":4.3,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":4.5,\"feels_like\":null,\"dew_point\":3.3,\"relative_humidity\":92.0,\"pressure\":1028.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T18:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":44410.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2349798532829575,\"wind_speed\":4.3,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":2.7,\"feels_like\":null,\"dew_point\":1.8,\"relative_humidity\":94.0,\"pressure\":1028.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T19:00:00+01:00\",\"total_clouds\":12.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":44940.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2247906544736582,\"wind_speed\":6.1,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.2,\"feels_like\":null,\"dew_point\":1.7,\"relative_humidity\":96.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T20:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":47840.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2160391817564757,\"wind_speed\":7.6,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":1.9,\"feels_like\":null,\"dew_point\":1.5,\"relative_humidity\":97.0,\"pressure\":1029.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T21:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":37930.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2430135737816375,\"wind_speed\":7.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":2.1,\"feels_like\":null,\"dew_point\":1.9,\"relative_humidity\":98.0,\"pressure\":1029.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T22:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":18950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2503071264418593,\"wind_speed\":4.0,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.2,\"feels_like\":null,\"dew_point\":1.9,\"relative_humidity\":98.0,\"pressure\":1029.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-04T23:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.255697385759001,\"wind_speed\":7.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.1,\"feels_like\":null,\"dew_point\":1.9,\"relative_humidity\":99.0,\"pressure\":1029.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T00:00:00+01:00\",\"total_clouds\":12.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6660.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2430135737816375,\"wind_speed\":7.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.1,\"feels_like\":null,\"dew_point\":1.8,\"relative_humidity\":98.0,\"pressure\":1028.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T01:00:00+01:00\",\"total_clouds\":12.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":1020.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1847396035714295,\"wind_speed\":8.6,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":1.1,\"feels_like\":null,\"dew_point\":0.9,\"relative_humidity\":99.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T02:00:00+01:00\",\"total_clouds\":75.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":4820.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.0896999604310647,\"wind_speed\":7.9,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":0.0,\"feels_like\":null,\"dew_point\":-0.4,\"relative_humidity\":97.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T03:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":230.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.0472255738551546,\"wind_speed\":7.9,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":-0.7,\"feels_like\":null,\"dew_point\":-1.1,\"relative_humidity\":97.0,\"pressure\":1028.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T04:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":1170.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.0402709520121391,\"wind_speed\":6.8,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":-1.0,\"feels_like\":null,\"dew_point\":-1.3,\"relative_humidity\":98.0,\"pressure\":1028.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T05:00:00+01:00\",\"total_clouds\":0.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":430.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":0.9964253886852408,\"wind_speed\":7.2,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":-1.4,\"feels_like\":null,\"dew_point\":-1.9,\"relative_humidity\":96.0,\"pressure\":1028.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T06:00:00+01:00\",\"total_clouds\":25.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":0.9908715007922109,\"wind_speed\":7.9,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":-1.5,\"feels_like\":null,\"dew_point\":-2.0,\"relative_humidity\":96.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T07:00:00+01:00\",\"total_clouds\":37.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":180.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":0.9845964358395034,\"wind_speed\":7.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":-1.8,\"feels_like\":null,\"dew_point\":-2.2,\"relative_humidity\":97.0,\"pressure\":1028.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T08:00:00+01:00\",\"total_clouds\":87.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":250.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.01904093258332,\"wind_speed\":7.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":-1.0,\"feels_like\":null,\"dew_point\":-1.5,\"relative_humidity\":96.0,\"pressure\":1029.2,\"ozone\":null,\"ghi\":12.910799386398546,\"dni\":0.0,\"dhi\":12.910799386398546}",
"{\"date_time\":\"2024-11-05T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":150.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.1949683522039627,\"wind_speed\":4.7,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":1.6,\"feels_like\":null,\"dew_point\":1.1,\"relative_humidity\":97.0,\"pressure\":1029.09,\"ozone\":null,\"ghi\":51.27161617785122,\"dni\":0.0,\"dhi\":51.27161617785122}",
"{\"date_time\":\"2024-11-05T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2630.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2612560203740841,\"wind_speed\":3.2,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":2.7,\"feels_like\":null,\"dew_point\":2.2,\"relative_humidity\":96.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":89.63381300017787,\"dni\":0.0,\"dhi\":89.63381300017787}",
"{\"date_time\":\"2024-11-05T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":11070.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2885323670339885,\"wind_speed\":5.8,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":3.6,\"feels_like\":null,\"dew_point\":2.6,\"relative_humidity\":93.0,\"pressure\":1028.2,\"ozone\":null,\"ghi\":115.24250625572647,\"dni\":0.0,\"dhi\":115.24250625572647}",
"{\"date_time\":\"2024-11-05T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12260.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3288271029735363,\"wind_speed\":7.9,\"wind_direction\":100.0,\"frost_chance\":null,\"temp_air\":4.3,\"feels_like\":null,\"dew_point\":3.2,\"relative_humidity\":92.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":124.73888203411694,\"dni\":0.0,\"dhi\":124.73888203411694}",
"{\"date_time\":\"2024-11-05T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13640.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3461330520092738,\"wind_speed\":12.6,\"wind_direction\":160.0,\"frost_chance\":null,\"temp_air\":4.7,\"feels_like\":null,\"dew_point\":3.4,\"relative_humidity\":91.0,\"pressure\":1027.7,\"ozone\":null,\"ghi\":117.11630570621142,\"dni\":0.0,\"dhi\":117.11630570621142}",
"{\"date_time\":\"2024-11-05T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14990.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.380031254206577,\"wind_speed\":10.8,\"wind_direction\":170.0,\"frost_chance\":null,\"temp_air\":5.3,\"feels_like\":null,\"dew_point\":3.8,\"relative_humidity\":90.0,\"pressure\":1027.5,\"ozone\":null,\"ghi\":93.17139021360312,\"dni\":0.0,\"dhi\":93.17139021360312}",
"{\"date_time\":\"2024-11-05T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14400.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3738841320739368,\"wind_speed\":8.6,\"wind_direction\":160.0,\"frost_chance\":null,\"temp_air\":5.6,\"feels_like\":null,\"dew_point\":3.8,\"relative_humidity\":88.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":55.91961682093465,\"dni\":0.0,\"dhi\":55.91961682093465}",
"{\"date_time\":\"2024-11-05T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":15490.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.381175770521867,\"wind_speed\":6.1,\"wind_direction\":150.0,\"frost_chance\":null,\"temp_air\":5.5,\"feels_like\":null,\"dew_point\":3.8,\"relative_humidity\":89.0,\"pressure\":1027.3,\"ozone\":null,\"ghi\":14.182910983589771,\"dni\":0.0,\"dhi\":14.182910983589771}",
"{\"date_time\":\"2024-11-05T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12550.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4260322960134626,\"wind_speed\":5.4,\"wind_direction\":140.0,\"frost_chance\":null,\"temp_air\":5.3,\"feels_like\":null,\"dew_point\":4.2,\"relative_humidity\":93.0,\"pressure\":1027.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3050.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4479915832345371,\"wind_speed\":4.0,\"wind_direction\":100.0,\"frost_chance\":null,\"temp_air\":5.2,\"feels_like\":null,\"dew_point\":4.4,\"relative_humidity\":95.0,\"pressure\":1027.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":470.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4346133748222865,\"wind_speed\":3.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":4.3,\"relative_humidity\":93.0,\"pressure\":1027.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":320.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4346133748222865,\"wind_speed\":5.0,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":4.4,\"relative_humidity\":93.0,\"pressure\":1027.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":350.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4587699149332078,\"wind_speed\":4.0,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":5.5,\"feels_like\":null,\"dew_point\":4.7,\"relative_humidity\":94.0,\"pressure\":1027.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4808912256230051,\"wind_speed\":4.0,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":4.8,\"relative_humidity\":96.0,\"pressure\":1027.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-05T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":170.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4873670184226437,\"wind_speed\":3.6,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":5.3,\"feels_like\":null,\"dew_point\":4.9,\"relative_humidity\":97.0,\"pressure\":1027.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":210.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4784756165657906,\"wind_speed\":2.5,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":5.2,\"feels_like\":null,\"dew_point\":4.8,\"relative_humidity\":97.0,\"pressure\":1027.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":170.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4696426588157527,\"wind_speed\":2.5,\"wind_direction\":10.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":4.7,\"relative_humidity\":97.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":690.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4696426588157527,\"wind_speed\":3.2,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":4.7,\"relative_humidity\":97.0,\"pressure\":1028.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.452150835143925,\"wind_speed\":5.8,\"wind_direction\":200.0,\"frost_chance\":null,\"temp_air\":4.9,\"feels_like\":null,\"dew_point\":4.5,\"relative_humidity\":97.0,\"pressure\":1028.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2150.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.1,\"preciptable_water\":1.4458073416320942,\"wind_speed\":5.0,\"wind_direction\":190.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":4.4,\"relative_humidity\":96.0,\"pressure\":1028.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":2410.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4608678347740953,\"wind_speed\":2.5,\"wind_direction\":210.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":4.5,\"relative_humidity\":97.0,\"pressure\":1028.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":3520.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.484793614061276,\"wind_speed\":1.8,\"wind_direction\":180.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":4.8,\"relative_humidity\":98.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4632335999001638,\"wind_speed\":2.2,\"wind_direction\":160.0,\"frost_chance\":null,\"temp_air\":5.2,\"feels_like\":null,\"dew_point\":4.6,\"relative_humidity\":96.0,\"pressure\":1029.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12750.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4808912256230051,\"wind_speed\":1.8,\"wind_direction\":120.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":4.9,\"relative_humidity\":96.0,\"pressure\":1029.7,\"ozone\":null,\"ghi\":9.426582342676907,\"dni\":0.0,\"dhi\":9.426582342676907}",
"{\"date_time\":\"2024-11-06T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":6980.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4831703698525456,\"wind_speed\":2.2,\"wind_direction\":340.0,\"frost_chance\":null,\"temp_air\":5.6,\"feels_like\":null,\"dew_point\":4.8,\"relative_humidity\":95.0,\"pressure\":1030.5,\"ozone\":null,\"ghi\":49.66881557471828,\"dni\":0.0,\"dhi\":49.66881557471828}",
"{\"date_time\":\"2024-11-06T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":15760.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4233298143003077,\"wind_speed\":3.2,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":6.0,\"feels_like\":null,\"dew_point\":4.3,\"relative_humidity\":89.0,\"pressure\":1030.9,\"ozone\":null,\"ghi\":87.87424898748947,\"dni\":0.0,\"dhi\":87.87424898748947}",
"{\"date_time\":\"2024-11-06T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":29630.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4252928707378512,\"wind_speed\":5.0,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":6.4,\"feels_like\":null,\"dew_point\":4.4,\"relative_humidity\":87.0,\"pressure\":1031.5,\"ozone\":null,\"ghi\":113.42891032649719,\"dni\":0.0,\"dhi\":113.42891032649719}",
"{\"date_time\":\"2024-11-06T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":19110.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.409437502900786,\"wind_speed\":3.6,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":6.6,\"feels_like\":null,\"dew_point\":4.2,\"relative_humidity\":85.0,\"pressure\":1031.59,\"ozone\":null,\"ghi\":122.91825781571202,\"dni\":0.0,\"dhi\":122.91825781571202}",
"{\"date_time\":\"2024-11-06T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3846109217160805,\"wind_speed\":5.0,\"wind_direction\":360.0,\"frost_chance\":null,\"temp_air\":6.7,\"feels_like\":null,\"dew_point\":4.0,\"relative_humidity\":83.0,\"pressure\":1031.7,\"ozone\":null,\"ghi\":115.32501526045236,\"dni\":0.0,\"dhi\":115.32501526045236}",
"{\"date_time\":\"2024-11-06T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":28400.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4012929810138648,\"wind_speed\":4.0,\"wind_direction\":340.0,\"frost_chance\":null,\"temp_air\":6.7,\"feels_like\":null,\"dew_point\":4.2,\"relative_humidity\":84.0,\"pressure\":1031.5,\"ozone\":null,\"ghi\":91.45155097121383,\"dni\":0.0,\"dhi\":91.45155097121383}",
"{\"date_time\":\"2024-11-06T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":25950.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3844732659494836,\"wind_speed\":4.3,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":6.5,\"feels_like\":null,\"dew_point\":4.1,\"relative_humidity\":84.0,\"pressure\":1031.59,\"ozone\":null,\"ghi\":54.35521490956087,\"dni\":0.0,\"dhi\":54.35521490956087}",
"{\"date_time\":\"2024-11-06T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":22070.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3678703280167073,\"wind_speed\":4.7,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":6.3,\"feels_like\":null,\"dew_point\":3.8,\"relative_humidity\":84.0,\"pressure\":1031.8,\"ozone\":null,\"ghi\":13.133993037937158,\"dni\":0.0,\"dhi\":13.133993037937158}",
"{\"date_time\":\"2024-11-06T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":22360.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.36757098972718,\"wind_speed\":6.1,\"wind_direction\":100.0,\"frost_chance\":null,\"temp_air\":6.1,\"feels_like\":null,\"dew_point\":3.8,\"relative_humidity\":85.0,\"pressure\":1032.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17470.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3829953007358158,\"wind_speed\":7.6,\"wind_direction\":110.0,\"frost_chance\":null,\"temp_air\":5.9,\"feels_like\":null,\"dew_point\":3.9,\"relative_humidity\":87.0,\"pressure\":1032.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":18050.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3114348533433033,\"wind_speed\":2.2,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":5.6,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":84.0,\"pressure\":1032.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":16940.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3112057726870359,\"wind_speed\":4.7,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":85.0,\"pressure\":1032.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":16260.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3266317229539424,\"wind_speed\":5.8,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":3.3,\"relative_humidity\":86.0,\"pressure\":1033.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14540.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3112057726870359,\"wind_speed\":4.3,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":3.0,\"relative_humidity\":85.0,\"pressure\":1033.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-06T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14150.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2803293999126435,\"wind_speed\":2.9,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":5.2,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":84.0,\"pressure\":1033.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2878311958694741,\"wind_speed\":6.5,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":85.0,\"pressure\":1033.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20750.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2952024102120847,\"wind_speed\":6.5,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":2.9,\"relative_humidity\":86.0,\"pressure\":1033.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":19330.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2725033091467384,\"wind_speed\":5.4,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":4.9,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":85.0,\"pressure\":1033.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17310.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2500209307860815,\"wind_speed\":5.0,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":2.4,\"relative_humidity\":83.0,\"pressure\":1033.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":28980.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2878311958694741,\"wind_speed\":7.2,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":85.0,\"pressure\":1033.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":22600.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2726802406239508,\"wind_speed\":7.6,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":84.0,\"pressure\":1033.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":21600.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2952024102120847,\"wind_speed\":5.4,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":86.0,\"pressure\":1033.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":19120.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2803293999126435,\"wind_speed\":5.8,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":5.2,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":84.0,\"pressure\":1033.3,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":16970.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3112057726870359,\"wind_speed\":5.4,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":85.0,\"pressure\":1033.8,\"ozone\":null,\"ghi\":8.498732233274223,\"dni\":0.0,\"dhi\":8.498732233274223}",
"{\"date_time\":\"2024-11-07T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20190.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3036333508504807,\"wind_speed\":6.8,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":5.7,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":83.0,\"pressure\":1034.2,\"ozone\":null,\"ghi\":48.08590935974426,\"dni\":0.0,\"dhi\":48.08590935974426}",
"{\"date_time\":\"2024-11-07T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20410.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.311382525535115,\"wind_speed\":6.1,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":6.0,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":82.0,\"pressure\":1034.4,\"ozone\":null,\"ghi\":86.13289253240517,\"dni\":0.0,\"dhi\":86.13289253240517}",
"{\"date_time\":\"2024-11-07T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23940.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3032147078576655,\"wind_speed\":6.8,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":6.1,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":81.0,\"pressure\":1034.5,\"ozone\":null,\"ghi\":111.63587311647375,\"dni\":0.0,\"dhi\":111.63587311647375}",
"{\"date_time\":\"2024-11-07T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":22710.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3106141340118171,\"wind_speed\":7.9,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":6.4,\"feels_like\":null,\"dew_point\":3.3,\"relative_humidity\":80.0,\"pressure\":1034.0,\"ozone\":null,\"ghi\":121.12176907062565,\"dni\":0.0,\"dhi\":121.12176907062565}",
"{\"date_time\":\"2024-11-07T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":22430.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2701653045869423,\"wind_speed\":9.4,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":6.3,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":78.0,\"pressure\":1033.7,\"ozone\":null,\"ghi\":113.56192265137761,\"dni\":0.0,\"dhi\":113.56192265137761}",
"{\"date_time\":\"2024-11-07T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23210.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3020641429763,\"wind_speed\":7.9,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":6.5,\"feels_like\":null,\"dew_point\":3.2,\"relative_humidity\":79.0,\"pressure\":1033.3,\"ozone\":null,\"ghi\":89.7641924158202,\"dni\":0.0,\"dhi\":89.7641924158202}",
"{\"date_time\":\"2024-11-07T15:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23220.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2845185659293759,\"wind_speed\":7.2,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":6.7,\"feels_like\":null,\"dew_point\":3.0,\"relative_humidity\":77.0,\"pressure\":1032.9,\"ozone\":null,\"ghi\":52.82846758743223,\"dni\":0.0,\"dhi\":52.82846758743223}",
"{\"date_time\":\"2024-11-07T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":24990.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.293366179132486,\"wind_speed\":8.3,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":6.6,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":78.0,\"pressure\":1032.8,\"ozone\":null,\"ghi\":12.137164591797857,\"dni\":0.0,\"dhi\":12.137164591797857}",
"{\"date_time\":\"2024-11-07T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23620.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2855823183816633,\"wind_speed\":5.8,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":6.5,\"feels_like\":null,\"dew_point\":2.9,\"relative_humidity\":78.0,\"pressure\":1032.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":20330.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2949042355874099,\"wind_speed\":5.0,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":6.2,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":80.0,\"pressure\":1032.8,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":23590.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2625316296977247,\"wind_speed\":4.0,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":6.2,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":78.0,\"pressure\":1032.9,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":18610.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3032147078576655,\"wind_speed\":6.1,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":6.1,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":81.0,\"pressure\":1033.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":15020.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.311382525535115,\"wind_speed\":9.0,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":6.0,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":82.0,\"pressure\":1033.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14560.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.311382525535115,\"wind_speed\":9.4,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":6.0,\"feels_like\":null,\"dew_point\":3.1,\"relative_humidity\":82.0,\"pressure\":1033.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-07T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14290.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2956943683229234,\"wind_speed\":9.4,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":5.8,\"feels_like\":null,\"dew_point\":2.9,\"relative_humidity\":82.0,\"pressure\":1033.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T00:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13230.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2879269249366194,\"wind_speed\":9.0,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":5.7,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":82.0,\"pressure\":1033.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T01:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10600.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.295822533660645,\"wind_speed\":6.8,\"wind_direction\":80.0,\"frost_chance\":null,\"temp_air\":5.6,\"feels_like\":null,\"dew_point\":2.9,\"relative_humidity\":83.0,\"pressure\":1033.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T02:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2726954899905094,\"wind_speed\":9.0,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":5.3,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":83.0,\"pressure\":1032.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T03:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10800.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2726802406239508,\"wind_speed\":9.0,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":2.7,\"relative_humidity\":84.0,\"pressure\":1032.0,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T04:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10600.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.25003374788666,\"wind_speed\":9.0,\"wind_direction\":50.0,\"frost_chance\":null,\"temp_air\":4.8,\"feels_like\":null,\"dew_point\":2.3,\"relative_humidity\":84.0,\"pressure\":1031.5,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T05:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":11200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.257532681980306,\"wind_speed\":11.9,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":4.9,\"feels_like\":null,\"dew_point\":2.4,\"relative_humidity\":84.0,\"pressure\":1031.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T06:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":11200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2351842404212265,\"wind_speed\":9.0,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":4.6,\"feels_like\":null,\"dew_point\":2.2,\"relative_humidity\":84.0,\"pressure\":1031.09,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T07:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13800.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.22783313798752,\"wind_speed\":10.1,\"wind_direction\":30.0,\"frost_chance\":null,\"temp_air\":4.5,\"feels_like\":null,\"dew_point\":2.1,\"relative_humidity\":84.0,\"pressure\":1030.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T08:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2351842404212265,\"wind_speed\":9.0,\"wind_direction\":20.0,\"frost_chance\":null,\"temp_air\":4.6,\"feels_like\":null,\"dew_point\":2.1,\"relative_humidity\":84.0,\"pressure\":1030.59,\"ozone\":null,\"ghi\":7.618065280685061,\"dni\":0.0,\"dhi\":7.618065280685061}",
"{\"date_time\":\"2024-11-08T09:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":13200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.257532681980306,\"wind_speed\":2.9,\"wind_direction\":350.0,\"frost_chance\":null,\"temp_air\":4.9,\"feels_like\":null,\"dew_point\":2.5,\"relative_humidity\":84.0,\"pressure\":1030.5,\"ozone\":null,\"ghi\":46.52393962571433,\"dni\":0.0,\"dhi\":46.52393962571433}",
"{\"date_time\":\"2024-11-08T10:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12300.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2952024102120847,\"wind_speed\":5.0,\"wind_direction\":40.0,\"frost_chance\":null,\"temp_air\":5.0,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":86.0,\"pressure\":1030.3,\"ozone\":null,\"ghi\":84.41073757319909,\"dni\":0.0,\"dhi\":84.41073757319909}",
"{\"date_time\":\"2024-11-08T11:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":11900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3635859720970902,\"wind_speed\":6.8,\"wind_direction\":60.0,\"frost_chance\":null,\"temp_air\":5.1,\"feels_like\":null,\"dew_point\":3.6,\"relative_humidity\":90.0,\"pressure\":1030.2,\"ozone\":null,\"ghi\":109.86441465473348,\"dni\":0.0,\"dhi\":109.86441465473348}",
"{\"date_time\":\"2024-11-08T12:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9500.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4695070274881938,\"wind_speed\":7.9,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":5.8,\"feels_like\":null,\"dew_point\":4.7,\"relative_humidity\":93.0,\"pressure\":1029.5,\"ozone\":null,\"ghi\":119.35043772145661,\"dni\":0.0,\"dhi\":119.35043772145661}",
"{\"date_time\":\"2024-11-08T13:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":14500.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.498143692589727,\"wind_speed\":6.1,\"wind_direction\":70.0,\"frost_chance\":null,\"temp_air\":6.3,\"feels_like\":null,\"dew_point\":5.1,\"relative_humidity\":92.0,\"pressure\":1028.8,\"ozone\":null,\"ghi\":111.82801067882036,\"dni\":0.0,\"dhi\":111.82801067882036}",
"{\"date_time\":\"2024-11-08T14:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.1,\"preciptable_water\":1.4923455913067147,\"wind_speed\":11.2,\"wind_direction\":90.0,\"frost_chance\":null,\"temp_air\":6.6,\"feels_like\":null,\"dew_point\":5.1,\"relative_humidity\":90.0,\"pressure\":1028.4,\"ozone\":null,\"ghi\":88.1102014938565,\"dni\":0.0,\"dhi\":88.1102014938565}",
"{\"date_time\":\"2024-11-08T15:00:00+01:00\",\"total_clouds\":88.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4801944829988296,\"wind_speed\":14.0,\"wind_direction\":130.0,\"frost_chance\":null,\"temp_air\":6.1,\"feels_like\":null,\"dew_point\":4.9,\"relative_humidity\":92.0,\"pressure\":1028.4,\"ozone\":null,\"ghi\":62.78155778969431,\"dni\":0.0,\"dhi\":62.78155778969431}",
"{\"date_time\":\"2024-11-08T16:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":12000.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.456767265035836,\"wind_speed\":13.0,\"wind_direction\":120.0,\"frost_chance\":null,\"temp_air\":6.2,\"feels_like\":null,\"dew_point\":4.7,\"relative_humidity\":90.0,\"pressure\":1028.3,\"ozone\":null,\"ghi\":11.192374342113588,\"dni\":0.0,\"dhi\":11.192374342113588}",
"{\"date_time\":\"2024-11-08T17:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":7900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.4243946591461507,\"wind_speed\":11.2,\"wind_direction\":110.0,\"frost_chance\":null,\"temp_air\":6.2,\"feels_like\":null,\"dew_point\":4.4,\"relative_humidity\":88.0,\"pressure\":1028.2,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T18:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9200.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3913448746531096,\"wind_speed\":15.1,\"wind_direction\":130.0,\"frost_chance\":null,\"temp_air\":6.0,\"feels_like\":null,\"dew_point\":4.0,\"relative_humidity\":87.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T19:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9500.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3829953007358158,\"wind_speed\":9.0,\"wind_direction\":140.0,\"frost_chance\":null,\"temp_air\":5.9,\"feels_like\":null,\"dew_point\":3.9,\"relative_humidity\":87.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T20:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":10100.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3588989716557491,\"wind_speed\":9.0,\"wind_direction\":140.0,\"frost_chance\":null,\"temp_air\":5.8,\"feels_like\":null,\"dew_point\":3.7,\"relative_humidity\":86.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T21:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":9900.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.3430978208225428,\"wind_speed\":9.0,\"wind_direction\":140.0,\"frost_chance\":null,\"temp_air\":5.8,\"feels_like\":null,\"dew_point\":3.5,\"relative_humidity\":85.0,\"pressure\":1028.59,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T22:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":17700.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.295822533660645,\"wind_speed\":10.1,\"wind_direction\":150.0,\"frost_chance\":null,\"temp_air\":5.6,\"feels_like\":null,\"dew_point\":3.0,\"relative_humidity\":83.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-08T23:00:00+01:00\",\"total_clouds\":100.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":15500.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":null,\"precip_amt\":0.0,\"preciptable_water\":1.2957798224201298,\"wind_speed\":10.1,\"wind_direction\":130.0,\"frost_chance\":null,\"temp_air\":5.4,\"feels_like\":null,\"dew_point\":2.9,\"relative_humidity\":84.0,\"pressure\":1028.7,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}",
"{\"date_time\":\"2024-11-09T00:00:00+01:00\",\"total_clouds\":86.0,\"low_clouds\":null,\"medium_clouds\":null,\"high_clouds\":null,\"visibility\":8700.0,\"fog\":null,\"precip_type\":null,\"precip_prob\":1.0,\"precip_amt\":0.0,\"preciptable_water\":null,\"wind_speed\":5.5,\"wind_direction\":87.0,\"frost_chance\":null,\"temp_air\":5.3,\"feels_like\":null,\"dew_point\":2.8,\"relative_humidity\":null,\"pressure\":1027.4,\"ozone\":null,\"ghi\":0.0,\"dni\":0.0,\"dhi\":0.0}"
]
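Each entry in the array above is a JSON-encoded string rather than a nested object, so consuming the fixture takes two decode passes: one for the outer array and one per record. A minimal sketch in Python, assuming the array is the file's top-level value; the fixture path and field accesses here are illustrative, not the test suite's actual loader:

import json
from pathlib import Path

# Hypothetical fixture path; the real tests resolve test data relative to the test module.
fixture = Path("tests/testdata/weatherforecast_brightsky_2.json")

# First pass: the file holds a JSON array of strings.
raw_records = json.loads(fixture.read_text())

# Second pass: each element is itself a JSON-encoded weather record.
records = [json.loads(entry) for entry in raw_records]

for record in records:
    # Missing measurements are serialized as null and decode to None.
    print(record["date_time"], record["temp_air"], record["ghi"])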
2221	tests/testdata/weatherforecast_clearout_1.html	vendored	Normal file
File diff suppressed because it is too large
1	tests/testdata/weatherforecast_clearout_1.json	vendored	Normal file
File diff suppressed because one or more lines are too long