2024-10-06 18:16:43 +02:00
|
|
|
import json
|
|
|
|
from pathlib import Path
|
2024-11-10 23:22:30 +01:00
|
|
|
from typing import Any
|
|
|
|
from unittest.mock import patch
|
2024-10-06 18:16:43 +02:00
|
|
|
|
2024-10-04 19:03:41 +02:00
|
|
|
import pytest
|
|
|
|
|
2024-12-30 13:41:39 +01:00
|
|
|
from akkudoktoreos.config.config import ConfigEOS
|
2024-11-19 21:47:43 +01:00
|
|
|
from akkudoktoreos.optimization.genetic import (
|
2024-11-15 22:27:25 +01:00
|
|
|
OptimizationParameters,
|
|
|
|
OptimizeResponse,
|
|
|
|
optimization_problem,
|
|
|
|
)
|
2024-12-24 13:10:31 +01:00
|
|
|
from akkudoktoreos.utils.visualize import (
|
|
|
|
prepare_visualize, # Import the new prepare_visualize
|
|
|
|
)
|
2024-10-04 19:03:41 +02:00
|
|
|
|
2024-10-06 18:16:43 +02:00
|
|
|
# Directory holding the JSON input fixtures and expected-output files for these tests.
DIR_TESTDATA = Path(__file__).parent / "testdata"
|
2024-10-04 19:03:41 +02:00
|
|
|
|
|
|
|
|
2024-11-10 23:22:30 +01:00
|
|
|
def compare_dict(actual: dict[str, Any], expected: dict[str, Any]):
    """Recursively assert that ``actual`` matches ``expected``.

    Both dicts must have exactly the same key set.  Nested dicts are compared
    recursively; numeric leaf values are compared with ``pytest.approx`` to
    tolerate floating point noise.

    Args:
        actual: The dictionary produced by the code under test.
        expected: The reference dictionary to compare against.

    Raises:
        AssertionError: If keys differ or any value does not match.
    """
    assert set(actual) == set(expected)

    for key, value in expected.items():
        if isinstance(value, dict):
            assert isinstance(actual[key], dict)
            compare_dict(actual[key], value)
        elif isinstance(value, list):
            assert isinstance(actual[key], list)
            # pytest.approx cannot handle dicts nested inside lists, so
            # compare element-wise and recurse into dict elements.
            assert len(actual[key]) == len(value)
            for actual_item, expected_item in zip(actual[key], value):
                if isinstance(expected_item, dict):
                    assert isinstance(actual_item, dict)
                    compare_dict(actual_item, expected_item)
                else:
                    assert actual_item == pytest.approx(expected_item)
        else:
            assert actual[key] == pytest.approx(value)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
    "fn_in, fn_out, ngen",
    [
        ("optimize_input_1.json", "optimize_result_1.json", 3),
        ("optimize_input_2.json", "optimize_result_2.json", 3),
        ("optimize_input_2.json", "optimize_result_2_full.json", 400),
    ],
)
def test_optimize(
    fn_in: str,
    fn_out: str,
    ngen: int,
    config_eos: ConfigEOS,
    is_full_run: bool,
):
    """Test optimierung_ems.

    Runs the genetic optimization on a JSON input fixture and compares the
    result against a stored expected-output fixture.  The freshly produced
    result is always written to ``new_<fn_out>`` so it can be reviewed and
    promoted to new reference data on an intended behavior change.

    Args:
        fn_in: Input fixture filename under ``DIR_TESTDATA``.
        fn_out: Expected-output fixture filename under ``DIR_TESTDATA``.
        ngen: Number of generations; runs with ``ngen > 10`` are skipped
            unless ``--full-run`` is given.
        config_eos: EOS configuration fixture.
        is_full_run: True when pytest was started with ``--full-run``.
    """
    # Assure configuration holds the correct values
    config_eos.merge_settings_from_dict(
        {"prediction": {"hours": 48}, "optimization": {"hours": 48}}
    )

    # Load input and output data
    file = DIR_TESTDATA / fn_in
    with file.open("r") as f_in:
        input_data = OptimizationParameters(**json.load(f_in))

    file = DIR_TESTDATA / fn_out
    # In case a new test case is added, we don't want to fail here, so the new output
    # is written to disk before the comparison.  Initialize to None so a missing
    # reference file yields a clear pytest failure below instead of a NameError.
    expected_result = None
    try:
        with file.open("r") as f_out:
            expected_result = OptimizeResponse(**json.load(f_out))
    except FileNotFoundError:
        pass

    opt_class = optimization_problem(fixed_seed=42)
    start_hour = 10

    # Activate with pytest --full-run
    if ngen > 10 and not is_full_run:
        pytest.skip()

    visualize_filename = str((DIR_TESTDATA / f"new_{fn_out}").with_suffix(".pdf"))

    # Redirect the visualization PDF next to the test data instead of the default
    # output location, while keeping the real implementation.
    with patch(
        "akkudoktoreos.utils.visualize.prepare_visualize",
        side_effect=lambda parameters, results, *args, **kwargs: prepare_visualize(
            parameters, results, filename=visualize_filename, **kwargs
        ),
    ) as prepare_visualize_patch:
        # Call the optimization function
        ergebnis = opt_class.optimierung_ems(
            parameters=input_data, start_hour=start_hour, ngen=ngen
        )
        # Write test output to file, so we can take it as new data on intended change
        TESTDATA_FILE = DIR_TESTDATA / f"new_{fn_out}"
        with TESTDATA_FILE.open("w", encoding="utf-8", newline="\n") as f_out:
            f_out.write(ergebnis.model_dump_json(indent=4, exclude_unset=True))

        # Fail with an actionable message when the reference fixture does not
        # exist yet (the fresh output above is the candidate reference data).
        if expected_result is None:
            pytest.fail(
                f"Expected result file '{fn_out}' is missing; "
                f"new output was written to '{TESTDATA_FILE}'."
            )

        assert ergebnis.result.Gesamtbilanz_Euro == pytest.approx(
            expected_result.result.Gesamtbilanz_Euro
        )

        # Assert that the output contains all expected entries.
        # This does not assert that the optimization always gives the same result!
        # Reproducibility and mathematical accuracy should be tested on the level of individual components.
        compare_dict(ergebnis.model_dump(), expected_result.model_dump())

        # The function creates a visualization result PDF as a side-effect.
        prepare_visualize_patch.assert_called_once()
        assert Path(visualize_filename).exists()
|