2024-12-15 14:40:03 +01:00
|
|
|
import json
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from unittest.mock import Mock, patch
|
|
|
|
|
|
|
|
|
|
import pandas as pd
|
|
|
|
|
import pytest
|
|
|
|
|
|
2025-02-12 21:35:51 +01:00
|
|
|
from akkudoktoreos.core.cache import CacheFileStore
|
Add database support for measurements and historic prediction data. (#848)
The database supports backend selection, compression, incremental data load,
automatic data saving to storage, automatic vacuum and compaction.
Make SQLite3 and LMDB database backends available.
Update tests for new interface conventions regarding data sequences,
data containers, data providers. This includes the measurements provider and
the prediction providers.
Add database documentation.
The fix includes several bug fixes that are not directly related to the database
implementation but are necessary to keep EOS running properly and to test and
document the changes.
* fix: config eos test setup
Make the config_eos fixture generate a new instance of the config_eos singleton.
Use correct env names to setup data folder path.
* fix: startup with no config
Make cache and measurements complain about missing data path configuration but
do not bail out.
* fix: soc data preparation and usage for genetic optimization.
Search for soc measurements 48 hours around the optimization start time.
Only clamp soc to maximum in battery device simulation.
* fix: dashboard bailout on zero value solution display
Do not use zero values to calculate the chart values adjustment for display.
* fix: openapi generation script
Make the script also replace data_folder_path and data_output_path to hide
real (test) environment paths.
* feat: add make repeated task function
make_repeated_task allows to wrap a function to be repeated cyclically.
* chore: removed index based data sequence access
Index based data sequence access does not make sense as the sequence can be backed
by the database. The sequence is now purely time series data.
* chore: refactor eos startup to avoid module import startup
Avoid module import initialisation especially of the EOS configuration.
Config mutation, singleton initialization, logging setup, argparse parsing,
background task definitions depending on config and environment-dependent behavior
is now done at function startup.
* chore: introduce retention manager
A single long-running background task that owns the scheduling of all periodic
server-maintenance jobs (cache cleanup, DB autosave, …)
* chore: canonicalize timezone name for UTC
Timezone names that are semantically identical to UTC are canonicalized to UTC.
* chore: extend config file migration for default value handling
Extend the config file migration handling values None or nonexisting values
that will invoke a default value generation in the new config file. Also
adapt test to handle this situation.
* chore: extend datetime util test cases
* chore: make version test check for untracked files
Check for files that are not tracked by git. Version calculation will be
wrong if these files are not committed.
* chore: bump pandas to 3.0.0
Pandas 3.0 now performs inference on the appropriate resolution (a.k.a. unit)
for the output dtype which may become datetime64[us] (before it was ns). Also
numeric dtype detection is now more strict which needs a different detection for
numerics.
* chore: bump pydantic-settings to 2.12.0
pydantic-settings 2.12.0 under pytest creates a different behaviour. The tests
were adapted and a workaround was introduced. Also ConfigEOS was adapted
to allow for fine grain initialization control to be able to switch
off certain settings such as file settings during test.
* chore: remove sci learn kit from dependencies
The sci learn kit is not strictly necessary as long as we have scipy.
* chore: add documentation mode guarding for sphinx autosummary
Sphinx autosummary executes functions. Prevent exceptions in case of pure doc
mode.
* chore: adapt docker-build CI workflow to stricter GitHub handling
Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
2026-02-22 14:12:42 +01:00
|
|
|
from akkudoktoreos.core.coreabc import get_ems
|
2024-12-15 14:40:03 +01:00
|
|
|
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
|
|
|
|
|
from akkudoktoreos.utils.datetimeutil import to_datetime
|
|
|
|
|
|
|
|
|
|
# Directory holding the JSON test fixtures used by this module.
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")

# Sample raw BrightSky API response; loaded by the sample_brightsky_1_json fixture
# and (re)written from a live request in the system test below.
FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON = DIR_TESTDATA.joinpath("weatherforecast_brightsky_1.json")

# Serialized provider state (provider.model_dump_json); loaded by the
# sample_brightsky_2_json fixture and (re)written in the system test below.
FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON = DIR_TESTDATA.joinpath("weatherforecast_brightsky_2.json")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def provider(monkeypatch):
    """Fixture that configures the environment and creates a WeatherProvider instance."""
    # Select the BrightSky provider and give it valid coordinates via the
    # EOS environment-variable settings interface.
    env_settings = {
        "EOS_WEATHER__WEATHER_PROVIDER": "BrightSky",
        "EOS_GENERAL__LATITUDE": "50.0",
        "EOS_GENERAL__LONGITUDE": "10.0",
    }
    for name, value in env_settings.items():
        monkeypatch.setenv(name, value)
    return WeatherBrightSky()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def sample_brightsky_1_json():
    """Fixture that returns sample forecast data report."""
    # Load the canned BrightSky API response from the testdata directory.
    raw = FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON.read_text(encoding="utf-8")
    return json.loads(raw)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def sample_brightsky_2_json():
    """Fixture that returns sample forecast data report."""
    # Load the canned provider-state sample from the testdata directory.
    raw = FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON.read_text(encoding="utf-8")
    return json.loads(raw)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def cache_store():
    """A pytest fixture that creates a CacheFileStore instance for testing."""
    store = CacheFileStore()
    return store
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
# General forecast
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
2025-01-18 14:26:34 +01:00
|
|
|
def test_singleton_instance(provider):
    """Test that WeatherForecast behaves as a singleton."""
    # Constructing the provider again must yield the very same object.
    assert WeatherBrightSky() is provider
|
2024-12-15 14:40:03 +01:00
|
|
|
|
|
|
|
|
|
2025-01-18 14:26:34 +01:00
|
|
|
def test_invalid_provider(provider, monkeypatch):
    """Test requesting an unsupported provider."""
    # Point the provider selection at a name that does not exist.
    monkeypatch.setenv("EOS_WEATHER__WEATHER_PROVIDER", "<invalid>")
    provider.config.reset_settings()
    # The provider must report itself as disabled for an unknown selection.
    enabled = provider.enabled()
    assert not enabled
|
2024-12-15 14:40:03 +01:00
|
|
|
|
|
|
|
|
|
2025-01-18 14:26:34 +01:00
|
|
|
def test_invalid_coordinates(provider, monkeypatch):
    """Test invalid coordinates raise ValueError."""
    # Both coordinates are far outside the valid geographic range.
    monkeypatch.setenv("EOS_GENERAL__LATITUDE", "1000")
    monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "1000")
    # match="Latitude '1000' and/ or longitude `1000` out of valid range."
    with pytest.raises(ValueError):
        provider.config.reset_settings()
|
2024-12-15 14:40:03 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
# Irradiance calculation
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
2025-01-18 14:26:34 +01:00
|
|
|
def test_irridiance_estimate_from_cloud_cover(provider):
    """Test cloud cover to irradiance estimation."""
    # Three hourly cloud-cover samples on a fixed date.
    hourly_index = pd.date_range("2023-10-22", periods=3, freq="h")
    cloud_cover_data = pd.Series(data=[20, 50, 80], index=hourly_index)

    ghi, dni, dhi = provider.estimate_irradiance_from_cloud_cover(50.0, 10.0, cloud_cover_data)

    # All irradiance components are expected to be zero for these inputs.
    expected = [0, 0, 0]
    assert ghi == expected
    assert dhi == expected
    assert dni == expected
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
# BrightSky
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("requests.get")
def test_request_forecast(mock_get, provider, sample_brightsky_1_json):
    """Test requesting forecast from BrightSky."""
    # Fake a successful HTTP response carrying the sample payload.
    mock_response = Mock()
    mock_response.status_code = 200
    mock_response.content = json.dumps(sample_brightsky_1_json)
    mock_get.return_value = mock_response

    # Exercise the provider's raw forecast request.
    brightsky_data = provider._request_forecast()

    assert isinstance(brightsky_data, dict)
    # First weather record of the sample payload, spelled out in full.
    expected_first_record = {
        "timestamp": "2024-10-26T00:00:00+02:00",
        "source_id": 46567,
        "precipitation": 0.0,
        "pressure_msl": 1022.9,
        "sunshine": 0.0,
        "temperature": 6.2,
        "wind_direction": 40,
        "wind_speed": 4.7,
        "cloud_cover": 100,
        "dew_point": 5.8,
        "relative_humidity": 97,
        "visibility": 140,
        "wind_gust_direction": 70,
        "wind_gust_speed": 11.9,
        "condition": "dry",
        "precipitation_probability": None,
        "precipitation_probability_6h": None,
        "solar": None,
        "fallback_source_ids": {
            "wind_gust_speed": 219419,
            "pressure_msl": 219419,
            "cloud_cover": 219419,
            "wind_gust_direction": 219419,
            "wind_direction": 219419,
            "wind_speed": 219419,
            "sunshine": 219419,
            "visibility": 219419,
        },
        "icon": "cloudy",
    }
    assert brightsky_data["weather"][0] == expected_first_record
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("requests.get")
def test_update_data(mock_get, provider, sample_brightsky_1_json, cache_store):
    """Test fetching forecast from BrightSky."""
    # Fake a successful HTTP response carrying the sample payload.
    response = Mock()
    response.status_code = 200
    response.content = json.dumps(sample_brightsky_1_json)
    mock_get.return_value = response

    # Start from an empty cache so the HTTP request is actually issued.
    cache_store.clear(clear_all=True)

    # Anchor the EMS start time to the sample data's date, then update.
    ems_eos = get_ems()
    ems_eos.set_start_datetime(to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin"))
    provider.update_data(force_enable=True, force_update=True)

    # Exactly one HTTP request and 50 resulting forecast records.
    mock_get.assert_called_once()
    assert len(provider) == 50
|
2024-12-15 14:40:03 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
# Development BrightSky
|
|
|
|
|
# ------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
2025-02-12 21:35:51 +01:00
|
|
|
def test_brightsky_development_forecast_data(provider, config_eos, is_system_test):
    """Fetch data from real BrightSky server."""
    # Only runs against the real server during system tests.
    if not is_system_test:
        return

    # Preset, as this is usually done by update_data()
    get_ems().set_start_datetime(to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin"))
    config_eos.general.latitude = 50.0
    config_eos.general.longitude = 10.0

    # Capture the raw API response as fixture file 1.
    raw_forecast = provider._request_forecast()
    with FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON.open("w", encoding="utf-8", newline="\n") as f_out:
        json.dump(raw_forecast, f_out, indent=4)

    # Capture the processed provider state as fixture file 2.
    provider.update_data(force_enable=True, force_update=True)
    with FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON.open("w", encoding="utf-8", newline="\n") as f_out:
        f_out.write(provider.model_dump_json(indent=4))
|