Improve caching. (#431)

* Move the caching module to core.

Add an in-memory cache for caching function and method results
during an energy management run (optimization run). Two decorators
are provided for methods and functions.

* Improve the file cache store by load and save functions.

Make EOS load the cache file store on startup and save it on shutdown.
Add a cyclic task that cleans the cache file store from outdated cache files.

* Improve startup of EOSdash by EOS

Make EOS start EOSdash in adherence to the path configuration given in EOS.
The whole environment from EOS is now passed to EOSdash.
Should also prevent test errors due to unwanted/ wrong config file creation.

Both servers now provide a health endpoint that can be used to detect whether
the server is running. This is also used for testing now.

* Improve startup of EOS

EOS now has got an energy management task that runs shortly after startup.
It tries to execute energy management runs with predictions newly fetched
or initialized from cached data on first run.

* Improve shutdown of EOS

EOS has now a shutdown task that shuts EOS down gracefully with some
time delay to allow REST API requests for shutdown or restart to be fully serviced.

* Improve EMS

Add energy management task for repeated energy management controlled by
startup delay and interval configuration parameters.
Translate EnergieManagementSystem to english EnergyManagement.

* Add administration endpoints

  - endpoints to control caching from REST API.
  - endpoints to control server restart (will not work on Windows) and shutdown from REST API

* Improve doc generation

Use "\n" line-end convention also on Windows when generating doc files.
Replace Windows specific 127.0.0.1 address by standard 0.0.0.0.

* Improve test support (to be able to test caching)

  - Add system test option to pytest for running tests with "real" resources
  - Add new test fixture to start server for test class and test function
  - Make kill signal adapt to Windows/ Linux
  - Use "\n" consistently for line ends when writing text files in doc test
  - Fix test_logging under Windows
  - Fix conftest config_default_dirs test fixture under Windows

From @Lasall

* Improve Windows support

 - Use 127.0.0.1 as default config host (model defaults) and
   additionally redirect 0.0.0.0 to localhost on Windows (because default
   config file still has 0.0.0.0).
 - Update install/startup instructions as package installation is
   required atm.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
Bobby Noelte
2025-02-12 21:35:51 +01:00
committed by GitHub
parent 1a2cb4d37d
commit 80bfe4d0f0
54 changed files with 3661 additions and 894 deletions

View File

@@ -1,18 +1,26 @@
import json
import logging
import os
import signal
import subprocess
import sys
import tempfile
import time
from contextlib import contextmanager
from http import HTTPStatus
from pathlib import Path
from typing import Optional
from typing import Generator, Optional, Union
from unittest.mock import PropertyMock, patch
import pendulum
import psutil
import pytest
from xprocess import ProcessStarter
import requests
from xprocess import ProcessStarter, XProcess
from akkudoktoreos.config.config import ConfigEOS, get_config
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.server.server import get_default_host
logger = get_logger(__name__)
@@ -48,6 +56,12 @@ def pytest_addoption(parser):
default=False,
help="Verify that user config file is non-existent (will also fail if user config file exists before test run).",
)
parser.addoption(
"--system-test",
action="store_true",
default=False,
help="System test mode. Tests may access real resources, like prediction providers!",
)
@pytest.fixture
@@ -64,6 +78,18 @@ def config_mixin(config_eos):
yield config_mixin_patch
@pytest.fixture
def is_system_test(request):
    """Report whether pytest was invoked with the --system-test option."""
    system_test_enabled = request.config.getoption("--system-test")
    yield bool(system_test_enabled)
@pytest.fixture
def prediction_eos():
    """Provide the application's global prediction instance."""
    from akkudoktoreos.prediction.prediction import get_prediction

    prediction = get_prediction()
    return prediction
@pytest.fixture
def devices_eos(config_mixin):
from akkudoktoreos.devices.devices import get_devices
@@ -87,13 +113,33 @@ def devices_mixin(devices_eos):
# Before activating, make sure that no user config file exists (e.g. ~/.config/net.akkudoktoreos.eos/EOS.config.json)
@pytest.fixture(autouse=True)
def cfg_non_existent(request):
    """Assert that no user/cwd EOS config file exists before and after each test.

    Only active when pytest is invoked with --check-config-side-effect; otherwise
    it simply yields so the test runs unchecked.
    """
    if not bool(request.config.getoption("--check-config-side-effect")):
        yield
        return
    # Before test
    from platformdirs import user_config_dir

    user_dir = user_config_dir(ConfigEOS.APP_NAME)
    user_config_file = Path(user_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME)
    cwd_config_file = Path.cwd().joinpath(ConfigEOS.CONFIG_FILE_NAME)
    assert (
        not user_config_file.exists()
    ), f"Config file {user_config_file} exists, please delete before test!"
    assert (
        not cwd_config_file.exists()
    ), f"Config file {cwd_config_file} exists, please delete before test!"
    # Yield to test
    yield
    # After test
    assert (
        not user_config_file.exists()
    ), f"Config file {user_config_file} created, please check test!"
    assert (
        not cwd_config_file.exists()
    ), f"Config file {cwd_config_file} created, please check test!"
@pytest.fixture(autouse=True)
@@ -149,52 +195,252 @@ def config_eos(
assert config_file.exists()
assert not config_file_cwd.exists()
assert config_default_dirs[-1] / "data" == config_eos.general.data_folder_path
assert config_default_dirs[-1] / "data/cache" == config_eos.general.data_cache_path
assert config_default_dirs[-1] / "data/cache" == config_eos.cache.path()
assert config_default_dirs[-1] / "data/output" == config_eos.general.data_output_path
return config_eos
@pytest.fixture
def config_default_dirs(tmpdir):
    """Fixture that provides a list of directories to be used as config dir.

    Returns:
        tuple[Path, Path, Path, Path]: (user config dir, cwd config dir,
        packaged default config dir, user data dir) — all but the packaged
        default rooted in pytest's per-test tmpdir.
    """
    tmp_user_home_dir = Path(tmpdir)

    # Default config directory from platform user config directory
    config_default_dir_user = tmp_user_home_dir / "config"

    # Default config directory from current working directory
    config_default_dir_cwd = tmp_user_home_dir / "cwd"
    config_default_dir_cwd.mkdir()

    # Default config directory from default config file
    config_default_dir_default = Path(__file__).parent.parent.joinpath("src/akkudoktoreos/data")

    # Default data directory from platform user data directory
    data_default_dir_user = tmp_user_home_dir

    return (
        config_default_dir_user,
        config_default_dir_cwd,
        config_default_dir_default,
        data_default_dir_user,
    )
@contextmanager
def server_base(xprocess: XProcess) -> Generator[dict[str, Union[str, int]], None, None]:
    """Fixture to start the server with temporary EOS_DIR and default config.

    Args:
        xprocess (XProcess): The pytest-xprocess fixture to manage the server process.

    Yields:
        dict[str, str]: A dictionary containing:
            - "server" (str): URL of the server.
            - "eos_dir" (str): Path to the temporary EOS_DIR.
    """
    host = get_default_host()
    port = 8503
    eosdash_port = 8504
    # Port of server may be still blocked by a server usage despite the other server already
    # shut down. CLOSE_WAIT, TIME_WAIT may typically take up to 120 seconds.
    server_timeout = 120

    server = f"http://{host}:{port}"
    eosdash_server = f"http://{host}:{eosdash_port}"
    eos_tmp_dir = tempfile.TemporaryDirectory()
    eos_dir = str(eos_tmp_dir.name)

    class Starter(ProcessStarter):
        # assure server to be installed
        try:
            project_dir = Path(__file__).parent.parent
            subprocess.run(
                # FIX: the import statement must be ONE "-c" argument; passing
                # "import" and the module name as separate argv elements is a
                # SyntaxError, so the check always failed and the pip install
                # fallback ran on every invocation.
                [sys.executable, "-c", "import akkudoktoreos.server.eos"],
                check=True,
                env=os.environ,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=project_dir,
            )
        except subprocess.CalledProcessError:
            subprocess.run(
                [sys.executable, "-m", "pip", "install", "-e", str(project_dir)],
                env=os.environ,
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=project_dir,
            )

        # Set environment for server run
        env = os.environ.copy()
        env["EOS_DIR"] = eos_dir
        env["EOS_CONFIG_DIR"] = eos_dir

        # command to start server process
        args = [
            sys.executable,
            "-m",
            "akkudoktoreos.server.eos",
            "--host",
            host,
            "--port",
            str(port),
        ]

        # Will wait for 'server_timeout' seconds before timing out
        timeout = server_timeout

        # xprocess will now attempt to clean up upon interruptions
        terminate_on_interrupt = True

        # checks if our server is ready
        def startup_check(self):
            try:
                result = requests.get(f"{server}/v1/health", timeout=2)
                if result.status_code == 200:
                    return True
            except Exception:
                # Best effort: any error just means "not ready yet".
                pass
            return False

    def cleanup_eos_eosdash():
        # Cleanup any EOS process left.
        if os.name == "nt":
            # Windows does not provide SIGKILL
            sigkill = signal.SIGTERM
        else:
            sigkill = signal.SIGKILL
        # - Use pid on EOS health endpoint
        try:
            result = requests.get(f"{server}/v1/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, sigkill)
                time.sleep(1)
                result = requests.get(f"{server}/v1/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except Exception:
            # Best effort: server may already be gone.
            pass
        # - Use pids from processes on EOS port
        for retries in range(int(server_timeout / 3)):
            pids: list[int] = []
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == port:
                    # conn.pid may be None (e.g. insufficient permission);
                    # psutil.Process(None) would be the *current* process - skip.
                    if conn.pid is not None and conn.pid not in pids:
                        # Get fresh process info
                        try:
                            process = psutil.Process(conn.pid)
                            process_info = process.as_dict(attrs=["pid", "cmdline"])
                            if "akkudoktoreos.server.eos" in process_info["cmdline"]:
                                pids.append(conn.pid)
                        except Exception:
                            # PID may already be dead
                            pass
            for pid in pids:
                os.kill(pid, sigkill)
            if len(pids) == 0:
                break
            time.sleep(3)
        assert len(pids) == 0
        # Cleanup any EOSdash processes left.
        # - Use pid on EOSdash health endpoint
        try:
            result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, sigkill)
                time.sleep(1)
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except Exception:
            # Best effort: EOSdash may already be gone.
            pass
        # - Use pids from processes on EOSdash port
        for retries in range(int(server_timeout / 3)):
            pids = []
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == eosdash_port:
                    if conn.pid is not None and conn.pid not in pids:
                        # Get fresh process info
                        try:
                            process = psutil.Process(conn.pid)
                            process_info = process.as_dict(attrs=["pid", "cmdline"])
                            if "akkudoktoreos.server.eosdash" in process_info["cmdline"]:
                                pids.append(conn.pid)
                        except Exception:
                            # PID may already be dead
                            pass
            for pid in pids:
                os.kill(pid, sigkill)
            if len(pids) == 0:
                break
            time.sleep(3)
        assert len(pids) == 0

    # Kill all running eos and eosdash process - just to be sure
    cleanup_eos_eosdash()

    # Ensure there is an empty config file in the temporary EOS directory
    config_file_path = Path(eos_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME)
    with config_file_path.open(mode="w", encoding="utf-8", newline="\n") as fd:
        json.dump({}, fd)

    # ensure process is running and return its logfile
    pid, logfile = xprocess.ensure("eos", Starter)
    logger.info(f"Started EOS ({pid}). This may take very long (up to {server_timeout} seconds).")
    logger.info(f"View xprocess logfile at: {logfile}")

    yield {
        "server": server,
        "eosdash_server": eosdash_server,
        "eos_dir": eos_dir,
        "timeout": server_timeout,
    }

    # clean up whole process tree afterwards
    xprocess.getinfo("eos").terminate()

    # Cleanup any EOS process left.
    cleanup_eos_eosdash()

    # Remove temporary EOS_DIR
    eos_tmp_dir.cleanup()
@pytest.fixture(scope="class")
def server_setup_for_class(xprocess) -> Generator[dict[str, Union[str, int]], None, None]:
    """Start the EOS server once for an entire test class."""
    with server_base(xprocess) as setup_info:
        yield setup_info
@pytest.fixture(scope="function")
def server_setup_for_function(xprocess) -> Generator[dict[str, Union[str, int]], None, None]:
    """Start a fresh EOS server for every single test function."""
    with server_base(xprocess) as setup_info:
        yield setup_info
@pytest.fixture
def server(xprocess, config_eos, config_default_dirs):
def server(xprocess, config_eos, config_default_dirs) -> Generator[str, None, None]:
"""Fixture to start the server.
Provides URL of the server.
"""
# create url/port info to the server
url = "http://0.0.0.0:8503"
class Starter(ProcessStarter):
# Set environment before any subprocess run, to keep custom config dir
env = os.environ.copy()
env["EOS_DIR"] = str(config_default_dirs[-1])
project_dir = config_eos.package_root_path
project_dir = config_eos.package_root_path.parent.parent
# assure server to be installed
try:
subprocess.run(
[sys.executable, "-c", "import akkudoktoreos.server.eos"],
[sys.executable, "-c", "import", "akkudoktoreos.server.eos"],
check=True,
env=env,
stdout=subprocess.PIPE,
@@ -203,7 +449,7 @@ def server(xprocess, config_eos, config_default_dirs):
)
except subprocess.CalledProcessError:
subprocess.run(
[sys.executable, "-m", "pip", "install", "-e", project_dir],
[sys.executable, "-m", "pip", "install", "-e", str(project_dir)],
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
@@ -212,24 +458,26 @@ def server(xprocess, config_eos, config_default_dirs):
# command to start server process
args = [sys.executable, "-m", "akkudoktoreos.server.eos"]
# startup pattern
pattern = "Application startup complete."
# search this number of lines for the startup pattern, if not found
# a RuntimeError will be raised informing the user
max_read_lines = 30
# will wait for 30 seconds before timing out
timeout = 30
# will wait for xx seconds before timing out
timeout = 10
# xprocess will now attempt to clean up upon interruptions
terminate_on_interrupt = True
# checks if our server is ready
def startup_check(self):
try:
result = requests.get(f"{url}/v1/health")
if result.status_code == 200:
return True
except:
pass
return False
# ensure process is running and return its logfile
pid, logfile = xprocess.ensure("eos", Starter)
print(f"View xprocess logfile at: {logfile}")
# create url/port info to the server
url = "http://127.0.0.1:8503"
yield url
# clean up whole process tree afterwards

694
tests/test_cache.py Normal file
View File

@@ -0,0 +1,694 @@
import io
import json
import pickle
import tempfile
from datetime import date, datetime, timedelta
from pathlib import Path
from time import sleep
from unittest.mock import MagicMock, patch
import cachebox
import pytest
from akkudoktoreos.core.cache import (
CacheFileRecord,
CacheFileStore,
CacheUntilUpdateStore,
cache_in_file,
cache_until_update,
cachemethod_until_update,
)
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
# ---------------------------------
# In-Memory Caching Functionality
# ---------------------------------
# Fixtures for testing
@pytest.fixture
def cache_until_update_store():
    """Provide the CacheUntilUpdateStore singleton, emptied for each test."""
    store = CacheUntilUpdateStore()
    store.clear()
    assert len(store) == 0
    return store
class TestCacheUntilUpdateStore:
    def test_cache_initialization(self, cache_until_update_store):
        """Test that CacheUntilUpdateStore initializes with the correct properties."""
        store = CacheUntilUpdateStore()
        assert isinstance(store.cache, cachebox.LRUCache)
        assert store.maxsize == 100
        assert len(store) == 0

    def test_singleton_behavior(self, cache_until_update_store):
        """Test that CacheUntilUpdateStore is a singleton."""
        first = CacheUntilUpdateStore()
        second = CacheUntilUpdateStore()
        assert first is second

    def test_cache_storage(self, cache_until_update_store):
        """Test that items can be added and retrieved from the cache."""
        store = CacheUntilUpdateStore()
        store["key1"] = "value1"
        assert store["key1"] == "value1"
        assert len(store) == 1

    def test_cache_getattr_invalid_method(self, cache_until_update_store):
        """Test that accessing an invalid method raises an AttributeError."""
        with pytest.raises(AttributeError):
            CacheUntilUpdateStore().non_existent_method()  # This should raise AttributeError
class TestCacheUntilUpdateDecorators:
    def test_cachemethod_until_update(self, cache_until_update_store):
        """Test that cachemethod_until_update caches method results."""

        class MyClass:
            @cachemethod_until_update
            def compute(self, value: int) -> int:
                return value * 2

        instance = MyClass()

        # Call method and assert caching
        assert CacheUntilUpdateStore.miss_count == 0
        assert CacheUntilUpdateStore.hit_count == 0
        first_result = instance.compute(5)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 0
        second_result = instance.compute(5)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 1
        assert first_result == second_result

    def test_cache_until_update(self, cache_until_update_store):
        """Test that cache_until_update caches function results."""

        @cache_until_update
        def compute(value: int) -> int:
            return value * 3

        # Call function and assert caching
        first_result = compute(4)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_MISS
        second_result = compute(4)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_HIT
        assert first_result == second_result

    def test_cache_with_different_arguments(self, cache_until_update_store):
        """Test that caching works for different arguments."""

        class MyClass:
            @cachemethod_until_update
            def compute(self, value: int) -> int:
                return value * 2

        instance = MyClass()

        assert CacheUntilUpdateStore.miss_count == 0
        first_result = instance.compute(3)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_MISS
        assert CacheUntilUpdateStore.miss_count == 1
        second_result = instance.compute(5)
        assert CacheUntilUpdateStore.last_event == cachebox.EVENT_MISS
        assert CacheUntilUpdateStore.miss_count == 2
        assert first_result == 6
        assert second_result == 10

    def test_cache_clearing(self, cache_until_update_store):
        """Test that cache is cleared between EMS update cycles."""

        class MyClass:
            @cachemethod_until_update
            def compute(self, value: int) -> int:
                return value * 2

        instance = MyClass()
        instance.compute(5)

        # Clear cache
        CacheUntilUpdateStore().clear()

        with pytest.raises(KeyError):
            _ = CacheUntilUpdateStore()["<invalid>"]

    def test_decorator_works_for_standalone_function(self, cache_until_update_store):
        """Test that cache_until_update works with standalone functions."""

        @cache_until_update
        def add(a: int, b: int) -> int:
            return a + b

        assert CacheUntilUpdateStore.miss_count == 0
        assert CacheUntilUpdateStore.hit_count == 0
        first_result = add(1, 2)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 0
        second_result = add(1, 2)
        assert CacheUntilUpdateStore.miss_count == 1
        assert CacheUntilUpdateStore.hit_count == 1
        assert first_result == second_result
# -----------------------------
# CacheFileStore
# -----------------------------
@pytest.fixture
def temp_store_file():
    """Provide a temporary file path for the cache store file.

    The file handle is closed before the test runs (keeping it open would block
    reopening on Windows), and the leftover file created by delete=False is
    removed after the test instead of being leaked.
    """
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        store_path = Path(temp_file.name)
    yield store_path
    # delete=False keeps the file after close - clean it up explicitly.
    store_path.unlink(missing_ok=True)
@pytest.fixture
def cache_file_store(temp_store_file):
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    store = CacheFileStore()
    store._store_file = temp_store_file
    store.clear(clear_all=True)
    assert len(store._store) == 0
    return store
class TestCacheFileStore:
def test_generate_cache_file_key(self, cache_file_store):
"""Test cache file key generation based on URL and date."""
key = "http://example.com"
# Provide until date - assure until_dt is used.
until_dt = to_datetime("2024-10-01")
cache_file_key, cache_file_until_dt, ttl_duration = (
cache_file_store._generate_cache_file_key(key=key, until_datetime=until_dt)
)
assert cache_file_key is not None
assert compare_datetimes(cache_file_until_dt, until_dt).equal
# Provide until date again - assure same key is generated.
cache_file_key1, cache_file_until_dt1, ttl_duration1 = (
cache_file_store._generate_cache_file_key(key=key, until_datetime=until_dt)
)
assert cache_file_key1 == cache_file_key
assert compare_datetimes(cache_file_until_dt1, until_dt).equal
# Provide no until date - assure today EOD is used.
no_until_dt = to_datetime().end_of("day")
cache_file_key, cache_file_until_dt, ttl_duration = (
cache_file_store._generate_cache_file_key(key)
)
assert cache_file_key is not None
assert compare_datetimes(cache_file_until_dt, no_until_dt).equal
# Provide with_ttl - assure until_dt is used.
until_dt = to_datetime().add(hours=1)
cache_file_key, cache_file_until_dt, ttl_duration = (
cache_file_store._generate_cache_file_key(key, with_ttl="1 hour")
)
assert cache_file_key is not None
assert compare_datetimes(cache_file_until_dt, until_dt).approximately_equal
assert ttl_duration == to_duration("1 hour")
# Provide with_ttl again - assure same key is generated.
until_dt = to_datetime().add(hours=1)
cache_file_key1, cache_file_until_dt1, ttl_duration1 = (
cache_file_store._generate_cache_file_key(key=key, with_ttl="1 hour")
)
assert cache_file_key1 == cache_file_key
assert compare_datetimes(cache_file_until_dt1, until_dt).approximately_equal
assert ttl_duration1 == to_duration("1 hour")
# Provide different with_ttl - assure different key is generated.
until_dt = to_datetime().add(hours=1, minutes=1)
cache_file_key2, cache_file_until_dt2, ttl_duration2 = (
cache_file_store._generate_cache_file_key(key=key, with_ttl="1 hour 1 minute")
)
assert cache_file_key2 != cache_file_key
assert compare_datetimes(cache_file_until_dt2, until_dt).approximately_equal
assert ttl_duration2 == to_duration("1 hour 1 minute")
def test_get_file_path(self, cache_file_store):
"""Test get file path from cache file object."""
cache_file = cache_file_store.create("test_file", mode="w+", suffix=".txt")
file_path = cache_file_store._get_file_path(cache_file)
assert file_path is not None
def test_until_datetime_by_options(self, cache_file_store):
"""Test until datetime calculation based on options."""
now = to_datetime()
# Test with until_datetime
result, ttl_duration = cache_file_store._until_datetime_by_options(until_datetime=now)
assert result == now
assert ttl_duration is None
# -- From now on we expect a until_datetime in one hour
ttl_duration_expected = to_duration("1 hour")
# Test with with_ttl as timedelta
until_datetime_expected = to_datetime().add(hours=1)
ttl = timedelta(hours=1)
result, ttl_duration = cache_file_store._until_datetime_by_options(with_ttl=ttl)
assert compare_datetimes(result, until_datetime_expected).approximately_equal
assert ttl_duration == ttl_duration_expected
# Test with with_ttl as int (seconds)
until_datetime_expected = to_datetime().add(hours=1)
ttl_seconds = 3600
result, ttl_duration = cache_file_store._until_datetime_by_options(with_ttl=ttl_seconds)
assert compare_datetimes(result, until_datetime_expected).approximately_equal
assert ttl_duration == ttl_duration_expected
# Test with with_ttl as string ("1 hour")
until_datetime_expected = to_datetime().add(hours=1)
ttl_string = "1 hour"
result, ttl_duration = cache_file_store._until_datetime_by_options(with_ttl=ttl_string)
assert compare_datetimes(result, until_datetime_expected).approximately_equal
assert ttl_duration == ttl_duration_expected
# -- From now on we expect a until_datetime today at end of day
until_datetime_expected = to_datetime().end_of("day")
ttl_duration_expected = None
# Test default case (end of today)
result, ttl_duration = cache_file_store._until_datetime_by_options()
assert compare_datetimes(result, until_datetime_expected).equal
assert ttl_duration == ttl_duration_expected
# -- From now on we expect a until_datetime in one day at end of day
until_datetime_expected = to_datetime().add(days=1).end_of("day")
assert ttl_duration == ttl_duration_expected
# Test with until_date as date
until_date = date.today() + timedelta(days=1)
result, ttl_duration = cache_file_store._until_datetime_by_options(until_date=until_date)
assert compare_datetimes(result, until_datetime_expected).equal
assert ttl_duration == ttl_duration_expected
# -- Test with multiple options (until_datetime takes precedence)
specific_datetime = to_datetime().add(days=2)
result, ttl_duration = cache_file_store._until_datetime_by_options(
until_date=to_datetime().add(days=1).date(),
until_datetime=specific_datetime,
with_ttl=ttl,
)
assert compare_datetimes(result, specific_datetime).equal
assert ttl_duration is None
# Test with invalid inputs
with pytest.raises(ValueError):
cache_file_store._until_datetime_by_options(until_date="invalid-date")
with pytest.raises(ValueError):
cache_file_store._until_datetime_by_options(with_ttl="invalid-ttl")
with pytest.raises(ValueError):
cache_file_store._until_datetime_by_options(until_datetime="invalid-datetime")
def test_create_cache_file(self, cache_file_store):
"""Test the creation of a cache file and ensure it is stored correctly."""
# Create a cache file for today's date
cache_file = cache_file_store.create("test_file", mode="w+", suffix=".txt")
# Check that the file exists in the store and is a file-like object
assert cache_file is not None
assert hasattr(cache_file, "name")
assert cache_file.name.endswith(".txt")
# Write some data to the file
cache_file.seek(0)
cache_file.write("Test data")
cache_file.seek(0) # Reset file pointer
assert cache_file.read() == "Test data"
def test_get_cache_file(self, cache_file_store):
"""Test retrieving an existing cache file by key."""
# Create a cache file and write data to it
cache_file = cache_file_store.create("test_file", mode="w+")
cache_file.seek(0)
cache_file.write("Test data")
cache_file.seek(0)
# Retrieve the cache file and verify the data
retrieved_file = cache_file_store.get("test_file")
assert retrieved_file is not None
retrieved_file.seek(0)
assert retrieved_file.read() == "Test data"
def test_set_custom_file_object(self, cache_file_store):
"""Test setting a custom file-like object (BytesIO or StringIO) in the store."""
# Create a BytesIO object and set it into the cache
file_obj = io.BytesIO(b"Binary data")
cache_file_store.set("binary_file", file_obj)
# Retrieve the file from the store
retrieved_file = cache_file_store.get("binary_file")
assert isinstance(retrieved_file, io.BytesIO)
retrieved_file.seek(0)
assert retrieved_file.read() == b"Binary data"
def test_delete_cache_file(self, cache_file_store):
"""Test deleting a cache file from the store."""
# Create multiple cache files
cache_file1 = cache_file_store.create("file1")
assert hasattr(cache_file1, "name")
cache_file2 = cache_file_store.create("file2")
assert hasattr(cache_file2, "name")
# Ensure the files are in the store
assert cache_file_store.get("file1") is cache_file1
assert cache_file_store.get("file2") is cache_file2
# Delete cache files
cache_file_store.delete("file1")
cache_file_store.delete("file2")
# Ensure the store is empty
assert cache_file_store.get("file1") is None
assert cache_file_store.get("file2") is None
def test_clear_all_cache_files(self, cache_file_store):
"""Test clearing all cache files from the store."""
# Create multiple cache files
cache_file1 = cache_file_store.create("file1")
assert hasattr(cache_file1, "name")
cache_file2 = cache_file_store.create("file2")
assert hasattr(cache_file2, "name")
# Ensure the files are in the store
assert cache_file_store.get("file1") is cache_file1
assert cache_file_store.get("file2") is cache_file2
current_store = cache_file_store.current_store()
assert current_store != {}
# Clear all cache files
cache_file_store.clear(clear_all=True)
# Ensure the store is empty
assert cache_file_store.get("file1") is None
assert cache_file_store.get("file2") is None
current_store = cache_file_store.current_store()
assert current_store == {}
def test_clear_cache_files_by_date(self, cache_file_store):
"""Test clearing cache files from the store by date."""
# Create multiple cache files
cache_file1 = cache_file_store.create("file1")
assert hasattr(cache_file1, "name")
cache_file2 = cache_file_store.create("file2")
assert hasattr(cache_file2, "name")
# Ensure the files are in the store
assert cache_file_store.get("file1") is cache_file1
assert cache_file_store.get("file2") is cache_file2
# Clear cache files that are older than today
cache_file_store.clear(before_datetime=to_datetime().start_of("day"))
# Ensure the files are in the store
assert cache_file_store.get("file1") is cache_file1
assert cache_file_store.get("file2") is cache_file2
# Clear cache files that are older than tomorrow
cache_file_store.clear(before_datetime=datetime.now() + timedelta(days=1))
# Ensure the store is empty
assert cache_file_store.get("file1") is None
assert cache_file_store.get("file2") is None
def test_cache_file_with_date(self, cache_file_store):
"""Test creating and retrieving cache files with a specific date."""
# Use a specific date for cache file creation
specific_date = datetime(2023, 10, 10)
cache_file = cache_file_store.create("dated_file", mode="w+", until_date=specific_date)
# Write data to the cache file
cache_file.write("Dated data")
cache_file.seek(0)
# Retrieve the cache file with the specific date
retrieved_file = cache_file_store.get("dated_file", until_date=specific_date)
assert retrieved_file is not None
retrieved_file.seek(0)
assert retrieved_file.read() == "Dated data"
def test_recreate_existing_cache_file(self, cache_file_store):
"""Test creating a cache file with an existing key does not overwrite the existing file."""
# Create a cache file
cache_file = cache_file_store.create("test_file", mode="w+")
cache_file.write("Original data")
cache_file.seek(0)
# Attempt to recreate the same file (should return the existing one)
new_file = cache_file_store.create("test_file")
assert new_file is cache_file # Should be the same object
new_file.seek(0)
assert new_file.read() == "Original data" # Data should be preserved
# Assure cache file store is a singleton
cache_file_store2 = CacheFileStore()
new_file = cache_file_store2.get("test_file")
assert new_file is cache_file # Should be the same object
def test_cache_file_store_is_singleton(self, cache_file_store):
"""Test re-creating a cache store provides the same store."""
# Create a cache file
cache_file = cache_file_store.create("test_file", mode="w+")
cache_file.write("Original data")
cache_file.seek(0)
# Assure cache file store is a singleton
cache_file_store2 = CacheFileStore()
new_file = cache_file_store2.get("test_file")
assert new_file is cache_file # Should be the same object
def test_cache_file_store_save_store(self, cache_file_store):
# Creating a sample cache record
cache_file = MagicMock()
cache_file.name = "cache_file_path"
cache_file.mode = "wb+"
cache_record = CacheFileRecord(
cache_file=cache_file, until_datetime=to_datetime(), ttl_duration=None
)
cache_file_store._store = {"test_key": cache_record}
# Save the store to the file
cache_file_store.save_store()
# Verify the file content
with cache_file_store._store_file.open("r", encoding="utf-8", newline=None) as f:
store_loaded = json.load(f)
assert "test_key" in store_loaded
assert store_loaded["test_key"]["cache_file"] == "cache_file_path"
assert store_loaded["test_key"]["mode"] == "wb+"
assert store_loaded["test_key"]["until_datetime"] == to_datetime(
cache_record.until_datetime, as_string=True
)
assert store_loaded["test_key"]["ttl_duration"] is None
def test_cache_file_store_load_store(self, cache_file_store):
    """load_store() rebuilds the in-memory store from the JSON store file."""
    # Creating a sample cache record and save it to the file
    cache_record = {
        "test_key": {
            "cache_file": "cache_file_path",
            "mode": "wb+",
            "until_datetime": to_datetime(as_string=True),
            "ttl_duration": None,
        }
    }
    with cache_file_store._store_file.open("w", encoding="utf-8", newline="\n") as f:
        json.dump(cache_record, f, indent=4)
    # Mock the open function to return a MagicMock for the cache file
    # (load_store re-opens the referenced cache files).
    with patch("builtins.open", new_callable=MagicMock) as mock_open:
        mock_open.return_value.name = "cache_file_path"
        mock_open.return_value.mode = "wb+"
        # Load the store from the file
        cache_file_store.load_store()
    # Verify the loaded store
    assert "test_key" in cache_file_store._store
    loaded_record = cache_file_store._store["test_key"]
    assert loaded_record.cache_file.name == "cache_file_path"
    assert loaded_record.cache_file.mode == "wb+"
    assert loaded_record.until_datetime == to_datetime(
        cache_record["test_key"]["until_datetime"]
    )
    assert loaded_record.ttl_duration is None
class TestCacheFileDecorators:
    """Tests for the ``cache_in_file`` decorator.

    Each test starts from an empty store and inspects the private ``_store``
    mapping to verify that results are persisted to (and served from) the
    cache file.
    """

    def test_cache_in_file_decorator_caches_function_result(self, cache_file_store):
        """Test that the cache_in_file decorator caches a function result."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function(until_date=None):
            return "Some expensive computation result"

        # Call the decorated function (should store result in cache)
        result = my_function(until_date=datetime.now() + timedelta(days=1))
        assert result == "Some expensive computation result"
        # Assert that the create method was called to store the result
        assert len(cache_file_store._store) == 1
        # Check if the result was written to the cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        # Assert correct content was written to the file
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == "Some expensive computation result"

    def test_cache_in_file_decorator_uses_cache(self, cache_file_store):
        """Test that the cache_in_file decorator reuses cached file on subsequent calls."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function(until_date=None):
            return "New result"

        # Call the decorated function (should store result in cache)
        result = my_function(until_date=to_datetime().add(days=1))
        assert result == "New result"
        # Assert result was written to cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result
        # Overwrite the cache file so a cache hit is distinguishable from a
        # recomputation.
        result2 = "Cached result"
        cache_file.seek(0)
        cache_file.write(result2)
        # Call the decorated function again (should get result from cache)
        result = my_function(until_date=to_datetime().add(days=1))
        assert result == result2

    def test_cache_in_file_decorator_forces_update_data(self, cache_file_store):
        """Test that force_update=True bypasses the cache and re-runs the function."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function(until_date=None):
            return "New result"

        until_date = to_datetime().add(days=1).date()
        # Call the decorated function (should store result in cache)
        result1 = "New result"
        result = my_function(until_date=until_date)
        assert result == result1
        # Assert result was written to cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result
        # Overwrite the cache file so a cache hit is distinguishable.
        result2 = "Cached result"
        cache_file.seek(0)
        cache_file.write(result2)
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result2
        # Call the decorated function again with force update (should get result from function)
        result = my_function(until_date=until_date, force_update=True)  # type: ignore[call-arg]
        assert result == result1
        # Assure result was written to the same cache file
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result1

    def test_cache_in_file_handles_ttl(self, cache_file_store):
        """Test that the cache_in_file decorator honors the with_ttl parameter."""
        # Clear store to assure it is empty (consistent with the sibling tests).
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a simple function to decorate
        @cache_in_file(mode="w+")
        def my_function():
            return "New result"

        # First call computes the result and stores it in the cache.
        result1 = my_function(with_ttl="1 second")  # type: ignore[call-arg]
        assert result1 == "New result"
        assert len(cache_file_store._store) == 1
        # Assert result was written to cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert cache_file is not None
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result1
        # Overwrite the cache file so a cache hit is distinguishable.
        result2 = "Cached result"
        cache_file.seek(0)
        cache_file.write(result2)
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result2
        # Second call within the TTL must return the (modified) cached value.
        result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
        cache_file.seek(0)  # Move to the start of the file
        assert cache_file.read() == result2
        assert result == result2
        # Sleep past the 1 second TTL so the cache entry expires.
        sleep(2)
        # Call again - cache is timed out, the function must be re-executed.
        result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
        assert result == result1

    def test_cache_in_file_handles_bytes_return(self, cache_file_store):
        """Test that the cache_in_file decorator handles bytes returned from the function."""
        # Clear store to assure it is empty
        cache_file_store.clear(clear_all=True)
        assert len(cache_file_store._store) == 0

        # Define a function that returns bytes
        @cache_in_file()
        def my_function(until_date=None) -> bytes:
            return b"Some binary data"

        # Call the decorated function
        result = my_function(until_date=datetime.now() + timedelta(days=1))
        # Check if the binary data was written to the cache file
        key = next(iter(cache_file_store._store))
        cache_file = cache_file_store._store[key].cache_file
        assert len(cache_file_store._store) == 1
        assert cache_file is not None
        cache_file.seek(0)
        # Bytes results are pickled into the cache file.
        result1 = pickle.load(cache_file)
        assert result1 == result
        # Access cache
        result = my_function(until_date=datetime.now() + timedelta(days=1))
        assert len(cache_file_store._store) == 1
        assert cache_file_store._store[key].cache_file is not None
        assert result1 == result

View File

@@ -1,491 +0,0 @@
"""Test Module for CacheFileStore Module."""
import io
import pickle
from datetime import date, datetime, timedelta
from time import sleep
import pytest
from akkudoktoreos.utils.cacheutil import CacheFileStore, cache_in_file
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
# -----------------------------
# CacheFileStore
# -----------------------------
@pytest.fixture
def cache_store():
    """A pytest fixture that creates a new CacheFileStore instance for testing."""
    cache = CacheFileStore()
    # Start from a clean store so tests do not see each other's cached files.
    cache.clear(clear_all=True)
    assert len(cache._store) == 0
    return cache
def test_generate_cache_file_key(cache_store):
    """Test cache file key generation based on URL and date.

    Exercises the private _generate_cache_file_key() helper with an explicit
    until datetime, without one (defaults to end of today) and with TTL
    durations; identical inputs must map to identical keys.
    """
    key = "http://example.com"

    # Provide until date - assure until_dt is used.
    until_dt = to_datetime("2024-10-01")
    cache_file_key, cache_file_until_dt, ttl_duration = cache_store._generate_cache_file_key(
        key=key, until_datetime=until_dt
    )
    assert cache_file_key is not None
    assert compare_datetimes(cache_file_until_dt, until_dt).equal
    # Provide until date again - assure same key is generated.
    cache_file_key1, cache_file_until_dt1, ttl_duration1 = cache_store._generate_cache_file_key(
        key=key, until_datetime=until_dt
    )
    assert cache_file_key1 == cache_file_key
    assert compare_datetimes(cache_file_until_dt1, until_dt).equal
    # Provide no until date - assure today EOD is used.
    no_until_dt = to_datetime().end_of("day")
    cache_file_key, cache_file_until_dt, ttl_duration = cache_store._generate_cache_file_key(key)
    assert cache_file_key is not None
    assert compare_datetimes(cache_file_until_dt, no_until_dt).equal
    # Provide with_ttl - assure until_dt is used.
    until_dt = to_datetime().add(hours=1)
    cache_file_key, cache_file_until_dt, ttl_duration = cache_store._generate_cache_file_key(
        key, with_ttl="1 hour"
    )
    assert cache_file_key is not None
    assert compare_datetimes(cache_file_until_dt, until_dt).approximately_equal
    assert ttl_duration == to_duration("1 hour")
    # Provide with_ttl again - assure same key is generated.
    until_dt = to_datetime().add(hours=1)
    cache_file_key1, cache_file_until_dt1, ttl_duration1 = cache_store._generate_cache_file_key(
        key=key, with_ttl="1 hour"
    )
    assert cache_file_key1 == cache_file_key
    assert compare_datetimes(cache_file_until_dt1, until_dt).approximately_equal
    assert ttl_duration1 == to_duration("1 hour")
    # Provide different with_ttl - assure different key is generated.
    until_dt = to_datetime().add(hours=1, minutes=1)
    cache_file_key2, cache_file_until_dt2, ttl_duration2 = cache_store._generate_cache_file_key(
        key=key, with_ttl="1 hour 1 minute"
    )
    assert cache_file_key2 != cache_file_key
    assert compare_datetimes(cache_file_until_dt2, until_dt).approximately_equal
    assert ttl_duration2 == to_duration("1 hour 1 minute")
def test_get_file_path(cache_store):
    """A cache file object can be resolved back to a filesystem path."""
    created = cache_store.create("test_file", mode="w+", suffix=".txt")
    # The store must know the path of the file object it handed out.
    assert cache_store._get_file_path(created) is not None
def test_until_datetime_by_options(cache_store):
    """Test until datetime calculation based on options.

    Covers until_datetime, the three accepted with_ttl forms (timedelta,
    seconds as int, duration string), the default case (end of today),
    until_date, option precedence and invalid inputs.
    """
    now = to_datetime()

    # Test with until_datetime
    result, ttl_duration = cache_store._until_datetime_by_options(until_datetime=now)
    assert result == now
    assert ttl_duration is None

    # -- From now on we expect a until_datetime in one hour
    ttl_duration_expected = to_duration("1 hour")

    # Test with with_ttl as timedelta
    until_datetime_expected = to_datetime().add(hours=1)
    ttl = timedelta(hours=1)
    result, ttl_duration = cache_store._until_datetime_by_options(with_ttl=ttl)
    assert compare_datetimes(result, until_datetime_expected).approximately_equal
    assert ttl_duration == ttl_duration_expected

    # Test with with_ttl as int (seconds)
    until_datetime_expected = to_datetime().add(hours=1)
    ttl_seconds = 3600
    result, ttl_duration = cache_store._until_datetime_by_options(with_ttl=ttl_seconds)
    assert compare_datetimes(result, until_datetime_expected).approximately_equal
    assert ttl_duration == ttl_duration_expected

    # Test with with_ttl as string ("1 hour")
    until_datetime_expected = to_datetime().add(hours=1)
    ttl_string = "1 hour"
    result, ttl_duration = cache_store._until_datetime_by_options(with_ttl=ttl_string)
    assert compare_datetimes(result, until_datetime_expected).approximately_equal
    assert ttl_duration == ttl_duration_expected

    # -- From now on we expect a until_datetime today at end of day
    until_datetime_expected = to_datetime().end_of("day")
    ttl_duration_expected = None

    # Test default case (end of today)
    result, ttl_duration = cache_store._until_datetime_by_options()
    assert compare_datetimes(result, until_datetime_expected).equal
    assert ttl_duration == ttl_duration_expected

    # -- From now on we expect a until_datetime in one day at end of day
    until_datetime_expected = to_datetime().add(days=1).end_of("day")

    # Test with until_date as date
    until_date = date.today() + timedelta(days=1)
    result, ttl_duration = cache_store._until_datetime_by_options(until_date=until_date)
    assert compare_datetimes(result, until_datetime_expected).equal
    assert ttl_duration == ttl_duration_expected

    # -- Test with multiple options (until_datetime takes precedence)
    specific_datetime = to_datetime().add(days=2)
    result, ttl_duration = cache_store._until_datetime_by_options(
        until_date=to_datetime().add(days=1).date(),
        until_datetime=specific_datetime,
        with_ttl=ttl,
    )
    assert compare_datetimes(result, specific_datetime).equal
    assert ttl_duration is None

    # Test with invalid inputs
    with pytest.raises(ValueError):
        cache_store._until_datetime_by_options(until_date="invalid-date")
    with pytest.raises(ValueError):
        cache_store._until_datetime_by_options(with_ttl="invalid-ttl")
    with pytest.raises(ValueError):
        cache_store._until_datetime_by_options(until_datetime="invalid-datetime")
def test_create_cache_file(cache_store):
    """Creating a cache file registers a writable file-like object in the store."""
    # Create a cache file for today's date.
    created = cache_store.create("test_file", mode="w+", suffix=".txt")
    # The store hands back a named file-like object carrying the suffix.
    assert created is not None
    assert hasattr(created, "name")
    assert created.name.endswith(".txt")
    # Round-trip some data through the file.
    created.seek(0)
    created.write("Test data")
    created.seek(0)  # Reset file pointer
    assert created.read() == "Test data"
def test_get_cache_file(cache_store):
    """Data written through a created cache file is visible via get()."""
    handle = cache_store.create("test_file", mode="w+")
    handle.seek(0)
    handle.write("Test data")
    handle.seek(0)
    # Look the file up again by key and verify the data survived.
    fetched = cache_store.get("test_file")
    assert fetched is not None
    fetched.seek(0)
    assert fetched.read() == "Test data"
def test_set_custom_file_object(cache_store):
    """A caller-supplied file-like object (BytesIO) can be stored and retrieved."""
    cache_store.set("binary_file", io.BytesIO(b"Binary data"))
    # The retrieved object is the BytesIO with its content intact.
    stored = cache_store.get("binary_file")
    assert isinstance(stored, io.BytesIO)
    stored.seek(0)
    assert stored.read() == b"Binary data"
def test_delete_cache_file(cache_store):
    """delete() removes individual cache files from the store."""
    first = cache_store.create("file1")
    assert hasattr(first, "name")
    second = cache_store.create("file2")
    assert hasattr(second, "name")
    # Both entries resolve before deletion.
    assert cache_store.get("file1") is first
    assert cache_store.get("file2") is second
    # Remove both entries.
    for key in ("file1", "file2"):
        cache_store.delete(key)
    # Neither entry resolves afterwards.
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None
def test_clear_all_cache_files(cache_store):
    """clear(clear_all=True) empties the whole store."""
    first = cache_store.create("file1")
    assert hasattr(first, "name")
    second = cache_store.create("file2")
    assert hasattr(second, "name")
    # Both entries resolve before clearing.
    assert cache_store.get("file1") is first
    assert cache_store.get("file2") is second
    # Wipe everything.
    cache_store.clear(clear_all=True)
    # The store no longer resolves either key.
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None
def test_clear_cache_files_by_date(cache_store):
    """clear(before_datetime=...) removes only entries older than the cutoff."""
    first = cache_store.create("file1")
    assert hasattr(first, "name")
    second = cache_store.create("file2")
    assert hasattr(second, "name")
    # Both entries resolve before any clearing.
    assert cache_store.get("file1") is first
    assert cache_store.get("file2") is second
    # A cutoff at the start of today must keep the freshly created entries.
    cache_store.clear(before_datetime=to_datetime().start_of("day"))
    assert cache_store.get("file1") is first
    assert cache_store.get("file2") is second
    # A cutoff in the future removes them.
    cache_store.clear(before_datetime=datetime.now() + timedelta(days=1))
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None
def test_cache_file_with_date(cache_store):
    """Cache files created for a specific date are retrievable with that date."""
    specific_date = datetime(2023, 10, 10)
    dated = cache_store.create("dated_file", mode="w+", until_date=specific_date)
    dated.write("Dated data")
    dated.seek(0)
    # Retrieval must supply the same until_date to resolve the entry.
    fetched = cache_store.get("dated_file", until_date=specific_date)
    assert fetched is not None
    fetched.seek(0)
    assert fetched.read() == "Dated data"
def test_recreate_existing_cache_file(cache_store):
    """Test creating a cache file with an existing key does not overwrite the existing file."""
    # Create a cache file
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.write("Original data")
    cache_file.seek(0)
    # Attempt to recreate the same file (should return the existing one)
    new_file = cache_store.create("test_file")
    assert new_file is cache_file  # Should be the same object
    new_file.seek(0)
    assert new_file.read() == "Original data"  # Data should be preserved
    # Assure cache file store is a singleton
    cache_store2 = CacheFileStore()
    new_file = cache_store2.get("test_file")
    assert new_file is cache_file  # Should be the same object
def test_cache_store_is_singleton(cache_store):
    """Test re-creating a cache store provides the same store."""
    # Create a cache file
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.write("Original data")
    cache_file.seek(0)
    # Assure cache file store is a singleton - a second instance resolves
    # the same key to the same file object.
    cache_store2 = CacheFileStore()
    new_file = cache_store2.get("test_file")
    assert new_file is cache_file  # Should be the same object
def test_cache_in_file_decorator_caches_function_result(cache_store):
    """Test that the cache_in_file decorator caches a function result."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "Some expensive computation result"

    # Call the decorated function (should store result in cache)
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    assert result == "Some expensive computation result"
    # Assert that the create method was called to store the result
    assert len(cache_store._store) == 1
    # Check if the result was written to the cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None
    # Assert correct content was written to the file
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == "Some expensive computation result"
def test_cache_in_file_decorator_uses_cache(cache_store):
    """Test that the cache_in_file decorator reuses cached file on subsequent calls."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "New result"

    # Call the decorated function (should store result in cache)
    result = my_function(until_date=to_datetime().add(days=1))
    assert result == "New result"
    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result
    # Modify cache file so a cache hit is distinguishable from a recompute
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)
    # Call the decorated function again (should get result from cache)
    result = my_function(until_date=to_datetime().add(days=1))
    assert result == result2
def test_cache_in_file_decorator_forces_update_data(cache_store):
    """Test that force_update=True bypasses the cache and re-runs the function."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "New result"

    until_date = to_datetime().add(days=1).date()
    # Call the decorated function (should store result in cache)
    result1 = "New result"
    result = my_function(until_date=until_date)
    assert result == result1
    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result
    # Modify cache file so a cache hit is distinguishable from a recompute
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result2
    # Call the decorated function again with force update (should get result from function)
    result = my_function(until_date=until_date, force_update=True)  # type: ignore[call-arg]
    assert result == result1
    # Assure result was written to the same cache file
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result1
def test_cache_in_file_handles_ttl(cache_store):
    """Test that the cache_in_file decorator honors the with_ttl parameter."""
    # Clear store to assure it is empty (consistent with the sibling tests).
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function():
        return "New result"

    # First call computes the result and stores it in the cache.
    result1 = my_function(with_ttl="1 second")  # type: ignore[call-arg]
    assert result1 == "New result"
    assert len(cache_store._store) == 1
    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result1
    # Overwrite the cache file so a cache hit is distinguishable.
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result2
    # Second call within the TTL must return the (modified) cached value.
    result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result2
    assert result == result2
    # Sleep past the 1 second TTL so the cache entry expires.
    sleep(2)
    # Call again - cache is timed out, the function must be re-executed.
    result = my_function(with_ttl="1 second")  # type: ignore[call-arg]
    assert result == result1
def test_cache_in_file_handles_bytes_return(cache_store):
    """Test that the cache_in_file decorator handles bytes returned from the function."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a function that returns bytes
    @cache_in_file()
    def my_function(until_date=None) -> bytes:
        return b"Some binary data"

    # Call the decorated function
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    # Check if the binary data was written to the cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key].cache_file
    assert len(cache_store._store) == 1
    assert cache_file is not None
    cache_file.seek(0)
    # Bytes results are round-tripped through pickle in the cache file.
    result1 = pickle.load(cache_file)
    assert result1 == result
    # Access cache
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    assert len(cache_store._store) == 1
    assert cache_store._store[key].cache_file is not None
    assert result1 == result

View File

@@ -2,8 +2,8 @@ import numpy as np
import pytest
from akkudoktoreos.core.ems import (
EnergieManagementSystem,
EnergieManagementSystemParameters,
EnergyManagement,
EnergyManagementParameters,
SimulationResult,
get_ems,
)
@@ -20,8 +20,8 @@ start_hour = 1
# Example initialization of necessary components
@pytest.fixture
def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
def create_ems_instance(devices_eos, config_eos) -> EnergyManagement:
"""Fixture to create an EnergyManagement instance with given test parameters."""
# Assure configuration holds the correct values
config_eos.merge_settings_from_dict(
{"prediction": {"hours": 48}, "optimization": {"hours": 24}}
@@ -227,7 +227,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
# Initialize the energy management system with the respective parameters
ems = get_ems()
ems.set_parameters(
EnergieManagementSystemParameters(
EnergyManagementParameters(
pv_prognose_wh=pv_prognose_wh,
strompreis_euro_pro_wh=strompreis_euro_pro_wh,
einspeiseverguetung_euro_pro_wh=einspeiseverguetung_euro_pro_wh,
@@ -243,7 +243,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
def test_simulation(create_ems_instance):
"""Test the EnergieManagementSystem simulation method."""
"""Test the EnergyManagement simulation method."""
ems = create_ems_instance
# Simulate starting from hour 1 (this value can be adjusted)

View File

@@ -2,8 +2,8 @@ import numpy as np
import pytest
from akkudoktoreos.core.ems import (
EnergieManagementSystem,
EnergieManagementSystemParameters,
EnergyManagement,
EnergyManagementParameters,
SimulationResult,
get_ems,
)
@@ -20,8 +20,8 @@ start_hour = 0
# Example initialization of necessary components
@pytest.fixture
def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
def create_ems_instance(devices_eos, config_eos) -> EnergyManagement:
"""Fixture to create an EnergyManagement instance with given test parameters."""
# Assure configuration holds the correct values
config_eos.merge_settings_from_dict(
{"prediction": {"hours": 48}, "optimization": {"hours": 24}}
@@ -130,7 +130,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
# Initialize the energy management system with the respective parameters
ems = get_ems()
ems.set_parameters(
EnergieManagementSystemParameters(
EnergyManagementParameters(
pv_prognose_wh=pv_prognose_wh,
strompreis_euro_pro_wh=strompreis_euro_pro_wh,
einspeiseverguetung_euro_pro_wh=einspeiseverguetung_euro_pro_wh,
@@ -153,7 +153,7 @@ def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
def test_simulation(create_ems_instance):
"""Test the EnergieManagementSystem simulation method."""
"""Test the EnergyManagement simulation method."""
ems = create_ems_instance
# Simulate starting from hour 0 (this value can be adjusted)
@@ -254,7 +254,7 @@ def test_simulation(create_ems_instance):
def test_set_parameters(create_ems_instance):
"""Test the set_parameters method of EnergieManagementSystem."""
"""Test the set_parameters method of EnergyManagement."""
ems = create_ems_instance
# Check if parameters are set correctly
@@ -267,7 +267,7 @@ def test_set_parameters(create_ems_instance):
def test_set_akku_discharge_hours(create_ems_instance):
"""Test the set_akku_discharge_hours method of EnergieManagementSystem."""
"""Test the set_akku_discharge_hours method of EnergyManagement."""
ems = create_ems_instance
discharge_hours = np.full(ems.config.prediction.hours, 1.0)
ems.set_akku_discharge_hours(discharge_hours)
@@ -277,7 +277,7 @@ def test_set_akku_discharge_hours(create_ems_instance):
def test_set_akku_ac_charge_hours(create_ems_instance):
"""Test the set_akku_ac_charge_hours method of EnergieManagementSystem."""
"""Test the set_akku_ac_charge_hours method of EnergyManagement."""
ems = create_ems_instance
ac_charge_hours = np.full(ems.config.prediction.hours, 1.0)
ems.set_akku_ac_charge_hours(ac_charge_hours)
@@ -287,7 +287,7 @@ def test_set_akku_ac_charge_hours(create_ems_instance):
def test_set_akku_dc_charge_hours(create_ems_instance):
"""Test the set_akku_dc_charge_hours method of EnergieManagementSystem."""
"""Test the set_akku_dc_charge_hours method of EnergyManagement."""
ems = create_ems_instance
dc_charge_hours = np.full(ems.config.prediction.hours, 1.0)
ems.set_akku_dc_charge_hours(dc_charge_hours)
@@ -297,7 +297,7 @@ def test_set_akku_dc_charge_hours(create_ems_instance):
def test_set_ev_charge_hours(create_ems_instance):
"""Test the set_ev_charge_hours method of EnergieManagementSystem."""
"""Test the set_ev_charge_hours method of EnergyManagement."""
ems = create_ems_instance
ev_charge_hours = np.full(ems.config.prediction.hours, 1.0)
ems.set_ev_charge_hours(ev_charge_hours)
@@ -307,7 +307,7 @@ def test_set_ev_charge_hours(create_ems_instance):
def test_reset(create_ems_instance):
"""Test the reset method of EnergieManagementSystem."""
"""Test the reset method of EnergyManagement."""
ems = create_ems_instance
ems.reset()
assert ems.ev.current_soc_percentage() == 100, "EV SOC should be reset to initial value"
@@ -317,7 +317,7 @@ def test_reset(create_ems_instance):
def test_simulate_start_now(create_ems_instance):
"""Test the simulate_start_now method of EnergieManagementSystem."""
"""Test the simulate_start_now method of EnergyManagement."""
ems = create_ems_instance
result = ems.simulate_start_now()
assert result is not None, "Result should not be None"

View File

@@ -86,7 +86,8 @@ def test_optimize(
parameters=input_data, start_hour=start_hour, ngen=ngen
)
# Write test output to file, so we can take it as new data on intended change
with open(DIR_TESTDATA / f"new_{fn_out}", "w") as f_out:
TESTDATA_FILE = DIR_TESTDATA / f"new_{fn_out}"
with TESTDATA_FILE.open("w", encoding="utf-8", newline="\n") as f_out:
f_out.write(ergebnis.model_dump_json(indent=4, exclude_unset=True))
assert ergebnis.result.Gesamtbilanz_Euro == pytest.approx(

View File

@@ -1,5 +1,6 @@
import tempfile
from pathlib import Path
from typing import Union
from unittest.mock import patch
import pytest
@@ -46,12 +47,19 @@ def test_computed_paths(config_eos):
"general": {
"data_folder_path": "/base/data",
"data_output_subpath": "extra/output",
"data_cache_subpath": "somewhere/cache",
}
},
"cache": {
"subpath": "somewhere/cache",
},
}
)
assert config_eos.general.data_folder_path == Path("/base/data")
assert config_eos.general.data_output_path == Path("/base/data/extra/output")
assert config_eos.general.data_cache_path == Path("/base/data/somewhere/cache")
assert config_eos.cache.path() == Path("/base/data/somewhere/cache")
# Check non configurable pathes
assert config_eos.package_root_path == Path(__file__).parent.parent.resolve().joinpath(
"src/akkudoktoreos"
)
# reset settings so the config_eos fixture can verify the default paths
config_eos.reset_settings()
@@ -374,3 +382,64 @@ def test_get_nested_key(path, expected_value, exception, config_eos):
else:
with pytest.raises(exception):
config_eos.get_config_value(path)
def test_merge_settings_from_dict_invalid(config_eos):
    """Merging a dict with wrongly-typed values must raise a validation error."""
    bad_settings = {
        "general": {
            "latitude": "invalid_latitude",  # should be a float
        },
    }
    # Pydantic validation is expected to reject the string latitude.
    with pytest.raises(Exception):
        config_eos.merge_settings_from_dict(bad_settings)
def test_merge_settings_partial(config_eos):
    """Test merging only a subset of settings."""
    partial_settings: dict[str, dict[str, Union[float, None, str]]] = {
        "general": {
            "latitude": 51.1657  # Only latitude is updated
        },
    }
    config_eos.merge_settings_from_dict(partial_settings)
    assert config_eos.general.latitude == 51.1657
    assert config_eos.general.longitude == 13.405  # Should remain unchanged

    # Merge into a different settings section.
    partial_settings = {
        "weather": {
            "provider": "BrightSky",
        },
    }
    config_eos.merge_settings_from_dict(partial_settings)
    assert config_eos.weather.provider == "BrightSky"

    # Merging None must clear a previously set value.
    partial_settings = {
        "general": {
            "latitude": None,
        },
        "weather": {
            "provider": "ClearOutside",
        },
    }
    config_eos.merge_settings_from_dict(partial_settings)
    assert config_eos.general.latitude is None
    assert config_eos.weather.provider == "ClearOutside"
    # Assure update keeps same values
    config_eos.update()
    assert config_eos.general.latitude is None
    assert config_eos.weather.provider == "ClearOutside"
def test_merge_settings_empty(config_eos):
    """Merging an empty dictionary is a no-op on the configuration."""
    latitude_before = config_eos.general.latitude

    config_eos.merge_settings_from_dict({})  # no changes requested

    assert config_eos.general.latitude == latitude_before

View File

@@ -562,6 +562,102 @@ class TestDataSequence:
assert dates == [to_datetime(datetime(2023, 11, 5)), to_datetime(datetime(2023, 11, 6))]
assert values == [0.8, 0.9]
def test_to_dataframe_full_data(self, sequence):
"""Test conversion of all records to a DataFrame without filtering."""
record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
sequence.append(record1)
sequence.append(record2)
sequence.append(record3)
df = sequence.to_dataframe()
# Validate DataFrame structure
assert isinstance(df, pd.DataFrame)
assert not df.empty
assert len(df) == 3 # All records should be included
assert "data_value" in df.columns
def test_to_dataframe_with_filter(self, sequence):
"""Test filtering records by datetime range."""
record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
sequence.append(record1)
sequence.append(record2)
sequence.append(record3)
start = to_datetime("2024-01-01T12:30:00Z")
end = to_datetime("2024-01-01T14:00:00Z")
df = sequence.to_dataframe(start_datetime=start, end_datetime=end)
assert isinstance(df, pd.DataFrame)
assert not df.empty
assert len(df) == 1 # Only one record should match the range
assert df.index[0] == pd.Timestamp("2024-01-01T13:00:00Z")
def test_to_dataframe_no_matching_records(self, sequence):
"""Test when no records match the given datetime filter."""
record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
sequence.append(record1)
sequence.append(record2)
start = to_datetime("2024-01-01T14:00:00Z") # Start time after all records
end = to_datetime("2024-01-01T15:00:00Z")
df = sequence.to_dataframe(start_datetime=start, end_datetime=end)
assert isinstance(df, pd.DataFrame)
assert df.empty # No records should match
def test_to_dataframe_empty_sequence(self, sequence):
"""Test when DataSequence has no records."""
sequence = DataSequence(records=[])
df = sequence.to_dataframe()
assert isinstance(df, pd.DataFrame)
assert df.empty # Should return an empty DataFrame
def test_to_dataframe_no_start_datetime(self, sequence):
"""Test when only end_datetime is given (all past records should be included)."""
record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
sequence.append(record1)
sequence.append(record2)
sequence.append(record3)
end = to_datetime("2024-01-01T13:00:00Z") # Include only first record
df = sequence.to_dataframe(end_datetime=end)
assert isinstance(df, pd.DataFrame)
assert not df.empty
assert len(df) == 1
assert df.index[0] == pd.Timestamp("2024-01-01T12:00:00Z")
def test_to_dataframe_no_end_datetime(self, sequence):
"""Test when only start_datetime is given (all future records should be included)."""
record1 = self.create_test_record("2024-01-01T12:00:00Z", 10)
record2 = self.create_test_record("2024-01-01T13:00:00Z", 20)
record3 = self.create_test_record("2024-01-01T14:00:00Z", 30)
sequence.append(record1)
sequence.append(record2)
sequence.append(record3)
start = to_datetime("2024-01-01T13:00:00Z") # Include last two records
df = sequence.to_dataframe(start_datetime=start)
assert isinstance(df, pd.DataFrame)
assert not df.empty
assert len(df) == 2
assert df.index[0] == pd.Timestamp("2024-01-01T13:00:00Z")
class TestDataProvider:
# Fixtures and helper functions

View File

@@ -1,4 +1,5 @@
import json
import os
import sys
from pathlib import Path
from unittest.mock import patch
@@ -14,7 +15,7 @@ def test_openapi_spec_current(config_eos):
expected_spec_path = DIR_PROJECT_ROOT / "openapi.json"
new_spec_path = DIR_TESTDATA / "openapi-new.json"
with open(expected_spec_path) as f_expected:
with expected_spec_path.open("r", encoding="utf-8", newline=None) as f_expected:
expected_spec = json.load(f_expected)
# Patch get_config and import within guard to patch global variables within the eos module.
@@ -25,12 +26,14 @@ def test_openapi_spec_current(config_eos):
from scripts import generate_openapi
spec = generate_openapi.generate_openapi()
spec_str = json.dumps(spec, indent=4, sort_keys=True)
with open(new_spec_path, "w") as f_new:
json.dump(spec, f_new, indent=4, sort_keys=True)
if os.name == "nt":
spec_str = spec_str.replace("127.0.0.1", "0.0.0.0")
with new_spec_path.open("w", encoding="utf-8", newline="\n") as f_new:
f_new.write(spec_str)
# Serialize to ensure comparison is consistent
spec_str = json.dumps(spec, indent=4, sort_keys=True)
expected_spec_str = json.dumps(expected_spec, indent=4, sort_keys=True)
try:
@@ -47,7 +50,7 @@ def test_openapi_md_current(config_eos):
expected_spec_md_path = DIR_PROJECT_ROOT / "docs" / "_generated" / "openapi.md"
new_spec_md_path = DIR_TESTDATA / "openapi-new.md"
with open(expected_spec_md_path, encoding="utf8") as f_expected:
with expected_spec_md_path.open("r", encoding="utf-8", newline=None) as f_expected:
expected_spec_md = f_expected.read()
# Patch get_config and import within guard to patch global variables within the eos module.
@@ -59,7 +62,9 @@ def test_openapi_md_current(config_eos):
spec_md = generate_openapi_md.generate_openapi_md()
with open(new_spec_md_path, "w", encoding="utf8") as f_new:
if os.name == "nt":
spec_md = spec_md.replace("127.0.0.1", "0.0.0.0")
with new_spec_md_path.open("w", encoding="utf-8", newline="\n") as f_new:
f_new.write(spec_md)
try:
@@ -76,7 +81,7 @@ def test_config_md_current(config_eos):
expected_config_md_path = DIR_PROJECT_ROOT / "docs" / "_generated" / "config.md"
new_config_md_path = DIR_TESTDATA / "config-new.md"
with open(expected_config_md_path, encoding="utf8") as f_expected:
with expected_config_md_path.open("r", encoding="utf-8", newline=None) as f_expected:
expected_config_md = f_expected.read()
# Patch get_config and import within guard to patch global variables within the eos module.
@@ -88,7 +93,9 @@ def test_config_md_current(config_eos):
config_md = generate_config_md.generate_config_md(config_eos)
with open(new_config_md_path, "w", encoding="utf8") as f_new:
if os.name == "nt":
config_md = config_md.replace("127.0.0.1", "0.0.0.0").replace("\\\\", "/")
with new_config_md_path.open("w", encoding="utf-8", newline="\n") as f_new:
f_new.write(config_md)
try:

View File

@@ -6,6 +6,7 @@ import numpy as np
import pytest
import requests
from akkudoktoreos.core.cache import CacheFileStore
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.core.logging import get_logger
from akkudoktoreos.prediction.elecpriceakkudoktor import (
@@ -13,7 +14,6 @@ from akkudoktoreos.prediction.elecpriceakkudoktor import (
AkkudoktorElecPriceValue,
ElecPriceAkkudoktor,
)
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import to_datetime
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
@@ -36,7 +36,9 @@ def provider(monkeypatch, config_eos):
@pytest.fixture
def sample_akkudoktor_1_json():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "r") as f_res:
with FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON.open(
"r", encoding="utf-8", newline=None
) as f_res:
input_data = json.load(f_res)
return input_data
@@ -173,7 +175,7 @@ def test_request_forecast_status_codes(
provider._request_forecast()
@patch("akkudoktoreos.utils.cacheutil.CacheFileStore")
@patch("akkudoktoreos.core.cache.CacheFileStore")
def test_cache_integration(mock_cache, provider):
"""Test caching of 8-day electricity price data."""
mock_cache_instance = mock_cache.return_value
@@ -208,5 +210,7 @@ def test_akkudoktor_development_forecast_data(provider):
akkudoktor_data = provider._request_forecast()
with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
with FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON.open(
"w", encoding="utf-8", newline="\n"
) as f_out:
json.dump(akkudoktor_data, f_out, indent=4)

View File

@@ -33,7 +33,7 @@ def provider(sample_import_1_json, config_eos):
@pytest.fixture
def sample_import_1_json():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON, "r") as f_res:
with FILE_TESTDATA_ELECPRICEIMPORT_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
input_data = json.load(f_res)
return input_data

View File

@@ -0,0 +1,51 @@
import time
from http import HTTPStatus
import requests
class TestEOSDash:
    """Tests for the EOSdash server that is started and proxied by the EOS server."""

    def _assert_alive(self, base_url: str, timeout: float) -> None:
        """Poll the EOSdash health endpoint until it reports alive.

        Retries every 3 seconds until `timeout` seconds are exhausted and
        fails the test if the endpoint never becomes healthy.

        Args:
            base_url: Server base URL; `/eosdash/health` is appended.
            timeout: Overall polling budget in seconds.
        """
        startup = False
        error = ""
        result = None
        for _ in range(int(timeout / 3)):
            try:
                result = requests.get(f"{base_url}/eosdash/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)
        assert startup, f"Connection to {base_url}/eosdash/health failed: {error}"
        assert result.json()["status"] == "alive"

    def test_eosdash_started(self, server_setup_for_class, is_system_test):
        """Test the EOSdash server is started by EOS server."""
        eosdash_server = server_setup_for_class["eosdash_server"]
        timeout = server_setup_for_class["timeout"]

        # Assure EOSdash is up, reached directly on its own port.
        self._assert_alive(eosdash_server, timeout)

    def test_eosdash_proxied_by_eos(self, server_setup_for_class, is_system_test):
        """Test the EOSdash server proxied by EOS server."""
        server = server_setup_for_class["server"]
        timeout = server_setup_for_class["timeout"]

        # Assure EOSdash is up, reached through the EOS proxy endpoint.
        self._assert_alive(server, timeout)

View File

@@ -26,6 +26,8 @@ def provider(config_eos):
}
}
config_eos.merge_settings_from_dict(settings)
assert config_eos.load.provider == "LoadAkkudoktor"
assert config_eos.load.provider_settings.loadakkudoktor_year_energy == 1000
return LoadAkkudoktor()

View File

@@ -1,8 +1,8 @@
"""Test Module for logging Module."""
import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path
import pytest
@@ -13,16 +13,7 @@ from akkudoktoreos.core.logging import get_logger
# -----------------------------
@pytest.fixture
def clean_up_log_file():
"""Fixture to clean up log files after tests."""
log_file = "test.log"
yield log_file
if os.path.exists(log_file):
os.remove(log_file)
def test_get_logger_console_logging(clean_up_log_file):
def test_get_logger_console_logging():
"""Test logger creation with console logging."""
logger = get_logger("test_logger", logging_level="DEBUG")
@@ -37,9 +28,10 @@ def test_get_logger_console_logging(clean_up_log_file):
assert isinstance(logger.handlers[0], logging.StreamHandler)
def test_get_logger_file_logging(clean_up_log_file):
def test_get_logger_file_logging(tmpdir):
"""Test logger creation with file logging."""
logger = get_logger("test_logger", log_file="test.log", logging_level="WARNING")
log_file = Path(tmpdir).joinpath("test.log")
logger = get_logger("test_logger", log_file=str(log_file), logging_level="WARNING")
# Check logger name
assert logger.name == "test_logger"
@@ -53,10 +45,10 @@ def test_get_logger_file_logging(clean_up_log_file):
assert isinstance(logger.handlers[1], RotatingFileHandler)
# Check file existence
assert os.path.exists("test.log")
assert log_file.exists()
def test_get_logger_no_file_logging(clean_up_log_file):
def test_get_logger_no_file_logging():
"""Test logger creation without file logging."""
logger = get_logger("test_logger")
@@ -71,7 +63,7 @@ def test_get_logger_no_file_logging(clean_up_log_file):
assert isinstance(logger.handlers[0], logging.StreamHandler)
def test_get_logger_with_invalid_level(clean_up_log_file):
def test_get_logger_with_invalid_level():
    """Test logger creation with an invalid logging level."""
    # The message spelling ("loggin") matches the implementation's error text.
    # Drop the unused ``logger = ...`` binding; get_logger raises before returning.
    with pytest.raises(ValueError, match="Unknown loggin level: INVALID"):
        get_logger("test_logger", logging_level="INVALID")

View File

@@ -201,8 +201,8 @@ class TestPredictionProvider:
def test_update_method_force_enable(self, provider, monkeypatch):
"""Test that `update` executes when `force_enable` is True, even if `enabled` is False."""
# Preset values that are needed by update
monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "37.7749")
monkeypatch.setenv("EOS_PREDICTION__LONGITUDE", "-122.4194")
monkeypatch.setenv("EOS_GENERAL__LATITUDE", "37.7749")
monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "-122.4194")
# Override enabled to return False for this test
DerivedPredictionProvider.provider_enabled = False

View File

@@ -80,7 +80,7 @@ def sample_settings(config_eos):
@pytest.fixture
def sample_forecast_data():
"""Fixture that returns sample forecast data converted to pydantic model."""
with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r", encoding="utf8") as f_in:
with FILE_TESTDATA_PV_FORECAST_INPUT_1.open("r", encoding="utf-8", newline=None) as f_in:
input_data = f_in.read()
return PVForecastAkkudoktor._validate_data(input_data)
@@ -88,7 +88,7 @@ def sample_forecast_data():
@pytest.fixture
def sample_forecast_data_raw():
"""Fixture that returns raw sample forecast data."""
with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r", encoding="utf8") as f_in:
with FILE_TESTDATA_PV_FORECAST_INPUT_1.open("r", encoding="utf-8", newline=None) as f_in:
input_data = f_in.read()
return input_data
@@ -96,7 +96,7 @@ def sample_forecast_data_raw():
@pytest.fixture
def sample_forecast_report():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_PV_FORECAST_RESULT_1, "r", encoding="utf8") as f_res:
with FILE_TESTDATA_PV_FORECAST_RESULT_1.open("r", encoding="utf-8", newline=None) as f_res:
input_data = f_res.read()
return input_data

View File

@@ -33,7 +33,7 @@ def provider(sample_import_1_json, config_eos):
@pytest.fixture
def sample_import_1_json():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_PVFORECASTIMPORT_1_JSON, "r") as f_res:
with FILE_TESTDATA_PVFORECASTIMPORT_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
input_data = json.load(f_res)
return input_data

View File

@@ -1,13 +1,443 @@
import json
import os
import signal
import time
from http import HTTPStatus
from pathlib import Path
import psutil
import pytest
import requests
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
FILE_TESTDATA_EOSSERVER_CONFIG_1 = DIR_TESTDATA.joinpath("eosserver_config_1.json")


def test_server(server, config_eos):
    """Test the server."""
    # validate correct path in server
    assert config_eos.general.data_folder_path is not None
    assert config_eos.general.data_folder_path.is_dir()

    response = requests.get(f"{server}/v1/config")
    assert response.status_code == HTTPStatus.OK
class TestServer:
    """Tests against one running EOS server instance shared by the whole class."""

    def test_server_setup_for_class(self, server_setup_for_class):
        """Ensure server is started."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.get(f"{server}/v1/config")
        assert result.status_code == HTTPStatus.OK

        # Get testing config
        config_json = result.json()
        config_folder_path = Path(config_json["general"]["config_folder_path"])
        config_file_path = Path(config_json["general"]["config_file_path"])
        data_folder_path = Path(config_json["general"]["data_folder_path"])
        # NOTE(review): "data_ouput_path" looks like a typo for "data_output_path"
        # (local name only, behavior unaffected).
        data_ouput_path = Path(config_json["general"]["data_output_path"])
        # Assure we are working in test environment
        assert str(config_folder_path).startswith(eos_dir)
        assert str(config_file_path).startswith(eos_dir)
        assert str(data_folder_path).startswith(eos_dir)
        assert str(data_ouput_path).startswith(eos_dir)

    def test_prediction_brightsky(self, server_setup_for_class, is_system_test):
        """Test weather prediction by BrightSky."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.get(f"{server}/v1/config")
        assert result.status_code == HTTPStatus.OK

        # Get testing config
        config_json = result.json()
        config_folder_path = Path(config_json["general"]["config_folder_path"])
        # Assure we are working in test environment
        assert str(config_folder_path).startswith(eos_dir)

        result = requests.put(f"{server}/v1/config/weather/provider", json="BrightSky")
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "BrightSky" in providers

        if is_system_test:
            # System test only: fetch real data and expect more than a day of values.
            result = requests.post(f"{server}/v1/prediction/update/BrightSky")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24
        else:
            pass

    def test_prediction_clearoutside(self, server_setup_for_class, is_system_test):
        """Test weather prediction by ClearOutside."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.put(f"{server}/v1/config/weather/provider", json="ClearOutside")
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "ClearOutside" in providers

        if is_system_test:
            # System test only: fetch real data and expect more than a day of values.
            result = requests.post(f"{server}/v1/prediction/update/ClearOutside")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24
        else:
            pass

    def test_prediction_pvforecastakkudoktor(self, server_setup_for_class, is_system_test):
        """Test PV prediction by PVForecastAkkudoktor."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        # Reset config
        with FILE_TESTDATA_EOSSERVER_CONFIG_1.open("r", encoding="utf-8", newline=None) as fd:
            config = json.load(fd)
        config["pvforecast"]["provider"] = "PVForecastAkkudoktor"
        result = requests.put(f"{server}/v1/config", json=config)
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "PVForecastAkkudoktor" in providers

        if is_system_test:
            # System test only: fetch real data and expect more than a day of values.
            result = requests.post(f"{server}/v1/prediction/update/PVForecastAkkudoktor")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=pvforecast_ac_power")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24
        else:
            pass

    def test_prediction_elecpriceakkudoktor(self, server_setup_for_class, is_system_test):
        """Test electricity price prediction by ElecPriceImport."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        # Reset config
        with FILE_TESTDATA_EOSSERVER_CONFIG_1.open("r", encoding="utf-8", newline=None) as fd:
            config = json.load(fd)
        config["elecprice"]["provider"] = "ElecPriceAkkudoktor"
        result = requests.put(f"{server}/v1/config", json=config)
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "ElecPriceAkkudoktor" in providers

        if is_system_test:
            # System test only: fetch real data and expect more than a day of values.
            result = requests.post(f"{server}/v1/prediction/update/ElecPriceAkkudoktor")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=elecprice_marketprice_wh")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24
        else:
            pass

    def test_prediction_loadakkudoktor(self, server_setup_for_class, is_system_test):
        """Test load prediction by LoadAkkudoktor."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.put(f"{server}/v1/config/load/provider", json="LoadAkkudoktor")
        assert result.status_code == HTTPStatus.OK

        # Assure prediction is enabled
        result = requests.get(f"{server}/v1/prediction/providers?enabled=true")
        assert result.status_code == HTTPStatus.OK
        providers = result.json()
        assert "LoadAkkudoktor" in providers

        if is_system_test:
            # System test only: fetch real data and expect more than a day of values.
            result = requests.post(f"{server}/v1/prediction/update/LoadAkkudoktor")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=load_mean")
            assert result.status_code == HTTPStatus.OK

            data = result.json()
            assert len(data["data"]) > 24
        else:
            pass

    def test_admin_cache(self, server_setup_for_class, is_system_test):
        """Test whether cache is reconstructed from cached files."""
        server = server_setup_for_class["server"]
        eos_dir = server_setup_for_class["eos_dir"]

        result = requests.get(f"{server}/v1/admin/cache")
        assert result.status_code == HTTPStatus.OK
        cache = result.json()

        if is_system_test:
            # There should be some cache data
            assert cache != {}

            # Save cache
            result = requests.post(f"{server}/v1/admin/cache/save")
            assert result.status_code == HTTPStatus.OK
            cache_saved = result.json()
            assert cache_saved == cache

            # Clear cache - should clear nothing as all cache files expire in the future
            result = requests.post(f"{server}/v1/admin/cache/clear")
            assert result.status_code == HTTPStatus.OK
            cache_cleared = result.json()
            assert cache_cleared == cache

            # Force clear cache
            result = requests.post(f"{server}/v1/admin/cache/clear?clear_all=true")
            assert result.status_code == HTTPStatus.OK
            cache_cleared = result.json()
            assert cache_cleared == {}

            # Try to load already deleted cache entries
            result = requests.post(f"{server}/v1/admin/cache/load")
            assert result.status_code == HTTPStatus.OK
            cache_loaded = result.json()
            assert cache_loaded == {}

            # Cache should still be empty
            result = requests.get(f"{server}/v1/admin/cache")
            assert result.status_code == HTTPStatus.OK
            cache = result.json()
            assert cache == {}
class TestServerStartStop:
    """Tests for starting, restarting and stopping the EOS/EOSdash servers."""

    def test_server_start_eosdash(self, tmpdir):
        """Test the EOSdash server startup from EOS."""
        # Do not use any fixture as this will make pytest the owner of the EOSdash port.
        if os.name == "nt":
            host = "localhost"
            # Windows does not provide SIGKILL
            sigkill = signal.SIGTERM
        else:
            host = "0.0.0.0"
            sigkill = signal.SIGKILL
        port = 8503
        eosdash_port = 8504
        timeout = 120

        server = f"http://{host}:{port}"
        eosdash_server = f"http://{host}:{eosdash_port}"
        eos_dir = str(tmpdir)

        # Cleanup any EOSdash process left.
        # Best effort: a connection error just means no stale instance is running.
        try:
            result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, sigkill)
                time.sleep(1)
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except Exception:
            pass

        # Wait for EOSdash port to be freed
        process_info: list[dict] = []
        for _ in range(int(timeout / 3)):
            process_info = []
            pids: list[int] = []
            for conn in psutil.net_connections(kind="inet"):
                if conn.laddr.port == eosdash_port:
                    if conn.pid not in pids:
                        # Get fresh process info
                        process = psutil.Process(conn.pid)
                        pids.append(conn.pid)
                        process_info.append(process.as_dict(attrs=["pid", "cmdline"]))
            if len(process_info) == 0:
                break
            time.sleep(3)
        assert len(process_info) == 0

        # Import after test setup to prevent creation of config file before test
        from akkudoktoreos.server.eos import start_eosdash

        process = start_eosdash(
            host=host,
            port=eosdash_port,
            eos_host=host,
            eos_port=port,
            log_level="debug",
            access_log=False,
            reload=False,
            eos_dir=eos_dir,
            eos_config_dir=eos_dir,
        )

        # Assure EOSdash is up
        startup = False
        error = ""
        for _ in range(int(timeout / 3)):
            try:
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)
        assert startup, f"Connection to {eosdash_server}/eosdash/health failed: {error}"
        assert result.json()["status"] == "alive"

        # Shutdown eosdash
        # Best effort: the final health check is expected to fail once it is down.
        try:
            result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
            if result.status_code == HTTPStatus.OK:
                pid = result.json()["pid"]
                os.kill(pid, signal.SIGTERM)
                time.sleep(1)
                result = requests.get(f"{eosdash_server}/eosdash/health", timeout=2)
                assert result.status_code != HTTPStatus.OK
        except Exception:
            pass

    @pytest.mark.skipif(os.name == "nt", reason="Server restart not supported on Windows")
    def test_server_restart(self, server_setup_for_function, is_system_test):
        """Test server restart."""
        server = server_setup_for_function["server"]
        eos_dir = server_setup_for_function["eos_dir"]
        timeout = server_setup_for_function["timeout"]

        result = requests.get(f"{server}/v1/config")
        assert result.status_code == HTTPStatus.OK

        # Get testing config
        config_json = result.json()
        config_folder_path = Path(config_json["general"]["config_folder_path"])
        config_file_path = Path(config_json["general"]["config_file_path"])
        data_folder_path = Path(config_json["general"]["data_folder_path"])
        data_output_path = Path(config_json["general"]["data_output_path"])
        cache_file_path = data_folder_path.joinpath(config_json["cache"]["subpath"]).joinpath(
            "cachefilestore.json"
        )
        # Assure we are working in test environment
        assert str(config_folder_path).startswith(eos_dir)
        assert str(config_file_path).startswith(eos_dir)
        assert str(data_folder_path).startswith(eos_dir)
        assert str(data_output_path).startswith(eos_dir)

        if is_system_test:
            # Prepare cache entry and get cached data
            result = requests.put(f"{server}/v1/config/weather/provider", json="BrightSky")
            assert result.status_code == HTTPStatus.OK

            result = requests.post(f"{server}/v1/prediction/update/BrightSky")
            assert result.status_code == HTTPStatus.OK

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK
            data = result.json()
            assert data["data"] != {}

            result = requests.put(f"{server}/v1/config/file")
            assert result.status_code == HTTPStatus.OK

        # Save cache
        result = requests.post(f"{server}/v1/admin/cache/save")
        assert result.status_code == HTTPStatus.OK
        cache = result.json()

        assert cache_file_path.exists()

        result = requests.get(f"{server}/v1/admin/cache")
        assert result.status_code == HTTPStatus.OK
        cache = result.json()

        result = requests.get(f"{server}/v1/health")
        assert result.status_code == HTTPStatus.OK
        pid = result.json()["pid"]

        result = requests.post(f"{server}/v1/admin/server/restart")
        assert result.status_code == HTTPStatus.OK
        assert "Restarting EOS.." in result.json()["message"]
        new_pid = result.json()["pid"]

        # Wait for server to shut down
        for _ in range(10):
            try:
                result = requests.get(f"{server}/v1/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    pid = result.json()["pid"]
                    if pid == new_pid:
                        # Already started
                        break
                else:
                    break
            except Exception:
                # Connection failure - the old server process is gone.
                break
            time.sleep(3)

        # Assure EOS is up again
        startup = False
        error = ""
        for _ in range(int(timeout / 3)):
            try:
                result = requests.get(f"{server}/v1/health", timeout=2)
                if result.status_code == HTTPStatus.OK:
                    startup = True
                    break
                error = f"{result.status_code}, {str(result.content)}"
            except Exception as ex:
                error = str(ex)
            time.sleep(3)
        assert startup, f"Connection to {server}/v1/health failed: {error}"
        assert result.json()["status"] == "alive"
        pid = result.json()["pid"]
        assert pid == new_pid

        result = requests.get(f"{server}/v1/admin/cache")
        assert result.status_code == HTTPStatus.OK
        new_cache = result.json()
        # The restarted server must have reloaded at least the saved cache entries.
        assert cache.items() <= new_cache.items()

        if is_system_test:
            result = requests.get(f"{server}/v1/config")
            assert result.status_code == HTTPStatus.OK
            assert result.json()["weather"]["provider"] == "BrightSky"

            # Wait for initialisation task to have finished
            time.sleep(5)

            result = requests.get(f"{server}/v1/prediction/series?key=weather_temp_air")
            assert result.status_code == HTTPStatus.OK
            assert result.json() == data

        # Shutdown the newly created server
        result = requests.post(f"{server}/v1/admin/server/shutdown")
        assert result.status_code == HTTPStatus.OK
        assert "Stopping EOS.." in result.json()["message"]
        new_pid = result.json()["pid"]

View File

@@ -5,9 +5,9 @@ from unittest.mock import Mock, patch
import pandas as pd
import pytest
from akkudoktoreos.core.cache import CacheFileStore
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import to_datetime
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
@@ -20,15 +20,15 @@ FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON = DIR_TESTDATA.joinpath("weatherforecast_b
def provider(monkeypatch):
"""Fixture to create a WeatherProvider instance."""
monkeypatch.setenv("EOS_WEATHER__WEATHER_PROVIDER", "BrightSky")
monkeypatch.setenv("EOS_PREDICTION__LATITUDE", "50.0")
monkeypatch.setenv("EOS_PREDICTION__LONGITUDE", "10.0")
monkeypatch.setenv("EOS_GENERAL__LATITUDE", "50.0")
monkeypatch.setenv("EOS_GENERAL__LONGITUDE", "10.0")
return WeatherBrightSky()
@pytest.fixture
def sample_brightsky_1_json():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "r") as f_res:
with FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
input_data = json.load(f_res)
return input_data
@@ -36,7 +36,7 @@ def sample_brightsky_1_json():
@pytest.fixture
def sample_brightsky_2_json():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON, "r") as f_res:
with FILE_TESTDATA_WEATHERBRIGHTSKY_2_JSON.open("r", encoding="utf-8", newline=None) as f_res:
input_data = json.load(f_res)
return input_data
@@ -173,15 +173,18 @@ def test_update_data(mock_get, provider, sample_brightsky_1_json, cache_store):
# ------------------------------------------------
@pytest.mark.skip(reason="For development only")
def test_brightsky_development_forecast_data(provider):
def test_brightsky_development_forecast_data(provider, config_eos, is_system_test):
"""Fetch data from real BrightSky server."""
if not is_system_test:
return
# Preset, as this is usually done by update_data()
provider.start_datetime = to_datetime("2024-10-26 00:00:00")
provider.latitude = 50.0
provider.longitude = 10.0
ems_eos = get_ems()
ems_eos.set_start_datetime(to_datetime("2024-10-26 00:00:00", in_timezone="Europe/Berlin"))
config_eos.general.latitude = 50.0
config_eos.general.longitude = 10.0
brightsky_data = provider._request_forecast()
with open(FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON, "w") as f_out:
with FILE_TESTDATA_WEATHERBRIGHTSKY_1_JSON.open("w", encoding="utf-8", newline="\n") as f_out:
json.dump(brightsky_data, f_out, indent=4)

View File

@@ -9,9 +9,9 @@ import pvlib
import pytest
from bs4 import BeautifulSoup
from akkudoktoreos.core.cache import CacheFileStore
from akkudoktoreos.core.ems import get_ems
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
from akkudoktoreos.utils.cacheutil import CacheFileStore
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
@@ -39,7 +39,9 @@ def provider(config_eos):
@pytest.fixture
def sample_clearout_1_html():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_HTML, "r") as f_res:
with FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_HTML.open(
"r", encoding="utf-8", newline=None
) as f_res:
input_data = f_res.read()
return input_data
@@ -47,7 +49,7 @@ def sample_clearout_1_html():
@pytest.fixture
def sample_clearout_1_data():
"""Fixture that returns sample forecast data."""
with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "r", encoding="utf8") as f_in:
with FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA.open("r", encoding="utf-8", newline=None) as f_in:
json_str = f_in.read()
data = WeatherClearOutside.from_json(json_str)
return data
@@ -220,7 +222,9 @@ def test_development_forecast_data(mock_get, provider, sample_clearout_1_html):
# Fill the instance
provider.update_data(force_enable=True)
with open(FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA, "w", encoding="utf8") as f_out:
with FILE_TESTDATA_WEATHERCLEAROUTSIDE_1_DATA.open(
"w", encoding="utf-8", newline="\n"
) as f_out:
f_out.write(provider.to_json())

View File

@@ -33,7 +33,7 @@ def provider(sample_import_1_json, config_eos):
@pytest.fixture
def sample_import_1_json():
"""Fixture that returns sample forecast data report."""
with open(FILE_TESTDATA_WEATHERIMPORT_1_JSON, "r") as f_res:
with FILE_TESTDATA_WEATHERIMPORT_1_JSON.open("r", encoding="utf-8", newline=None) as f_res:
input_data = json.load(f_res)
return input_data

86
tests/testdata/eosserver_config_1.json vendored Normal file
View File

@@ -0,0 +1,86 @@
{
"elecprice": {
"charges_kwh": 0.21,
"provider": "ElecPriceImport"
},
"general": {
"latitude": 52.5,
"longitude": 13.4
},
"prediction": {
"historic_hours": 48,
"hours": 48
},
"load": {
"provider": "LoadImport",
"provider_settings": {
"loadakkudoktor_year_energy": 20000
}
},
"optimization": {
"hours": 48
},
"pvforecast": {
"planes": [
{
"peakpower": 5.0,
"surface_azimuth": -10,
"surface_tilt": 7,
"userhorizon": [
20,
27,
22,
20
],
"inverter_paco": 10000
},
{
"peakpower": 4.8,
"surface_azimuth": -90,
"surface_tilt": 7,
"userhorizon": [
30,
30,
30,
50
],
"inverter_paco": 10000
},
{
"peakpower": 1.4,
"surface_azimuth": -40,
"surface_tilt": 60,
"userhorizon": [
60,
30,
0,
30
],
"inverter_paco": 2000
},
{
"peakpower": 1.6,
"surface_azimuth": 5,
"surface_tilt": 45,
"userhorizon": [
45,
25,
30,
60
],
"inverter_paco": 1400
}
],
"provider": "PVForecastImport"
},
"server": {
"startup_eosdash": true,
"host": "0.0.0.0",
"port": 8503,
"eosdash_host": "0.0.0.0",
"eosdash_port": 8504
},
"weather": {
"provider": "WeatherImport"
}
}