mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-04-19 08:55:15 +00:00
Add test to PVForecast (#174)
* Add documentation to class_pv_forecast.py. Added documentation. Beware mostly generated by ChatGPT. Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com> * Add CacheFileStore, datetime and logger utilities. The `CacheFileStore` class is a singleton-based, thread-safe key-value store for managing temporary file objects, allowing the creation, retrieval, and management of cache files. The utility modules offer a flexible logging setup (`get_logger`) and utilities to handle different date-time formats (`to_datetime`, `to_timestamp`) and timezone detection (`to_timezone`). - Cache files are automatically valid for the current date unless specified otherwise. This is to mimic the current behaviour used in several classes. - The logger supports rotating log files to prevent excessive log file size. - The `to_datetime` and `to_timestamp` functions support a wide variety of input types and formats. They provide the time conversion that is e.g. used in PVForecast. Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com> * Improve testability of PVForecast Improvements for testing of PVForecast - Use common utility functions to allow for general testing at one spot. - to_datetime - CacheFileStore - Use logging instead of print to easily capture in testing. - Add validation of the json schema for Akkudoktor PV forecast data. - Allow to create an empty PVForecast instance as base instance for testing. - Make process_data() complete for filling a PVForecast instance for testing. - Normalize forecast datetime to timezone of system given in loaded data. - Do not print report but provide report for test checks. - Get rid of cache file path using the CacheFileStore to automate cache file usage. - Improved module documentation. Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com> * Add test for PVForecast and newly extracted utility modules. 
- Add test for PVForecast - Add test for CacheFileStore in the new cachefilestore module - Add test for to_datetime, to_timestamp, to_timezone in the new datetimeutil module - Add test for get_logger in the new logutil module Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com> --------- Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com> Co-authored-by: Normann <github@koldrack.com>
This commit is contained in:
parent
235ae87be5
commit
d2ba0adb5f
@ -2,6 +2,8 @@ numpy==2.1.3
|
||||
matplotlib==3.9.2
|
||||
flask==3.0.3
|
||||
scikit-learn==1.5.2
|
||||
timezonefinder==6.5.4
|
||||
deap==1.4.1
|
||||
requests==2.32.3
|
||||
pandas==2.2.3
|
||||
pydantic==2.9.2
|
||||
|
635
src/akkudoktoreos/cachefilestore.py
Normal file
635
src/akkudoktoreos/cachefilestore.py
Normal file
@ -0,0 +1,635 @@
|
||||
"""cachefilestore.py.
|
||||
|
||||
This module provides a class for in-memory managing of cache files.
|
||||
|
||||
The `CacheFileStore` class is a singleton-based, thread-safe key-value store for managing
|
||||
temporary file objects, allowing the creation, retrieval, and management of cache files.
|
||||
|
||||
Classes:
|
||||
--------
|
||||
- CacheFileStore: A thread-safe, singleton class for in-memory managing of file-like cache objects.
|
||||
- CacheFileStoreMeta: Metaclass for enforcing the singleton behavior in `CacheFileStore`.
|
||||
|
||||
Example usage:
|
||||
--------------
|
||||
# CacheFileStore usage
|
||||
>>> cache_store = CacheFileStore()
|
||||
>>> cache_store.create('example_key')
|
||||
>>> cache_file = cache_store.get('example_key')
|
||||
>>> cache_file.write('Some data')
|
||||
>>> cache_file.seek(0)
|
||||
>>> print(cache_file.read()) # Output: 'Some data'
|
||||
|
||||
Notes:
|
||||
------
|
||||
- Cache files are automatically associated with the current date unless specified.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import os
|
||||
import pickle
|
||||
import tempfile
|
||||
import threading
|
||||
from datetime import date, datetime, time, timedelta
|
||||
from typing import List, Optional, Union
|
||||
|
||||
from akkudoktoreos.datetimeutil import to_datetime, to_timedelta
|
||||
from akkudoktoreos.logutil import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class CacheFileStoreMeta(type):
    """Metaclass that enforces singleton behavior for `CacheFileStore`.

    The first call to ``CacheFileStore()`` creates the single instance; every
    later call returns that same instance. Creation is guarded by a class-level
    lock so concurrent first calls from multiple threads stay safe.
    """

    _instances = {}

    # Lock object to synchronize threads on first access to CacheFileStore.
    _lock: threading.Lock = threading.Lock()

    def __call__(cls):
        """Return the one and only CacheFileStore instance."""
        with cls._lock:
            if cls not in cls._instances:
                cls._instances[cls] = super().__call__()
            return cls._instances[cls]
|
||||
|
||||
|
||||
class CacheFileStore(metaclass=CacheFileStoreMeta):
|
||||
"""A key-value store that manages file-like tempfile objects to be used as cache files.
|
||||
|
||||
Cache files are associated with a date. If no date is specified, the cache files are
|
||||
associated with the current date by default. The class provides methods to create
|
||||
new cache files, retrieve existing ones, delete specific files, and clear all cache
|
||||
entries.
|
||||
|
||||
CacheFileStore is a thread-safe singleton. Only one store instance will ever be created.
|
||||
|
||||
Attributes:
|
||||
store (dict): A dictionary that holds the in-memory cache file objects
|
||||
with their associated keys and dates.
|
||||
|
||||
Example usage:
|
||||
>>> cache_store = CacheFileStore()
|
||||
>>> cache_store.create('example_file')
|
||||
>>> cache_file = cache_store.get('example_file')
|
||||
>>> cache_file.write('Some data')
|
||||
>>> cache_file.seek(0)
|
||||
>>> print(cache_file.read()) # Output: 'Some data'
|
||||
"""
|
||||
|
||||
def __init__(self):
    """Set up the singleton's empty cache-file registry.

    The registry maps a hashed cache key to a ``(file_obj, until_datetime)``
    tuple. A dedicated lock guards all registry access so the store can be
    used from multiple threads.
    """
    self._store = dict()
    self._store_lock = threading.Lock()
|
||||
|
||||
def _generate_cache_file_key(
    self, key: str, until_datetime: Union[datetime, None]
) -> tuple[str, datetime]:
    """Generate a unique cache file key based on the key and validity datetime.

    The cache file key is a combination of the input key and the normalized
    (UTC) validity datetime, hashed using SHA-256 to ensure uniqueness.

    Args:
        key (str): The key that identifies the cache file.
        until_datetime (Union[datetime, None]): The datetime until the cache
            file is valid. Defaults to the current date at maximum time
            (23:59:59) if None.

    Returns:
        A tuple of:
            str: A hashed string that serves as the unique identifier for the cache file.
            datetime: The datetime until the cache file is valid.
    """
    if until_datetime is None:
        # Default validity: end of the current day, mirroring the behavior
        # documented on the class.
        until_datetime = datetime.combine(date.today(), time.max)
    # Normalize to a UTC string so equal datetimes always hash identically.
    key_datetime = to_datetime(until_datetime, as_string="UTC")
    cache_key = hashlib.sha256(f"{key}{key_datetime}".encode("utf-8")).hexdigest()
    return (cache_key, until_datetime)
|
||||
|
||||
def _get_file_path(self, file_obj):
|
||||
"""Retrieve the file path from a file-like object.
|
||||
|
||||
Args:
|
||||
file_obj: A file-like object (e.g., an instance of
|
||||
NamedTemporaryFile, BytesIO, StringIO) from which to retrieve the
|
||||
file path.
|
||||
|
||||
Returns:
|
||||
str or None: The file path if available, or None if the file-like
|
||||
object does not provide a file path.
|
||||
"""
|
||||
file_path = None
|
||||
if hasattr(file_obj, "name"):
|
||||
file_path = file_obj.name # Get the file path from the cache file object
|
||||
return file_path
|
||||
|
||||
def _until_datetime_by_options(
    self,
    until_date: Union[datetime, date, str, int, float, None] = None,
    until_datetime: Union[datetime, date, str, int, float, None] = None,
    with_ttl: Union[timedelta, str, int, float, None] = None,
):
    """Resolve the validity end datetime from the given (exclusive) options.

    Precedence: `until_datetime`, then `with_ttl`, then `until_date`. If none
    is given, the end of the current day is used.
    """
    if until_datetime:
        return to_datetime(until_datetime)
    if with_ttl:
        # Time-to-live counts from now.
        return to_datetime(datetime.now() + to_timedelta(with_ttl))
    if until_date:
        # Only the date part matters; to_datetime normalizes it.
        return to_datetime(to_datetime(until_date).date())
    # Default: end of today (maximum time of day).
    return to_datetime(datetime.combine(date.today(), time.max))
|
||||
|
||||
def _is_valid_cache_item(
|
||||
self,
|
||||
cache_item: (),
|
||||
until_datetime: datetime = None,
|
||||
at_datetime: datetime = None,
|
||||
before_datetime: datetime = None,
|
||||
):
|
||||
cache_file_datetime = cache_item[1] # Extract the datetime associated with the cache item
|
||||
if (
|
||||
(until_datetime and until_datetime == cache_file_datetime)
|
||||
or (at_datetime and at_datetime <= cache_file_datetime)
|
||||
or (before_datetime and cache_file_datetime < before_datetime)
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _search(
    self,
    key: str,
    until_datetime: Union[datetime, date, str, int, float] = None,
    at_datetime: Union[datetime, date, str, int, float] = None,
    before_datetime: Union[datetime, date, str, int, float] = None,
):
    """Search for a cached item matching the key and the given datetime criteria.

    This method looks for a cache item whose key matches the given `key` and whose
    associated validity datetime satisfies one of the given criteria: exact match
    with `until_datetime`, still valid at `at_datetime`, or expiring before
    `before_datetime`. If both key and datetime criteria match, it returns the
    cache item. Otherwise, it returns `None`.

    Args:
        key (str): The key to identify the cache item.
        until_datetime (Union[datetime, date, str, int, float], optional): Matches
            items whose validity datetime equals this value exactly.
        at_datetime (Union[datetime, date, str, int, float], optional): The datetime
            to compare with the cache item's datetime (item must still be valid).
        before_datetime (Union[datetime, date, str, int, float], optional): The datetime
            to compare the cache item's datetime to be before.

    Returns:
        Optional[tuple]: Returns (cache_file_key, cache_file, cache_file_datetime)
            if found, otherwise returns `None`.
    """
    # Convert input to datetime if they are not None
    if until_datetime:
        until_datetime = to_datetime(until_datetime)
    if at_datetime:
        at_datetime = to_datetime(at_datetime)
    if before_datetime:
        before_datetime = to_datetime(before_datetime)

    for cache_file_key, cache_item in self._store.items():
        # Check if the cache file datetime matches the given criteria
        if self._is_valid_cache_item(
            cache_item,
            until_datetime=until_datetime,
            at_datetime=at_datetime,
            before_datetime=before_datetime,
        ):
            # This cache file is within the given datetime range
            # Extract the datetime associated with the cache item
            cache_file_datetime = cache_item[1]

            # Generate a cache file key based on the given key and the cache file datetime
            generated_key, _until_dt = self._generate_cache_file_key(key, cache_file_datetime)

            if generated_key == cache_file_key:
                # The key matches, return the key and the cache item
                return (cache_file_key, cache_item[0], cache_file_datetime)

    # Return None if no matching cache item is found
    return None
|
||||
|
||||
def create(
    self,
    key: str,
    until_date: Union[datetime, date, str, int, float, None] = None,
    until_datetime: Union[datetime, date, str, int, float, None] = None,
    with_ttl: Union[timedelta, str, int, float, None] = None,
    mode: str = "wb+",
    delete: bool = False,
    suffix: Optional[str] = None,
):
    """Creates a new file-like tempfile object associated with the given key.

    If a cache file with the given key and valid timedate already exists, the existing file is
    returned. Otherwise, a new tempfile object is created and stored in the key-value store.

    Args:
        key (str): The key to store the cache file under.
        until_date (Union[datetime, date, str, int, float, None], optional): The date
            until the cache file is valid. Time of day is set to maximum time (23:59:59).
        until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
            provided.
        with_ttl (Union[timedelta, str, int, float, None], optional): The time to live that
            the cache file is valid. Time starts now.
        mode (str, optional): The mode in which the tempfile is opened
            (e.g., 'w+', 'r+', 'wb+'). Defaults to 'wb+'.
        delete (bool, optional): Whether to delete the file after it is closed.
            Defaults to False (keeps the file).
        suffix (str, optional): The suffix for the cache file (e.g., '.txt', '.log').
            Defaults to None.

    Returns:
        file_obj: A file-like object representing the cache file.

    Example:
        >>> cache_file = cache_store.create('example_file', suffix='.txt')
        >>> cache_file.write('Some cached data')
        >>> cache_file.seek(0)
        >>> print(cache_file.read())  # Output: 'Some cached data'
    """
    # Resolve the single validity datetime from the mutually exclusive options.
    until_datetime = self._until_datetime_by_options(
        until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
    )

    # NOTE(review): the returned second element rebinds the `until_date`
    # parameter here; it actually holds the resolved validity datetime.
    cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime)
    with self._store_lock:  # Synchronize access to _store
        if cache_file_key in self._store:
            # File already available
            cache_file_obj, until_datetime = self._store.get(cache_file_key)
        else:
            # NamedTemporaryFile gives the cache entry a real filesystem path.
            cache_file_obj = tempfile.NamedTemporaryFile(
                mode=mode, delete=delete, suffix=suffix
            )
            self._store[cache_file_key] = (cache_file_obj, until_datetime)
        # Rewind so callers can immediately read or overwrite the file.
        cache_file_obj.seek(0)
    return cache_file_obj
|
||||
|
||||
def set(
    self,
    key: str,
    file_obj,
    until_date: Union[datetime, date, str, int, float, None] = None,
    until_datetime: Union[datetime, date, str, int, float, None] = None,
    with_ttl: Union[timedelta, str, int, float, None] = None,
):
    """Stores a file-like object in the cache under the specified key and date.

    This method allows you to manually set a file-like object into the cache with a specific key
    and optional date.

    Args:
        key (str): The key to store the file object under.
        file_obj: The file-like object.
        until_date (Union[datetime, date, str, int, float, None], optional): The date
            until the cache file is valid. Time of day is set to maximum time (23:59:59).
        until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
            provided.
        with_ttl (Union[timedelta, str, int, float, None], optional): The time to live that
            the cache file is valid. Time starts now.

    Raises:
        ValueError: If the key is already in store.

    Example:
        >>> cache_store.set('example_file', io.BytesIO(b'Some binary data'))
    """
    # Resolve the single validity datetime from the mutually exclusive options.
    until_datetime = self._until_datetime_by_options(
        until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
    )

    # NOTE(review): `until_date` is rebound here to the resolved validity
    # datetime returned by the key generator.
    cache_file_key, until_date = self._generate_cache_file_key(key, until_datetime)
    with self._store_lock:  # Synchronize access to _store
        # Unlike create(), set() refuses to overwrite an existing entry.
        if cache_file_key in self._store:
            raise ValueError(f"Key already in store: `{key}`.")

        self._store[cache_file_key] = (file_obj, until_date)
|
||||
|
||||
def get(
    self,
    key: str,
    until_date: Union[datetime, date, str, int, float, None] = None,
    until_datetime: Union[datetime, date, str, int, float, None] = None,
    at_datetime: Union[datetime, date, str, int, float, None] = None,
    before_datetime: Union[datetime, date, str, int, float, None] = None,
):
    """Retrieves the cache file associated with the given key and validity datetime.

    If no cache file is found for the provided key and datetime, the method returns None.
    The retrieved file is a file-like object that can be read from or written to.

    Args:
        key (str): The key to retrieve the cache file for.
        until_date (Union[datetime, date, str, int, float, None], optional): The date
            until the cache file is valid. Time of day is set to maximum time (23:59:59).
        until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
            provided.
        at_datetime (Union[datetime, date, str, int, float, None], optional): The datetime the
            cache file shall be valid at. Time of day is set to maximum time (23:59:59) if not
            provided. Defaults to the current datetime if None is provided.
        before_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            to compare the cache files datetime to be before.

    Returns:
        file_obj: The file-like cache object, or None if no file is found.

    Example:
        >>> cache_file = cache_store.get('example_file')
        >>> if cache_file:
        >>>     cache_file.seek(0)
        >>>     print(cache_file.read())  # Output: Cached data (if exists)
    """
    # Only one criterion is resolved; until_* takes precedence over at/before.
    if until_datetime or until_date:
        until_datetime = self._until_datetime_by_options(
            until_datetime=until_datetime, until_date=until_date
        )
    elif at_datetime:
        at_datetime = to_datetime(at_datetime)
    elif before_datetime:
        before_datetime = to_datetime(before_datetime)
    else:
        # No criterion given: look for a file that is valid right now.
        at_datetime = to_datetime(datetime.now())

    with self._store_lock:  # Synchronize access to _store
        search_item = self._search(key, until_datetime, at_datetime, before_datetime)
        if search_item is None:
            return None
        # search_item is (cache_file_key, cache_file, cache_file_datetime).
        return search_item[1]
|
||||
|
||||
def delete(
    self,
    key,
    until_date: Union[datetime, date, str, int, float, None] = None,
    until_datetime: Union[datetime, date, str, int, float, None] = None,
    before_datetime: Union[datetime, date, str, int, float, None] = None,
):
    """Deletes the cache file associated with the given key and datetime.

    This method removes the cache file from the store.

    Args:
        key (str): The key of the cache file to delete.
        until_date (Union[datetime, date, str, int, float, None], optional): The date
            until the cache file is valid. Time of day is set to maximum time (23:59:59).
        until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
            provided.
        before_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            the cache file shall become or be invalid at. Time of day is set to maximum time
            (23:59:59) if not provided. Defaults to tomorrow start of day.
    """
    # Resolve the search criterion; until_* takes precedence over before_datetime.
    if until_datetime or until_date:
        until_datetime = self._until_datetime_by_options(
            until_datetime=until_datetime, until_date=until_date
        )
    elif before_datetime:
        before_datetime = to_datetime(before_datetime)
    else:
        # Default: delete files that become invalid before tomorrow's start of day.
        today = datetime.now().date()  # Get today's date
        tomorrow = today + timedelta(days=1)  # Add one day to get tomorrow's date
        before_datetime = to_datetime(datetime.combine(tomorrow, time.min))

    with self._store_lock:  # Synchronize access to _store
        search_item = self._search(key, until_datetime, None, before_datetime)
        if search_item:
            cache_file_key = search_item[0]
            cache_file = search_item[1]
            cache_file_datetime = search_item[2]
            file_path = self._get_file_path(cache_file)
            if file_path is None:
                # In-memory file objects have no path to remove from disk.
                logger.warning(
                    f"The cache file with key '{cache_file_key}' is an in memory "
                    f"file object. Will only delete store entry but not file."
                )
                self._store.pop(cache_file_key)
                return
            file_path = cache_file.name  # Get the file path from the cache file object
            del self._store[cache_file_key]
            if os.path.exists(file_path):
                try:
                    os.remove(file_path)
                    logger.debug(f"Deleted cache file: {file_path}")
                except OSError as e:
                    # Best effort: log the failure, the store entry is gone already.
                    logger.error(f"Error deleting cache file {file_path}: {e}")
|
||||
|
||||
def clear(
    self, clear_all=False, before_datetime: Union[datetime, date, str, int, float, None] = None
):
    """Deletes all cache files or those expiring before `before_datetime`.

    Args:
        clear_all (bool, optional): Delete all cache files. Default is False.
        before_datetime (Union[datetime, date, str, int, float, None], optional): The
            threshold date. Cache files that are only valid before this date will be deleted.
            The default datetime is beginning of today.

    Raises:
        OSError: If there's an error during file deletion.
    """
    delete_keys = []  # List of keys to delete, prevent deleting when traversing the store
    clear_timestamp = None

    with self._store_lock:  # Synchronize access to _store
        for cache_file_key, cache_item in self._store.items():
            cache_file = cache_item[0]

            # Somewhat odd logic to prevent calling to_datetime on clear_all.
            # clear_all may be set on __del__. At this time some info for to_datetime will
            # not be available anymore.
            clear_file = clear_all
            if not clear_all:
                if clear_timestamp is None:
                    # Resolve the threshold lazily, only once per clear() call.
                    before_datetime = to_datetime(before_datetime, to_maxtime=False)
                    # Convert the threshold date to a timestamp (seconds since epoch)
                    clear_timestamp = to_datetime(before_datetime).timestamp()
                cache_file_timestamp = to_datetime(cache_item[1]).timestamp()
                if cache_file_timestamp < clear_timestamp:
                    clear_file = True

            if clear_file:
                # We have to clear this cache file
                delete_keys.append(cache_file_key)

                file_path = self._get_file_path(cache_file)

                if file_path is None:
                    # In memory file like object
                    logger.warning(
                        f"The cache file with key '{cache_file_key}' is an in memory "
                        f"file object. Will only delete store entry but not file."
                    )
                    continue

                if not os.path.exists(file_path):
                    # Already deleted
                    logger.warning(f"The cache file '{file_path}' was already deleted.")
                    continue

                # Finally remove the file
                try:
                    os.remove(file_path)
                    logger.debug(f"Deleted cache file: {file_path}")
                except OSError as e:
                    logger.error(f"Error deleting cache file {file_path}: {e}")

        # Drop the store entries after iteration to avoid mutating while traversing.
        for delete_key in delete_keys:
            del self._store[delete_key]
|
||||
|
||||
|
||||
def cache_in_file(
    ignore_params: List[str] = [],  # NOTE(review): mutable default — only read, but fragile
    until_date: Union[datetime, date, str, int, float, None] = None,
    until_datetime: Union[datetime, date, str, int, float, None] = None,
    with_ttl: Union[timedelta, str, int, float, None] = None,
    mode: str = "wb+",
    delete: bool = False,
    suffix: Optional[str] = None,
):
    """Decorator to cache the output of a function into a temporary file.

    The decorator caches function output to a cache file based on its inputs as key to identify
    the cache file. Ignore parameters are used to avoid key generation on non-deterministic
    inputs, such as time values. We can also ignore parameters that are slow to serialize or
    constant across runs, such as large objects.

    The cache file is created using `CacheFileStore` and stored with the generated key.
    If the file exists in the cache and has not expired, it is returned instead of recomputing
    the result.

    The decorator scans the arguments of the decorated function for a 'until_date' or
    'until_datetime' or 'with_ttl' or 'force_update' parameter. The value of this parameter will
    be used instead of the one given in the decorator if available.

    Content of cache files without a suffix are transparently pickled to save file space.

    Args:
        ignore_params (List[str], optional): Argument names excluded from cache key generation.
        until_date (Union[datetime, date, str, int, float, None], optional): The date
            until the cache file is valid. Time of day is set to maximum time (23:59:59).
        until_datetime (Union[datetime, date, str, int, float, None], optional): The datetime
            until the cache file is valid. Time of day is set to maximum time (23:59:59) if not
            provided.
        with_ttl (Union[timedelta, str, int, float, None], optional): The time to live that
            the cache file is valid. Time starts now.
        mode (str, optional): The mode in which the file will be opened. Defaults to 'wb+'.
        delete (bool, optional): Whether the cache file will be deleted after being closed.
            Defaults to False.
        suffix (str, optional): A suffix for the cache file, such as an extension (e.g., '.txt').
            Defaults to None.

    Returns:
        callable: A decorated function that caches its result in a file.

    Example:
        >>> @cache_in_file(suffix = '.txt')
        >>> def expensive_computation(until_date = None):
        >>>     # Perform some expensive computation
        >>>     return 'Some large result'
        >>>
        >>> result = expensive_computation(until_date = date.today())
    """

    def decorator(func):
        # NOTE(review): nonlocal makes the decorator parameters shared, mutable
        # state across ALL calls of ALL functions decorated by this cache_in_file()
        # instance — a per-call override sticks for later calls. Confirm intended.
        nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
        # Source code is part of the cache key so edits invalidate old caches.
        func_source_code = inspect.getsource(func)

        # NOTE(review): wrapper lacks functools.wraps(func); __name__/__doc__
        # of the decorated function are not preserved.
        def wrapper(*args, **kwargs):
            nonlocal ignore_params, until_date, until_datetime, with_ttl, mode, delete, suffix
            # Convert args to a dictionary based on the function's signature
            args_names = func.__code__.co_varnames[: func.__code__.co_argcount]
            args_dict = dict(zip(args_names, args))

            # Search for caching parameters of function and remove
            force_update = None
            for param in ["force_update", "until_datetime", "with_ttl", "until_date"]:
                if param in kwargs:
                    if param == "force_update":
                        force_update = kwargs[param]
                        kwargs.pop("force_update")

                    if param == "until_datetime":
                        until_datetime = kwargs[param]
                        until_date = None
                        with_ttl = None
                    elif param == "with_ttl":
                        until_datetime = None
                        until_date = None
                        with_ttl = kwargs[param]
                    elif param == "until_date":
                        until_datetime = None
                        until_date = kwargs[param]
                        with_ttl = None
                    # Strip all validity kwargs so they are not passed to func.
                    kwargs.pop("until_datetime", None)
                    kwargs.pop("until_date", None)
                    kwargs.pop("with_ttl", None)
                    break

            # Remove ignored params
            kwargs_clone = kwargs.copy()
            for param in ignore_params:
                args_dict.pop(param, None)
                kwargs_clone.pop(param, None)

            # Create key based on argument names, argument values, and function source code
            key = str(args_dict) + str(kwargs_clone) + str(func_source_code)

            result = None
            # Get cache file that is currently valid
            cache_file = CacheFileStore().get(key)
            if not force_update and cache_file is not None:
                # cache file is available
                try:
                    logger.debug("Used cache file for function: " + func.__name__)
                    cache_file.seek(0)
                    if "b" in mode:
                        # Binary mode: content was pickled on write.
                        result = pickle.load(cache_file)
                    else:
                        result = cache_file.read()
                except Exception as e:
                    logger.info(f"Read failed: {e}")
                    # Fail gracefully - force creation
                    force_update = True
            if force_update or cache_file is None:
                # Otherwise, call the function and save its result to the cache
                logger.debug("Created cache file for function: " + func.__name__)
                cache_file = CacheFileStore().create(
                    key,
                    mode=mode,
                    delete=delete,
                    suffix=suffix,
                    until_datetime=until_datetime,
                    until_date=until_date,
                    with_ttl=with_ttl,
                )
                result = func(*args, **kwargs)
                try:
                    # Assure we have an empty file
                    cache_file.truncate(0)
                    if "b" in mode:
                        pickle.dump(result, cache_file)
                    else:
                        cache_file.write(result)
                except Exception as e:
                    logger.info(f"Write failed: {e}")
                    # Writing failed: drop the (now inconsistent) cache entry.
                    CacheFileStore().delete(key)
            return result

        return wrapper

    return decorator
|
@ -1,25 +1,144 @@
|
||||
import hashlib
|
||||
"""PV Power Forecasting Module.
|
||||
|
||||
This module contains classes and methods to retrieve, process, and display photovoltaic (PV)
|
||||
power forecast data, including temperature, windspeed, DC power, and AC power forecasts.
|
||||
The module supports caching of forecast data to reduce redundant network requests and includes
|
||||
functions to update AC power measurements and retrieve forecasts within a specified date range.
|
||||
|
||||
Classes
|
||||
ForecastData: Represents a single forecast entry, including DC power, AC power,
|
||||
temperature, and windspeed.
|
||||
PVForecast: Retrieves, processes, and stores PV power forecast data, either from
|
||||
a file or URL, with optional caching. It also provides methods to query
|
||||
and update the forecast data, convert it to a DataFrame, and output key
|
||||
metrics like AC power.
|
||||
|
||||
Example:
|
||||
# Initialize PVForecast class with an URL
|
||||
forecast = PVForecast(
|
||||
prediction_hours=24,
|
||||
url="https://api.akkudoktor.net/forecast?lat=50.8588&lon=7.3747..."
|
||||
)
|
||||
|
||||
# Update the AC power measurement for a specific date and time
|
||||
forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
|
||||
|
||||
# Print the forecast data with DC and AC power details
|
||||
forecast.print_ac_power_and_measurement()
|
||||
|
||||
# Get the forecast data as a Pandas DataFrame
|
||||
df = forecast.get_forecast_dataframe()
|
||||
print(df)
|
||||
|
||||
Attributes:
|
||||
prediction_hours (int): Number of forecast hours. Defaults to 48.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
from pprint import pprint
|
||||
from datetime import date, datetime
|
||||
from typing import List, Optional, Union
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import requests
|
||||
from dateutil import parser
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from akkudoktoreos.cachefilestore import cache_in_file
|
||||
from akkudoktoreos.datetimeutil import to_datetime
|
||||
from akkudoktoreos.logutil import get_logger
|
||||
|
||||
logger = get_logger(__name__, logging_level="DEBUG")
|
||||
|
||||
|
||||
class AkkudoktorForecastHorizon(BaseModel):
    """Schema of one horizon obstruction segment in Akkudoktor forecast metadata.

    Field semantics follow the Akkudoktor.net API; presumably altitude is in
    degrees and azimuthFrom/azimuthTo bound the covered azimuth range —
    confirm against the Akkudoktor API documentation.
    """

    altitude: int
    azimuthFrom: int
    azimuthTo: int
|
||||
|
||||
|
||||
class AkkudoktorForecastMeta(BaseModel):
    """Schema of the `meta` block of an Akkudoktor.net PV forecast response.

    Used by `validate_pv_forecast_data` to recognize Akkudoktor data; field
    names mirror the API response verbatim (including the German-flavored
    `horizont` spelling).
    """

    lat: float
    lon: float
    power: List[int]
    azimuth: List[int]
    tilt: List[int]
    timezone: str
    albedo: float
    past_days: int
    inverterEfficiency: float
    powerInverter: List[int]
    cellCoEff: float
    range: bool
    horizont: List[List[AkkudoktorForecastHorizon]]
    horizontString: List[str]
|
||||
|
||||
|
||||
class AkkudoktorForecastValue(BaseModel):
    """A single hourly forecast record from the Akkudoktor API.

    Contains the power forecast (DC and AC) plus the weather values for
    one timestamp.
    """

    datetime: str
    dcPower: float
    power: float
    sunTilt: float
    sunAzimuth: float
    temperature: float
    relativehumidity_2m: float
    windspeed_10m: float
|
||||
|
||||
|
||||
class AkkudoktorForecast(BaseModel):
    """Top-level schema of an Akkudoktor PV forecast response.

    `values` holds one list of hourly records per forecast series
    (presumably one per configured PV plane — TODO confirm against API docs).
    """

    meta: AkkudoktorForecastMeta
    values: List[List[AkkudoktorForecastValue]]
|
||||
|
||||
|
||||
def validate_pv_forecast_data(data) -> Optional[str]:
    """Validate PV forecast data against the known input schemas.

    Currently only the Akkudoktor API format is recognized.

    Args:
        data: JSON-like dict with forecast data to be checked.

    Returns:
        Optional[str]: Name of the detected format ("Akkudoktor"), or None if
            the data does not match any known format. (Annotated Optional[str]
            because the None path is reachable on validation failure.)
    """
    data_type = None
    error_msg = ""

    try:
        AkkudoktorForecast.model_validate(data)
        data_type = "Akkudoktor"
    except ValidationError as e:
        # Collect every validation problem so the debug log shows the full
        # reason the data was rejected, not just the first error.
        for error in e.errors():
            field = " -> ".join(str(x) for x in error["loc"])
            message = error["msg"]
            error_type = error["type"]
            error_msg += f"Field: {field}\nError: {message}\nType: {error_type}\n"
        logger.debug(f"Validation did not succeed: {error_msg}")

    return data_type
|
||||
|
||||
|
||||
class ForecastData:
    """Stores forecast data for PV power and weather parameters.

    Attributes:
        date_time (datetime): The date and time of the forecast.
        dc_power (float): The direct current (DC) power in watts.
        ac_power (float): The alternating current (AC) power in watts.
        windspeed_10m (float, optional): Wind speed at 10 meters altitude.
        temperature (float, optional): Temperature in degrees Celsius.
        ac_power_measurement (float, optional): Measured AC power.
    """

    def __init__(
        self,
        date_time: datetime,
        dc_power: float,
        ac_power: float,
        windspeed_10m: Optional[float] = None,
        temperature: Optional[float] = None,
        ac_power_measurement: Optional[float] = None,
    ):
        """Initializes the ForecastData instance.

        Args:
            date_time (datetime): The date and time of the forecast.
            dc_power (float): The DC power in watts.
            ac_power (float): The AC power in watts.
            windspeed_10m (float, optional): Wind speed at 10 meters altitude. Defaults to None.
            temperature (float, optional): Temperature in degrees Celsius. Defaults to None.
            ac_power_measurement (float, optional): Measured AC power. Defaults to None.
        """
        self.date_time = date_time
        self.dc_power = dc_power
        self.ac_power = ac_power
        self.windspeed_10m = windspeed_10m
        self.temperature = temperature
        # NOTE(review): this instance attribute shadows the
        # `ac_power_measurement()` method below, so the method is
        # unreachable on instances. Kept as-is to preserve the public
        # interface; attribute access returns the measured value.
        self.ac_power_measurement = ac_power_measurement

    def get_date_time(self) -> datetime:
        """Returns the forecast date and time.

        Returns:
            datetime: The date and time of the forecast.
        """
        return self.date_time

    def get_dc_power(self) -> float:
        """Returns the DC power.

        Returns:
            float: DC power in watts.
        """
        return self.dc_power

    def ac_power_measurement(self) -> float:
        """Returns the measured AC power.

        It returns the measured AC power if available; otherwise None.

        NOTE(review): shadowed by the instance attribute of the same name
        set in ``__init__`` — never callable on instances.

        Returns:
            float: Measured AC power in watts or None
        """
        return self.ac_power_measurement

    def get_ac_power(self) -> float:
        """Returns the AC power.

        If a measured value is available, it returns the measured AC power;
        otherwise, it returns the forecasted AC power.

        Returns:
            float: AC power in watts.
        """
        if self.ac_power_measurement is not None:
            return self.ac_power_measurement
        else:
            return self.ac_power

    def get_windspeed_10m(self) -> float:
        """Returns the wind speed at 10 meters altitude.

        Returns:
            float: Wind speed in meters per second.
        """
        return self.windspeed_10m

    def get_temperature(self) -> float:
        """Returns the temperature.

        Returns:
            float: Temperature in degrees Celsius.
        """
        return self.temperature
|
||||
|
||||
|
||||
class PVForecast:
|
||||
def __init__(self, filepath=None, url=None, cache_dir="cache", prediction_hours=48):
|
||||
"""Manages PV (photovoltaic) power forecasts and weather data.
|
||||
|
||||
Forecast data can be loaded from different sources (in-memory data, file, or URL).
|
||||
|
||||
Attributes:
|
||||
meta (dict): Metadata related to the forecast (e.g., source, location).
|
||||
forecast_data (list): A list of forecast data points of `ForecastData` objects.
|
||||
prediction_hours (int): The number of hours into the future the forecast covers.
|
||||
current_measurement (Optional[float]): The current AC power measurement in watts (or None if unavailable).
|
||||
data (Optional[dict]): JSON data containing the forecast information (if provided).
|
||||
filepath (Optional[str]): Filepath to the forecast data file (if provided).
|
||||
url (Optional[str]): URL to retrieve forecast data from an API (if provided).
|
||||
_forecast_start (Optional[date]): Start datetime for the forecast period.
|
||||
tz_name (Optional[str]): The time zone name of the forecast data, if applicable.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: Optional[dict] = None,
|
||||
filepath: Optional[str] = None,
|
||||
url: Optional[str] = None,
|
||||
forecast_start: Union[datetime, date, str, int, float] = None,
|
||||
prediction_hours: Optional[int] = None,
|
||||
):
|
||||
"""Initializes a `PVForecast` instance.
|
||||
|
||||
Forecast data can be loaded from in-memory `data`, a file specified by `filepath`, or
|
||||
fetched from a remote `url`. If none are provided, an empty forecast will be initialized.
|
||||
The `forecast_start` and `prediction_hours` parameters can be specified to control the
|
||||
forecasting time period.
|
||||
|
||||
Use `process_data()` to fill an empty forecast later on.
|
||||
|
||||
Args:
|
||||
data (Optional[dict]): In-memory JSON data containing forecast information. Defaults to None.
|
||||
filepath (Optional[str]): Path to a local file containing forecast data in JSON format. Defaults to None.
|
||||
url (Optional[str]): URL to an API providing forecast data. Defaults to None.
|
||||
forecast_start (Union[datetime, date, str, int, float]): The start datetime for the forecast period.
|
||||
Can be a `datetime`, `date`, `str` (formatted date), `int` (timestamp), `float`, or None. Defaults to None.
|
||||
prediction_hours (Optional[int]): The number of hours to forecast into the future. Defaults to 48 hours.
|
||||
|
||||
Example:
|
||||
forecast = PVForecast(data=my_forecast_data, forecast_start="2024-10-13", prediction_hours=72)
|
||||
"""
|
||||
self.meta = {}
|
||||
self.forecast_data = []
|
||||
self.cache_dir = cache_dir
|
||||
self.prediction_hours = prediction_hours
|
||||
self.current_measurement = None
|
||||
self.data = data
|
||||
self.filepath = filepath
|
||||
self.url = url
|
||||
if forecast_start:
|
||||
self._forecast_start = to_datetime(forecast_start, to_naiv=True, to_maxtime=False)
|
||||
else:
|
||||
self._forecast_start = None
|
||||
self.prediction_hours = prediction_hours
|
||||
self._tz_name = None
|
||||
|
||||
if not os.path.exists(self.cache_dir):
|
||||
os.makedirs(self.cache_dir)
|
||||
if filepath:
|
||||
self.load_data_from_file(filepath)
|
||||
elif url:
|
||||
self.load_data_with_caching(url)
|
||||
|
||||
if len(self.forecast_data) < self.prediction_hours:
|
||||
raise ValueError(
|
||||
f"Die Vorhersage muss mindestens {self.prediction_hours} Stunden umfassen, aber es wurden nur {len(self.forecast_data)} Stunden vorhergesagt."
|
||||
if self.data or self.filepath or self.url:
|
||||
self.process_data(
|
||||
data=self.data,
|
||||
filepath=self.filepath,
|
||||
url=self.url,
|
||||
forecast_start=self._forecast_start,
|
||||
prediction_hours=self.prediction_hours,
|
||||
)
|
||||
|
||||
def update_ac_power_measurement(self, date_time=None, ac_power_measurement=None) -> bool:
|
||||
def update_ac_power_measurement(
|
||||
self,
|
||||
date_time: Union[datetime, date, str, int, float, None] = None,
|
||||
ac_power_measurement=None,
|
||||
) -> bool:
|
||||
"""Updates the AC power measurement for a specific time.
|
||||
|
||||
Args:
|
||||
date_time (datetime): The date and time of the measurement.
|
||||
ac_power_measurement (float): Measured AC power.
|
||||
|
||||
Returns:
|
||||
bool: True if a matching timestamp was found, False otherwise.
|
||||
"""
|
||||
found = False
|
||||
input_date_hour = date_time.replace(minute=0, second=0, microsecond=0)
|
||||
input_date_hour = to_datetime(
|
||||
date_time, to_timezone=self._tz_name, to_naiv=True, to_maxtime=False
|
||||
).replace(minute=0, second=0, microsecond=0)
|
||||
|
||||
for forecast in self.forecast_data:
|
||||
forecast_date_hour = parser.parse(forecast.date_time).replace(
|
||||
forecast_date_hour = to_datetime(forecast.date_time, to_naiv=True).replace(
|
||||
minute=0, second=0, microsecond=0
|
||||
)
|
||||
if forecast_date_hour == input_date_hour:
|
||||
forecast.ac_power_measurement = ac_power_measurement
|
||||
found = True
|
||||
logger.debug(
|
||||
f"AC Power measurement updated at date {input_date_hour}: {ac_power_measurement}"
|
||||
)
|
||||
break
|
||||
return found
|
||||
|
||||
def process_data(self, data):
|
||||
self.meta = data.get("meta", {})
|
||||
all_values = data.get("values", [])
|
||||
def process_data(
|
||||
self,
|
||||
data: Optional[dict] = None,
|
||||
filepath: Optional[str] = None,
|
||||
url: Optional[str] = None,
|
||||
forecast_start: Union[datetime, date, str, int, float] = None,
|
||||
prediction_hours: Optional[int] = None,
|
||||
) -> None:
|
||||
"""Processes the forecast data from the provided source (in-memory `data`, `filepath`, or `url`).
|
||||
|
||||
for i in range(len(all_values[0])): # Annahme, dass alle Listen gleich lang sind
|
||||
sum_dc_power = sum(values[i]["dcPower"] for values in all_values)
|
||||
sum_ac_power = sum(values[i]["power"] for values in all_values)
|
||||
If `forecast_start` and `prediction_hours` are provided, they define the forecast period.
|
||||
|
||||
# Zeige die ursprünglichen und berechneten Zeitstempel an
|
||||
original_datetime = all_values[0][i].get("datetime")
|
||||
# print(original_datetime," ",sum_dc_power," ",all_values[0][i]['dcPower'])
|
||||
dt = datetime.strptime(original_datetime, "%Y-%m-%dT%H:%M:%S.%f%z")
|
||||
dt = dt.replace(tzinfo=None)
|
||||
# iso_datetime = parser.parse(original_datetime).isoformat() # Konvertiere zu ISO-Format
|
||||
# print()
|
||||
# Optional: 2 Stunden abziehen, um die Zeitanpassung zu testen
|
||||
# adjusted_datetime = parser.parse(original_datetime) - timedelta(hours=2)
|
||||
# print(f"Angepasste Zeitstempel: {adjusted_datetime.isoformat()}")
|
||||
Args:
|
||||
data (Optional[dict]): JSON data containing forecast values. Defaults to None.
|
||||
filepath (Optional[str]): Path to a file with forecast data. Defaults to None.
|
||||
url (Optional[str]): API URL to retrieve forecast data from. Defaults to None.
|
||||
forecast_start (Union[datetime, date, str, int, float, None]): Start datetime of the forecast
|
||||
period. Defaults to None. If given before it is cached.
|
||||
prediction_hours (Optional[int]): The number of hours to forecast into the future.
|
||||
Defaults to None. If given before it is cached.
|
||||
|
||||
forecast = ForecastData(
|
||||
date_time=dt, # Verwende angepassten Zeitstempel
|
||||
dc_power=sum_dc_power,
|
||||
ac_power=sum_ac_power,
|
||||
windspeed_10m=all_values[0][i].get("windspeed_10m"),
|
||||
temperature=all_values[0][i].get("temperature"),
|
||||
Returns:
|
||||
None
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the specified `filepath` does not exist.
|
||||
ValueError: If no valid data source or data is provided.
|
||||
|
||||
Example:
|
||||
forecast = PVForecast(
|
||||
url="https://api.akkudoktor.net/forecast?lat=50.8588&lon=7.3747&"
|
||||
"power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&"
|
||||
"power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&"
|
||||
"power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&"
|
||||
"power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&"
|
||||
"past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&"
|
||||
"timezone=Europe%2FBerlin&hourly=relativehumidity_2m%2Cwindspeed_10m",
|
||||
prediction_hours = 24,
|
||||
)
|
||||
"""
|
||||
# Get input forecast data
|
||||
if data:
|
||||
pass
|
||||
elif filepath:
|
||||
data = self.load_data_from_file(filepath)
|
||||
elif url:
|
||||
data = self.load_data_from_url_with_caching(url)
|
||||
elif self.data or self.filepath or self.url:
|
||||
# Re-process according to previous arguments
|
||||
if self.data:
|
||||
data = self.data
|
||||
elif self.filepath:
|
||||
data = self.load_data_from_file(self.filepath)
|
||||
elif self.url:
|
||||
data = self.load_data_from_url_with_caching(self.url)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"Re-processing for None input is not implemented!"
|
||||
) # Invalid path
|
||||
else:
|
||||
raise ValueError("No prediction input data available.")
|
||||
# Validate input data to be of a known format
|
||||
data_format = validate_pv_forecast_data(data)
|
||||
if data_format != "Akkudoktor":
|
||||
raise ValueError(f"Prediction input data are of unknown format: '{data_format}'.")
|
||||
|
||||
# Assure we have a forecast start datetime
|
||||
if forecast_start is None:
|
||||
forecast_start = self._forecast_start
|
||||
if forecast_start is None:
|
||||
forecast_start = datetime(1970, 1, 1)
|
||||
|
||||
# Assure we have prediction hours set
|
||||
if prediction_hours is None:
|
||||
prediction_hours = self.prediction_hours
|
||||
if prediction_hours is None:
|
||||
prediction_hours = 48
|
||||
self.prediction_hours = prediction_hours
|
||||
|
||||
if data_format == "Akkudoktor":
|
||||
# --------------------------------------------
|
||||
# From here Akkudoktor PV forecast data format
|
||||
# ---------------------------------------------
|
||||
self.meta = data.get("meta")
|
||||
all_values = data.get("values")
|
||||
|
||||
# timezone of the PV system
|
||||
self._tz_name = self.meta.get("timezone", None)
|
||||
if not self._tz_name:
|
||||
raise NotImplementedError(
|
||||
"Processing without PV system timezone info ist not implemented!"
|
||||
)
|
||||
|
||||
# Assumption that all lists are the same length and are ordered chronologically
|
||||
# in ascending order and have the same timestamps.
|
||||
values_len = len(all_values[0])
|
||||
if values_len < self.prediction_hours:
|
||||
# Expect one value set per prediction hour
|
||||
raise ValueError(
|
||||
f"The forecast must cover at least {self.prediction_hours} hours, "
|
||||
f"but only {values_len} data sets are given in forecast data."
|
||||
)
|
||||
|
||||
# Convert forecast_start to timezone of PV system and make it a naiv datetime
|
||||
self._forecast_start = to_datetime(
|
||||
forecast_start, to_timezone=self._tz_name, to_naiv=True
|
||||
)
|
||||
logger.debug(f"Forecast start set to {self._forecast_start}")
|
||||
|
||||
for i in range(values_len):
|
||||
# Zeige die ursprünglichen und berechneten Zeitstempel an
|
||||
original_datetime = all_values[0][i].get("datetime")
|
||||
# print(original_datetime," ",sum_dc_power," ",all_values[0][i]['dcPower'])
|
||||
dt = to_datetime(original_datetime, to_timezone=self._tz_name, to_naiv=True)
|
||||
# iso_datetime = parser.parse(original_datetime).isoformat() # Konvertiere zu ISO-Format
|
||||
# print()
|
||||
# Optional: 2 Stunden abziehen, um die Zeitanpassung zu testen
|
||||
# adjusted_datetime = parser.parse(original_datetime) - timedelta(hours=2)
|
||||
# print(f"Angepasste Zeitstempel: {adjusted_datetime.isoformat()}")
|
||||
|
||||
if dt < self._forecast_start:
|
||||
# forecast data are too old
|
||||
continue
|
||||
|
||||
sum_dc_power = sum(values[i]["dcPower"] for values in all_values)
|
||||
sum_ac_power = sum(values[i]["power"] for values in all_values)
|
||||
|
||||
forecast = ForecastData(
|
||||
date_time=dt, # Verwende angepassten Zeitstempel
|
||||
dc_power=sum_dc_power,
|
||||
ac_power=sum_ac_power,
|
||||
windspeed_10m=all_values[0][i].get("windspeed_10m"),
|
||||
temperature=all_values[0][i].get("temperature"),
|
||||
)
|
||||
self.forecast_data.append(forecast)
|
||||
|
||||
if len(self.forecast_data) < self.prediction_hours:
|
||||
raise ValueError(
|
||||
f"The forecast must cover at least {self.prediction_hours} hours, "
|
||||
f"but only {len(self.forecast_data)} hours starting from {forecast_start} "
|
||||
f"were predicted."
|
||||
)
|
||||
|
||||
self.forecast_data.append(forecast)
|
||||
# Adapt forecast start to actual value
|
||||
self._forecast_start = self.forecast_data[0].get_date_time()
|
||||
logger.debug(f"Forecast start adapted to {self._forecast_start}")
|
||||
|
||||
def load_data_from_file(self, filepath):
|
||||
def load_data_from_file(self, filepath: str) -> dict:
|
||||
"""Loads forecast data from a file.
|
||||
|
||||
Args:
|
||||
filepath (str): Path to the file containing the forecast data.
|
||||
|
||||
Returns:
|
||||
data (dict): JSON data containing forecast values.
|
||||
"""
|
||||
with open(filepath, "r") as file:
|
||||
data = json.load(file)
|
||||
self.process_data(data)
|
||||
return data
|
||||
|
||||
def load_data_from_url(self, url):
|
||||
def load_data_from_url(self, url: str) -> dict:
|
||||
"""Loads forecast data from a URL.
|
||||
|
||||
Example:
|
||||
https://api.akkudoktor.net/forecast?lat=50.8588&lon=7.3747&power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&hourly=relativehumidity_2m%2Cwindspeed_10m
|
||||
|
||||
Args:
|
||||
url (str): URL of the API providing forecast data.
|
||||
|
||||
Returns:
|
||||
data (dict): JSON data containing forecast values.
|
||||
"""
|
||||
response = requests.get(url)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
pprint(data)
|
||||
self.process_data(data)
|
||||
else:
|
||||
print(f"Failed to load data from {url}. Status Code: {response.status_code}")
|
||||
self.load_data_from_url(url)
|
||||
data = f"Failed to load data from `{url}`. Status Code: {response.status_code}"
|
||||
logger.error(data)
|
||||
return data
|
||||
|
||||
def load_data_with_caching(self, url):
|
||||
date = datetime.now().strftime("%Y-%m-%d")
|
||||
@cache_in_file() # use binary mode by default as we have python objects not text
|
||||
def load_data_from_url_with_caching(self, url: str, until_date=None) -> dict:
|
||||
"""Loads data from a URL or from the cache if available.
|
||||
|
||||
cache_file = os.path.join(self.cache_dir, self.generate_cache_filename(url, date))
|
||||
if os.path.exists(cache_file):
|
||||
with open(cache_file, "r") as file:
|
||||
data = json.load(file)
|
||||
print("Loading data from cache.")
|
||||
Args:
|
||||
url (str): URL of the API providing forecast data.
|
||||
|
||||
Returns:
|
||||
data (dict): JSON data containing forecast values.
|
||||
"""
|
||||
response = requests.get(url)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
logger.debug(f"Data fetched from URL `{url} and cached.")
|
||||
else:
|
||||
response = requests.get(url)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
with open(cache_file, "w") as file:
|
||||
json.dump(data, file)
|
||||
print("Data fetched from URL and cached.")
|
||||
else:
|
||||
print(f"Failed to load data from {url}. Status Code: {response.status_code}")
|
||||
return
|
||||
self.process_data(data)
|
||||
|
||||
def generate_cache_filename(self, url, date):
|
||||
cache_key = hashlib.sha256(f"{url}{date}".encode("utf-8")).hexdigest()
|
||||
return f"cache_{cache_key}.json"
|
||||
data = f"Failed to load data from `{url}`. Status Code: {response.status_code}"
|
||||
logger.error(data)
|
||||
return data
|
||||
|
||||
def get_forecast_data(self):
|
||||
"""Returns the forecast data.
|
||||
|
||||
Returns:
|
||||
list: List of ForecastData objects.
|
||||
"""
|
||||
return self.forecast_data
|
||||
|
||||
def get_temperature_forecast_for_date(self, input_date_str):
|
||||
input_date = datetime.strptime(input_date_str, "%Y-%m-%d")
|
||||
def get_temperature_forecast_for_date(
|
||||
self, input_date: Union[datetime, date, str, int, float, None]
|
||||
):
|
||||
"""Returns the temperature forecast for a specific date.
|
||||
|
||||
Args:
|
||||
input_date (str): Date
|
||||
|
||||
Returns:
|
||||
np.array: Array of temperature forecasts.
|
||||
"""
|
||||
if not self._tz_name:
|
||||
raise NotImplementedError(
|
||||
"Processing without PV system timezone info ist not implemented!"
|
||||
)
|
||||
input_date = to_datetime(input_date, to_timezone=self._tz_name, to_naiv=True).date()
|
||||
daily_forecast_obj = [
|
||||
data
|
||||
for data in self.forecast_data
|
||||
if parser.parse(data.get_date_time()).date() == input_date.date()
|
||||
data for data in self.forecast_data if data.get_date_time().date() == input_date
|
||||
]
|
||||
daily_forecast = []
|
||||
for d in daily_forecast_obj:
|
||||
@ -167,24 +534,58 @@ class PVForecast:
|
||||
|
||||
return np.array(daily_forecast)
|
||||
|
||||
def get_pv_forecast_for_date_range(self, start_date_str, end_date_str):
|
||||
start_date = datetime.strptime(start_date_str, "%Y-%m-%d").date()
|
||||
end_date = datetime.strptime(end_date_str, "%Y-%m-%d").date()
|
||||
def get_pv_forecast_for_date_range(
|
||||
self,
|
||||
start_date: Union[datetime, date, str, int, float, None],
|
||||
end_date: Union[datetime, date, str, int, float, None],
|
||||
):
|
||||
"""Returns the PV forecast for a date range.
|
||||
|
||||
Args:
|
||||
start_date_str (str): Start date in the format YYYY-MM-DD.
|
||||
end_date_str (str): End date in the format YYYY-MM-DD.
|
||||
|
||||
Returns:
|
||||
pd.DataFrame: DataFrame containing the forecast data.
|
||||
"""
|
||||
if not self._tz_name:
|
||||
raise NotImplementedError(
|
||||
"Processing without PV system timezone info ist not implemented!"
|
||||
)
|
||||
start_date = to_datetime(start_date, to_timezone=self._tz_name, to_naiv=True).date()
|
||||
end_date = to_datetime(end_date, to_timezone=self._tz_name, to_naiv=True).date()
|
||||
date_range_forecast = []
|
||||
|
||||
for data in self.forecast_data:
|
||||
data_date = data.get_date_time().date() # parser.parse(data.get_date_time()).date()
|
||||
data_date = data.get_date_time().date()
|
||||
if start_date <= data_date <= end_date:
|
||||
date_range_forecast.append(data)
|
||||
print(data.get_date_time(), " ", data.get_ac_power())
|
||||
# print(data.get_date_time(), " ", data.get_ac_power())
|
||||
|
||||
ac_power_forecast = np.array([data.get_ac_power() for data in date_range_forecast])
|
||||
|
||||
return np.array(ac_power_forecast)[: self.prediction_hours]
|
||||
|
||||
def get_temperature_for_date_range(self, start_date_str, end_date_str):
|
||||
start_date = datetime.strptime(start_date_str, "%Y-%m-%d").date()
|
||||
end_date = datetime.strptime(end_date_str, "%Y-%m-%d").date()
|
||||
def get_temperature_for_date_range(
|
||||
self,
|
||||
start_date: Union[datetime, date, str, int, float, None],
|
||||
end_date: Union[datetime, date, str, int, float, None],
|
||||
):
|
||||
"""Returns the temperature forecast for a given date range.
|
||||
|
||||
Args:
|
||||
start_date (datetime | date | str | int | float | None): Start date.
|
||||
end_date (datetime | date | str | int | float | None): End date.
|
||||
|
||||
Returns:
|
||||
np.array: Array containing temperature forecasts for each hour within the date range.
|
||||
"""
|
||||
if not self._tz_name:
|
||||
raise NotImplementedError(
|
||||
"Processing without PV system timezone info ist not implemented!"
|
||||
)
|
||||
start_date = to_datetime(start_date, to_timezone=self._tz_name, to_naiv=True).date()
|
||||
end_date = to_datetime(end_date, to_timezone=self._tz_name, to_naiv=True).date()
|
||||
date_range_forecast = []
|
||||
|
||||
for data in self.forecast_data:
|
||||
@ -196,7 +597,12 @@ class PVForecast:
|
||||
return np.array(temperature_forecast)[: self.prediction_hours]
|
||||
|
||||
def get_forecast_dataframe(self):
|
||||
# Wandelt die Vorhersagedaten in ein Pandas DataFrame um
|
||||
"""Converts the forecast data into a Pandas DataFrame.
|
||||
|
||||
Returns:
|
||||
pd.DataFrame: A DataFrame containing the forecast data with columns for date/time,
|
||||
DC power, AC power, windspeed, and temperature.
|
||||
"""
|
||||
data = [
|
||||
{
|
||||
"date_time": f.get_date_time(),
|
||||
@ -212,20 +618,54 @@ class PVForecast:
|
||||
df = pd.DataFrame(data)
|
||||
return df
|
||||
|
||||
def print_ac_power_and_measurement(self):
|
||||
"""Druckt die DC-Leistung und den Messwert für jede Stunde."""
|
||||
def get_forecast_start(self) -> datetime:
|
||||
"""Return the start of the forecast data in local timezone.
|
||||
|
||||
Returns:
|
||||
forecast_start (datetime | None): The start datetime or None if no data available.
|
||||
"""
|
||||
if not self._forecast_start:
|
||||
return None
|
||||
return to_datetime(
|
||||
self._forecast_start, to_timezone=self._tz_name, to_naiv=True, to_maxtime=False
|
||||
)
|
||||
|
||||
def report_ac_power_and_measurement(self) -> str:
|
||||
"""Report DC/ AC power, and AC power measurement for each forecast hour.
|
||||
|
||||
For each forecast entry, the time, DC power, forecasted AC power, measured AC power
|
||||
(if available), and the value returned by the `get_ac_power` method is provided.
|
||||
|
||||
Returns:
|
||||
str: The report.
|
||||
"""
|
||||
rep = ""
|
||||
for forecast in self.forecast_data:
|
||||
date_time = forecast.date_time
|
||||
print(
|
||||
f"Zeit: {date_time}, DC: {forecast.dc_power}, AC: {forecast.ac_power}, Messwert: {forecast.ac_power_measurement}, AC GET: {forecast.get_ac_power()}"
|
||||
rep += (
|
||||
f"Zeit: {date_time}, DC: {forecast.dc_power}, AC: {forecast.ac_power}, "
|
||||
f"Messwert: {forecast.ac_power_measurement}, AC GET: {forecast.get_ac_power()}"
|
||||
"\n"
|
||||
)
|
||||
return rep
|
||||
|
||||
|
||||
# Example of how to use the PVForecast class
if __name__ == "__main__":
    """Main execution block to demonstrate the use of the PVForecast class.

    Fetches PV power forecast data from a given URL, updates the AC power measurement
    for the current date/time, and prints the DC and AC power information.
    """
    forecast = PVForecast(
        prediction_hours=24,
        url="https://api.akkudoktor.net/forecast?lat=50.8588&lon=7.3747&"
        "power=5000&azimuth=-10&tilt=7&powerInvertor=10000&horizont=20,27,22,20&"
        "power=4800&azimuth=-90&tilt=7&powerInvertor=10000&horizont=30,30,30,50&"
        "power=1400&azimuth=-40&tilt=60&powerInvertor=2000&horizont=60,30,0,30&"
        "power=1600&azimuth=5&tilt=45&powerInvertor=1400&horizont=45,25,30,60&"
        "past_days=5&cellCoEff=-0.36&inverterEfficiency=0.8&albedo=0.25&timezone=Europe%2FBerlin&"
        "hourly=relativehumidity_2m%2Cwindspeed_10m",
    )
    forecast.update_ac_power_measurement(date_time=datetime.now(), ac_power_measurement=1000)
    print(forecast.report_ac_power_and_measurement())
|
||||
|
285
src/akkudoktoreos/datetimeutil.py
Normal file
285
src/akkudoktoreos/datetimeutil.py
Normal file
@ -0,0 +1,285 @@
|
||||
"""Utility functions for date-time conversion tasks.
|
||||
|
||||
Functions:
|
||||
----------
|
||||
- to_datetime: Converts various date or time inputs to a timezone-aware or naive `datetime`
|
||||
object or formatted string.
|
||||
- to_timedelta: Converts various time delta inputs to a `timedelta`object.
|
||||
- to_timezone: Converts position latitude and longitude to a `timezone` object.
|
||||
|
||||
Example usage:
|
||||
--------------
|
||||
|
||||
# Date-time conversion
|
||||
>>> date_str = "2024-10-15"
|
||||
>>> date_obj = to_datetime(date_str)
|
||||
>>> print(date_obj) # Output: datetime object for '2024-10-15'
|
||||
|
||||
# Time delta conversion
|
||||
>>> to_timedelta("2 days 5 hours")
|
||||
|
||||
# Timezone detection
|
||||
>>> to_timezone(40.7128, -74.0060)
|
||||
"""
|
||||
|
||||
import re
|
||||
from datetime import date, datetime, time, timedelta, timezone
|
||||
from typing import Optional, Union
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from timezonefinder import TimezoneFinder
|
||||
|
||||
|
||||
def to_datetime(
    date_input: Union[datetime, date, str, int, float, None],
    as_string: Optional[Union[str, bool]] = None,
    to_timezone: Optional[Union[timezone, str]] = None,
    to_naiv: Optional[bool] = None,
    to_maxtime: Optional[bool] = None,
):
    """Converts a date input to a datetime object or a formatted string with timezone support.

    Args:
        date_input (Union[datetime, date, str, int, float, None]): The date input to convert.
            Accepts a date string, a datetime object, a date object or a Unix timestamp.
        as_string (Optional[Union[str, bool]]): If as_string is given (a format string or true)
            return datetime as a string. Otherwise, return a datetime object, which is the default.
            If true is given the string will be returned in ISO format.
            If a format string is given it may define the special formats "UTC" or "utc"
            to return a string in ISO format normalized to UTC. Otherwise the format string must be
            given compliant to Python's `datetime.strptime`.
        to_timezone (Optional[Union[timezone, str]]):
            Optional timezone object or name (e.g., 'UTC', 'Europe/Berlin').
            If provided, the datetime will be converted to this timezone.
            If not provided, the datetime will be converted to the local timezone.
        to_naiv (Optional[bool]):
            If True, remove timezone info from datetime after conversion.
            If False, keep timezone info after conversion. The default.
        to_maxtime (Optional[bool]):
            If True, convert to maximum time if no time is given. The default.
            If False, convert to minimum time if no time is given.

    Example:
        to_datetime("2027-12-12 24:13:12", as_string = "%Y-%m-%dT%H:%M:%S.%f%z")

    Returns:
        datetime or str: Converted date as a datetime object or a formatted string with timezone.

    Raises:
        ValueError: If the date input is not a valid type or format.
    """
    if isinstance(date_input, datetime):
        dt_object = date_input
    elif isinstance(date_input, date):
        # Convert date object to datetime object; end of day by default.
        if to_maxtime is None or to_maxtime:
            dt_object = datetime.combine(date_input, time.max)
        else:
            # Bugfix: use time.min here - previously both branches used time.max,
            # so to_maxtime=False had no effect for date inputs.
            dt_object = datetime.combine(date_input, time.min)
    elif isinstance(date_input, (int, float)):
        # Convert Unix timestamp to a timezone aware datetime object (UTC).
        dt_object = datetime.fromtimestamp(date_input, tz=timezone.utc)
    elif isinstance(date_input, str):
        # Convert string to datetime object
        try:
            # Try ISO format first
            dt_object = datetime.fromisoformat(date_input)
        except ValueError:
            formats = [
                "%Y-%m-%d",  # Format: 2024-10-13
                "%d/%m/%y",  # Format: 13/10/24
                "%d/%m/%Y",  # Format: 13/10/2024
                "%m-%d-%Y",  # Format: 10-13-2024
                "%Y.%m.%d",  # Format: 2024.10.13
                "%d %b %Y",  # Format: 13 Oct 2024
                "%d %B %Y",  # Format: 13 October 2024
                "%Y-%m-%d %H:%M:%S",  # Format: 2024-10-13 15:30:00
                "%Y-%m-%d %H:%M:%S%z",  # Format with timezone: 2024-10-13 15:30:00+0000
                "%Y-%m-%d %H:%M:%S%z:00",  # Format with timezone: 2024-10-13 15:30:00+0000
                "%Y-%m-%dT%H:%M:%S.%f%z",  # Format with timezone: 2024-10-13T15:30:00.000+0000
            ]

            dt_object = None
            for fmt in formats:
                try:
                    dt_object = datetime.strptime(date_input, fmt)
                    break
                except ValueError:
                    continue
            if dt_object is None:
                raise ValueError(f"Date string {date_input} does not match any known formats.")
    elif date_input is None:
        # No input - use today, at end or start of day depending on to_maxtime.
        if to_maxtime is None or to_maxtime:
            dt_object = datetime.combine(date.today(), time.max)
        else:
            dt_object = datetime.combine(date.today(), time.min)
    else:
        raise ValueError(f"Unsupported date input type: {type(date_input)}")

    # Get local timezone as a fixed-offset timezone derived from the current local time.
    local_date = datetime.now().astimezone()
    local_tz_name = local_date.tzname()
    local_utc_offset = local_date.utcoffset()
    local_timezone = timezone(local_utc_offset, local_tz_name)

    # Get target timezone
    if to_timezone:
        if isinstance(to_timezone, timezone):
            target_timezone = to_timezone
        elif isinstance(to_timezone, str):
            try:
                target_timezone = ZoneInfo(to_timezone)
            except Exception as e:
                raise ValueError(f"Invalid timezone: {to_timezone}") from e
        else:
            raise ValueError(f"Invalid timezone: {to_timezone}")

    # Adjust/Add timezone information
    if dt_object.tzinfo is None or dt_object.tzinfo.utcoffset(dt_object) is None:
        # datetime object is naive (not timezone aware):
        # attach a timezone without shifting the wall-clock time.
        if to_timezone is None:
            # Add local timezone
            dt_object = dt_object.replace(tzinfo=local_timezone)
        else:
            # Set to target timezone
            dt_object = dt_object.replace(tzinfo=target_timezone)
    elif to_timezone:
        # Localize the aware datetime object to the given target timezone.
        dt_object = dt_object.astimezone(target_timezone)
    else:
        # Localize the aware datetime object to the local timezone.
        dt_object = dt_object.astimezone(local_timezone)

    if to_naiv:
        # Remove timezone info to make the datetime naive.
        dt_object = dt_object.replace(tzinfo=None)

    if as_string:
        # Return formatted string as defined by as_string
        if isinstance(as_string, bool):
            return dt_object.isoformat()
        elif as_string == "UTC" or as_string == "utc":
            # Normalize to UTC before ISO formatting.
            dt_object = dt_object.astimezone(timezone.utc)
            return dt_object.isoformat()
        else:
            return dt_object.strftime(as_string)
    else:
        return dt_object
|
||||
|
||||
|
||||
def to_timedelta(input_value):
    """Convert various input types into a `timedelta` object.

    Args:
        input_value (Union[timedelta, str, int, float, tuple, list]): Input to be converted
            timedelta.
            - str: A string like "2 days", "5 hours", "30 minutes", or a combination.
            - int/float: Number representing seconds.
            - tuple/list: A tuple or list in the format (days, hours, minutes, seconds).

    Returns:
        timedelta: A timedelta object corresponding to the input value.

    Raises:
        ValueError: If the input format is not supported.

    Examples:
        >>> to_timedelta("2 days 5 hours")
        datetime.timedelta(days=2, seconds=18000)

        >>> to_timedelta(3600)
        datetime.timedelta(seconds=3600)

        >>> to_timedelta((1, 2, 30, 15))
        datetime.timedelta(days=1, seconds=90315)
    """
    # Already a timedelta - pass it straight through.
    if isinstance(input_value, timedelta):
        return input_value

    # Plain numbers are interpreted as a duration in seconds.
    if isinstance(input_value, (int, float)):
        return timedelta(seconds=input_value)

    # Sequences must have exactly the shape (days, hours, minutes, seconds).
    if isinstance(input_value, (tuple, list)):
        if len(input_value) != 4:
            raise ValueError(f"Expected a tuple or list of length 4, got {len(input_value)}")
        days, hours, minutes, seconds = input_value
        return timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)

    # Strings like "2 days 5 hours 30 minutes" are parsed component by component.
    if isinstance(input_value, str):
        unit_seconds = {
            "day": 86400,  # 24 * 60 * 60
            "hour": 3600,
            "minute": 60,
            "second": 1,
        }

        # Match components such as '2 days', '5 hours', '15 seconds'.
        components = re.findall(r"(\d+)\s*(days?|hours?|minutes?|seconds?)", input_value)
        if not components:
            raise ValueError(f"Invalid time string format: {input_value}")

        total_seconds = 0
        for amount, unit in components:
            normalized_unit = unit.lower().rstrip("s")  # Normalize unit
            if normalized_unit not in unit_seconds:
                raise ValueError(f"Unsupported time unit: {normalized_unit}")
            total_seconds += int(amount) * unit_seconds[normalized_unit]

        return timedelta(seconds=total_seconds)

    raise ValueError(f"Unsupported input type: {type(input_value)}")
|
||||
|
||||
|
||||
def to_timezone(lat: float, lon: float, as_string: Optional[bool] = None):
    """Determines the timezone for a given geographic location specified by latitude and longitude.

    By default, it returns a `ZoneInfo` object representing the timezone.
    If `as_string` is set to `True`, the function returns the timezone name as a string instead.

    Args:
        lat (float): Latitude of the location in decimal degrees. Must be between -90 and 90.
        lon (float): Longitude of the location in decimal degrees. Must be between -180 and 180.
        as_string (Optional[bool]):
            - If `True`, returns the timezone as a string (e.g., "America/New_York").
            - If `False` or not provided, returns a `ZoneInfo` object for the timezone.

    Returns:
        str or ZoneInfo:
            - A timezone name as a string (e.g., "America/New_York") if `as_string` is `True`.
            - A `ZoneInfo` timezone object if `as_string` is `False` or not provided.

    Raises:
        ValueError: If the latitude or longitude is out of range, or if no timezone is found for
            the specified coordinates.

    Example:
        >>> to_timezone(40.7128, -74.0060, as_string=True)
        'America/New_York'

        >>> to_timezone(40.7128, -74.0060)
        ZoneInfo(key='America/New_York')
    """
    # Lazily create a single shared TimezoneFinder instance; building it is
    # expensive, so it is cached as a function attribute across calls.
    finder = getattr(to_timezone, "timezone_finder", None)
    if finder is None:
        finder = TimezoneFinder()
        to_timezone.timezone_finder = finder  # static variable

    # Check and convert coordinates to timezone
    try:
        tz_name = finder.timezone_at(lat=lat, lng=lon)
        if not tz_name:
            raise ValueError(f"No timezone found for coordinates: latitude {lat}, longitude {lon}")
    except Exception as e:
        raise ValueError(f"Invalid location: latitude {lat}, longitude {lon}") from e

    return tz_name if as_string else ZoneInfo(tz_name)
|
95
src/akkudoktoreos/logutil.py
Normal file
95
src/akkudoktoreos/logutil.py
Normal file
@ -0,0 +1,95 @@
|
||||
"""Utility functions for handling logging tasks.
|
||||
|
||||
Functions:
|
||||
----------
|
||||
- get_logger: Creates and configures a logger with console and optional rotating file logging.
|
||||
|
||||
Example usage:
|
||||
--------------
|
||||
# Logger setup
|
||||
>>> logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
|
||||
>>> logger.info("Logging initialized.")
|
||||
|
||||
Notes:
|
||||
------
|
||||
- The logger supports rotating log files to prevent excessive log file size.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def get_logger(
    name: str,
    log_file: Optional[str] = None,
    logging_level: Optional[str] = "INFO",
    max_bytes: int = 5000000,
    backup_count: int = 5,
) -> logging.Logger:
    """Creates and configures a logger with a given name.

    The logger supports logging to both the console and an optional log file. File logging is
    handled by a rotating file handler to prevent excessive log file size.

    Args:
        name (str): The name of the logger, typically `__name__` from the calling module.
        log_file (Optional[str]): Path to the log file for file logging. If None, no file logging is done.
        logging_level (Optional[str]): Logging level (e.g., "INFO", "DEBUG"). Defaults to "INFO".
        max_bytes (int): Maximum size in bytes for log file before rotation. Defaults to 5 MB.
        backup_count (int): Number of backup log files to keep. Defaults to 5.

    Returns:
        logging.Logger: Configured logger instance.

    Example:
        logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
        logger.info("Application started")
    """
    logger = logging.getLogger(name)
    logger.propagate = True

    # Map the level name to its logging constant; any unrecognized value
    # (including None) falls back to DEBUG, matching the historic behaviour.
    level = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
    }.get(logging_level, logging.DEBUG)
    logger.setLevel(level)

    # The log message format
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # Attach the console handler only once - the logger may already carry
    # handlers (e.g. from pytest or an earlier get_logger call).
    if not logger.handlers:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(level)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)

    if log_file and len(logger.handlers) < 2:  # We assume a console logger to be the first logger
        # If a log file path is specified, create a rotating file handler.

        # Ensure the log directory exists
        log_dir = os.path.dirname(log_file)
        if log_dir and not os.path.exists(log_dir):
            os.makedirs(log_dir)

        # Create a rotating file handler
        file_handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
        file_handler.setLevel(level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    return logger
|
@ -1,11 +1,22 @@
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
import pytest
|
||||
from xprocess import ProcessStarter
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def disable_debug_logging():
    """Automatically raise the global logging threshold above DEBUG for every test.

    Applied to all tests (autouse) so DEBUG noise does not clutter test output.
    """
    # Temporarily set logging level higher than DEBUG
    logging.disable(logging.DEBUG)
    yield
    # Re-enable logging back to its original state after the test
    logging.disable(logging.NOTSET)
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption(
|
||||
"--full-run", action="store_true", default=False, help="Run with all optimization tests."
|
||||
@ -19,6 +30,11 @@ def is_full_run(request):
|
||||
|
||||
@pytest.fixture
|
||||
def server(xprocess):
|
||||
"""Fixture to start the server.
|
||||
|
||||
Provides URL of the server.
|
||||
"""
|
||||
|
||||
class Starter(ProcessStarter):
|
||||
# assure server to be installed
|
||||
try:
|
||||
@ -62,3 +78,29 @@ def server(xprocess):
|
||||
|
||||
# clean up whole process tree afterwards
|
||||
xprocess.getinfo("akkudoktoreosserver").terminate()
|
||||
|
||||
|
||||
@pytest.fixture
def other_timezone():
    """Fixture to temporarily change the timezone.

    Switches TZ to a timezone guaranteed to differ from the current one,
    yields the new timezone name, and restores the original setting
    (including an unset TZ) after the test.
    """
    original_tz = os.environ.get("TZ", None)

    other_tz = "Atlantic/Canary"
    if original_tz == other_tz:
        other_tz = "Asia/Singapore"

    # Change the timezone to another
    os.environ["TZ"] = other_tz
    time.tzset()  # For Unix/Linux to apply the timezone change

    yield os.environ["TZ"]  # Yield control back to the test case

    # Restore the original timezone after the test.
    # Fix: compare against None so that a TZ that was set to an empty string
    # is restored instead of being deleted (empty string is falsy).
    if original_tz is not None:
        os.environ["TZ"] = original_tz
    else:
        del os.environ["TZ"]
    time.tzset()  # Re-apply the original timezone
|
||||
|
364
tests/test_cachefilestore.py
Normal file
364
tests/test_cachefilestore.py
Normal file
@ -0,0 +1,364 @@
|
||||
"""Test Module for CacheFileStore Module."""
|
||||
|
||||
import io
|
||||
import pickle
|
||||
from datetime import date, datetime, time, timedelta
|
||||
from time import sleep
|
||||
|
||||
import pytest
|
||||
|
||||
from akkudoktoreos.cachefilestore import CacheFileStore, cache_in_file
|
||||
from akkudoktoreos.datetimeutil import to_datetime
|
||||
|
||||
# -----------------------------
|
||||
# CacheFileStore
|
||||
# -----------------------------
|
||||
|
||||
|
||||
@pytest.fixture
def cache_store():
    """A pytest fixture that creates a new CacheFileStore instance for testing.

    Note: CacheFileStore is a singleton, so every call returns the same shared store.
    """
    return CacheFileStore()
|
||||
|
||||
|
||||
def test_generate_cache_file_key(cache_store):
    """Test cache file key generation based on URL and date."""
    key = "http://example.com"
    until_dt = to_datetime("2024-10-01").date()
    cache_file_key, cache_file_until_dt = cache_store._generate_cache_file_key(key, until_dt)
    assert cache_file_key is not None
    assert cache_file_until_dt == until_dt

    # Provide no until date - assure today EOD (end of day) is used as default.
    until_dt = datetime.combine(date.today(), time.max)
    cache_file_key, cache_file_until_dt = cache_store._generate_cache_file_key(key, None)
    assert cache_file_until_dt == until_dt
    cache_file_key1, cache_file_until_dt1 = cache_store._generate_cache_file_key(key, until_dt)
    # Same key and until date must yield the same cache file key.
    assert cache_file_key == cache_file_key1
    assert cache_file_until_dt == until_dt
|
||||
|
||||
|
||||
def test_get_file_path(cache_store):
    """Test get file path from cache file object."""
    cache_file = cache_store.create("test_file", mode="w+", suffix=".txt")
    file_path = cache_store._get_file_path(cache_file)

    # A created cache file must be backed by a real file path.
    assert file_path is not None
|
||||
|
||||
|
||||
def test_create_cache_file(cache_store):
    """Test the creation of a cache file and ensure it is stored correctly."""
    # Create a cache file for today's date
    cache_file = cache_store.create("test_file", mode="w+", suffix=".txt")

    # Check that the file exists in the store and is a file-like object
    assert cache_file is not None
    assert hasattr(cache_file, "name")
    assert cache_file.name.endswith(".txt")

    # Write some data to the file and verify it round-trips.
    cache_file.seek(0)
    cache_file.write("Test data")
    cache_file.seek(0)  # Reset file pointer
    assert cache_file.read() == "Test data"
|
||||
|
||||
|
||||
def test_get_cache_file(cache_store):
    """Test retrieving an existing cache file by key."""
    # Create a cache file and write data to it
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.seek(0)
    cache_file.write("Test data")
    cache_file.seek(0)

    # Retrieve the cache file by its key and verify the data is intact.
    retrieved_file = cache_store.get("test_file")
    assert retrieved_file is not None
    retrieved_file.seek(0)
    assert retrieved_file.read() == "Test data"
|
||||
|
||||
|
||||
def test_set_custom_file_object(cache_store):
    """Test setting a custom file-like object (BytesIO or StringIO) in the store."""
    # Create a BytesIO object and set it into the cache
    file_obj = io.BytesIO(b"Binary data")
    cache_store.set("binary_file", file_obj)

    # Retrieve the file from the store - the same object type must come back.
    retrieved_file = cache_store.get("binary_file")
    assert isinstance(retrieved_file, io.BytesIO)
    retrieved_file.seek(0)
    assert retrieved_file.read() == b"Binary data"
|
||||
|
||||
|
||||
def test_delete_cache_file(cache_store):
    """Test deleting a cache file from the store."""
    # Create multiple cache files
    cache_file1 = cache_store.create("file1")
    assert hasattr(cache_file1, "name")
    cache_file2 = cache_store.create("file2")
    assert hasattr(cache_file2, "name")

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Delete cache files
    cache_store.delete("file1")
    cache_store.delete("file2")

    # Ensure the deleted entries are gone from the store.
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None
|
||||
|
||||
|
||||
def test_clear_all_cache_files(cache_store):
    """Test clearing all cache files from the store."""
    # Create multiple cache files
    cache_file1 = cache_store.create("file1")
    assert hasattr(cache_file1, "name")
    cache_file2 = cache_store.create("file2")
    assert hasattr(cache_file2, "name")

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Clear all cache files regardless of their expiry date.
    cache_store.clear(clear_all=True)

    # Ensure the store is empty
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None
|
||||
|
||||
|
||||
def test_clear_cache_files_by_date(cache_store):
    """Test clearing cache files from the store by date."""
    # Create multiple cache files
    cache_file1 = cache_store.create("file1")
    assert hasattr(cache_file1, "name")
    cache_file2 = cache_store.create("file2")
    assert hasattr(cache_file2, "name")

    # Ensure the files are in the store
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Clear cache files that are older than today (start of day)
    cache_store.clear(before_datetime=datetime.combine(date.today(), time.min))

    # Ensure the files are still in the store (they expire end of today)
    assert cache_store.get("file1") is cache_file1
    assert cache_store.get("file2") is cache_file2

    # Clear cache files that are older than tomorrow
    cache_store.clear(before_datetime=datetime.now() + timedelta(days=1))

    # Ensure the store is empty
    assert cache_store.get("file1") is None
    assert cache_store.get("file2") is None
|
||||
|
||||
|
||||
def test_cache_file_with_date(cache_store):
    """Test creating and retrieving cache files with a specific date."""
    # Use a specific date for cache file creation
    specific_date = datetime(2023, 10, 10)
    cache_file = cache_store.create("dated_file", mode="w+", until_date=specific_date)

    # Write data to the cache file
    cache_file.write("Dated data")
    cache_file.seek(0)

    # Retrieve the cache file with the matching specific date.
    retrieved_file = cache_store.get("dated_file", until_date=specific_date)
    assert retrieved_file is not None
    retrieved_file.seek(0)
    assert retrieved_file.read() == "Dated data"
|
||||
|
||||
|
||||
def test_recreate_existing_cache_file(cache_store):
    """Test creating a cache file with an existing key does not overwrite the existing file."""
    # Create a cache file
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.write("Original data")
    cache_file.seek(0)

    # Attempt to recreate the same file (should return the existing one)
    new_file = cache_store.create("test_file")
    assert new_file is cache_file  # Should be the same object
    new_file.seek(0)
    assert new_file.read() == "Original data"  # Data should be preserved

    # Assure cache file store is a singleton: a second instance sees the same entry.
    cache_store2 = CacheFileStore()
    new_file = cache_store2.get("test_file")
    assert new_file is cache_file  # Should be the same object
|
||||
|
||||
|
||||
def test_cache_store_is_singleton(cache_store):
    """Test re-creating a cache store provides the same store."""
    # Create a cache file
    cache_file = cache_store.create("test_file", mode="w+")
    cache_file.write("Original data")
    cache_file.seek(0)

    # Assure cache file store is a singleton: a freshly constructed
    # CacheFileStore must expose the entry created above.
    cache_store2 = CacheFileStore()
    new_file = cache_store2.get("test_file")
    assert new_file is cache_file  # Should be the same object
|
||||
|
||||
|
||||
def test_cache_in_file_decorator_caches_function_result(cache_store):
    """Test that the cache_in_file decorator caches a function result."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "Some expensive computation result"

    # Call the decorated function (should store result in cache)
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    assert result == "Some expensive computation result"

    # Assert that the create method was called to store the result
    assert len(cache_store._store) == 1

    # Check if the result was written to the cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key][0]
    assert cache_file is not None

    # Assert correct content was written to the file
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == "Some expensive computation result"
|
||||
|
||||
|
||||
def test_cache_in_file_decorator_uses_cache(cache_store):
    """Test that the cache_in_file decorator reuses cached file on subsequent calls."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "New result"

    # Call the decorated function (should store result in cache)
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    assert result == "New result"

    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key][0]
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result

    # Modify cache file content directly to detect a cache hit below.
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)

    # Call the decorated function again (should get result from cache)
    result = my_function(until_date=datetime.now() + timedelta(days=1))
    assert result == result2
|
||||
|
||||
|
||||
def test_cache_in_file_decorator_forces_update(cache_store):
    """Test that force_update makes the cache_in_file decorator re-run the function."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function(until_date=None):
        return "New result"

    until_date = datetime.now() + timedelta(days=1)

    # Call the decorated function (should store result in cache)
    result1 = "New result"
    result = my_function(until_date=until_date)
    assert result == result1

    # Assert result was written to cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key][0]
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result

    # Modify cache file content directly to detect whether it is reused.
    result2 = "Cached result"
    cache_file.seek(0)
    cache_file.write(result2)

    # Call the decorated function again with force update (should get result from function)
    result = my_function(until_date=until_date, force_update=True)
    assert result == result1

    # Assure result was written to the same cache file
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == result1
|
||||
|
||||
|
||||
def test_cache_in_file_handles_ttl(cache_store):
    """Test that the cache_in_file decorator handles the with_ttl parameter."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a simple function to decorate
    @cache_in_file(mode="w+")
    def my_function():
        return "New result"

    # Call the decorated function
    result = my_function(with_ttl="1 second")

    # Overwrite cache file content directly to detect a cache hit below.
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key][0]
    assert cache_file is not None
    cache_file.seek(0)  # Move to the start of the file
    cache_file.write("Modified result")
    cache_file.seek(0)  # Move to the start of the file
    assert cache_file.read() == "Modified result"

    # Within the TTL the cached (modified) content must be returned.
    result = my_function(with_ttl="1 second")
    assert result == "Modified result"

    # Wait one second to let the cache time out
    sleep(1)

    # Call again - cache should be timed out
    result = my_function(with_ttl="1 second")
    assert result == "New result"
|
||||
|
||||
|
||||
def test_cache_in_file_handles_bytes_return(cache_store):
    """Test that the cache_in_file decorator handles bytes returned from the function."""
    # Clear store to assure it is empty
    cache_store.clear(clear_all=True)
    assert len(cache_store._store) == 0

    # Define a function that returns bytes
    @cache_in_file()
    def my_function(until_date=None):
        return b"Some binary data"

    # Call the decorated function
    result = my_function(until_date=datetime.now() + timedelta(days=1))

    # Check if the binary data was written to the cache file
    key = next(iter(cache_store._store))
    cache_file = cache_store._store[key][0]
    assert cache_file is not None
    cache_file.seek(0)
    # Binary results are pickled into the cache file.
    result1 = pickle.load(cache_file)
    assert result1 == result
|
119
tests/test_datetimeutil.py
Normal file
119
tests/test_datetimeutil.py
Normal file
@ -0,0 +1,119 @@
|
||||
"""Test Module for datetimeutil Module."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import pytest
|
||||
|
||||
from akkudoktoreos.datetimeutil import to_datetime, to_timedelta, to_timezone
|
||||
|
||||
# -----------------------------
|
||||
# to_datetime
|
||||
# -----------------------------
|
||||
|
||||
|
||||
# Test cases for valid datetime inputs
|
||||
@pytest.mark.parametrize(
    "date_input, as_string, to_timezone, to_naiv, to_maxtime, expected_output",
    [
        # as datetime object
        (
            "2024-10-07T10:20:30.000+02:00",
            None,
            "Europe/Berlin",
            None,
            None,
            datetime(2024, 10, 7, 10, 20, 30, 0, tzinfo=ZoneInfo("Europe/Berlin")),
        ),
        (
            "2024-10-07T10:20:30.000+02:00",
            None,
            "Europe/Berlin",
            False,
            None,
            datetime(2024, 10, 7, 10, 20, 30, 0, tzinfo=ZoneInfo("Europe/Berlin")),
        ),
        # to_naiv=True strips the timezone information after conversion
        (
            "2024-10-07T10:20:30.000+02:00",
            None,
            "Europe/Berlin",
            True,
            None,
            datetime(2024, 10, 7, 10, 20, 30, 0),
        ),
        # as string - "UTC"/"utc" normalize to UTC ISO format
        ("2024-10-07T10:20:30.000+02:00", "UTC", None, None, None, "2024-10-07T08:20:30+00:00"),
        ("2024-10-07T10:20:30.000+02:00", "utc", None, None, None, "2024-10-07T08:20:30+00:00"),
    ],
)
def test_to_datetime(date_input, as_string, to_timezone, to_naiv, to_maxtime, expected_output):
    """Test datetime conversion with valid inputs."""
    assert (
        to_datetime(
            date_input,
            as_string=as_string,
            to_timezone=to_timezone,
            to_naiv=to_naiv,
            to_maxtime=to_maxtime,
        )
        == expected_output
    )
|
||||
|
||||
|
||||
# -----------------------------
|
||||
# to_timedelta
|
||||
# -----------------------------
|
||||
|
||||
|
||||
# Valid inputs for to_timedelta(), each mapped to the timedelta it must yield.
@pytest.mark.parametrize(
    "input_value, expected_output",
    [
        # An existing timedelta is passed through unchanged.
        (timedelta(days=1), timedelta(days=1)),
        # Human-readable strings.
        ("2 days", timedelta(days=2)),
        ("5 hours", timedelta(hours=5)),
        ("30 minutes", timedelta(minutes=30)),
        ("45 seconds", timedelta(seconds=45)),
        ("1 day 2 hours 30 minutes 15 seconds", timedelta(days=1, hours=2, minutes=30, seconds=15)),
        ("3 days 4 hours", timedelta(days=3, hours=4)),
        # Plain numbers are interpreted as seconds.
        (3600, timedelta(seconds=3600)),  # 1 hour
        (86400, timedelta(days=1)),  # 1 day
        (1800.5, timedelta(seconds=1800.5)),  # 30 minutes and 0.5 seconds
        # Sequences are (days, hours, minutes, seconds).
        ((1, 2, 30, 15), timedelta(days=1, hours=2, minutes=30, seconds=15)),
        ([0, 10, 0, 0], timedelta(hours=10)),
    ],
)
def test_to_timedelta_valid(input_value, expected_output):
    """Test to_timedelta with valid inputs."""
    result = to_timedelta(input_value)
    assert result == expected_output
|
||||
|
||||
|
||||
# -----------------------------
|
||||
# to_timezone
|
||||
# -----------------------------
|
||||
|
||||
|
||||
def test_to_timezone_string():
    """Test that to_timezone returns the IANA timezone name when as_string=True."""
    # New York City coordinates.
    result = to_timezone(40.7128, -74.0060, as_string=True)
    assert result == "America/New_York", "Expected timezone string 'America/New_York'"
|
||||
|
||||
|
||||
def test_to_timezone_zoneinfo():
    """Test that to_timezone returns a ZoneInfo object by default."""
    # New York City coordinates.
    tz = to_timezone(40.7128, -74.0060)
    assert isinstance(tz, ZoneInfo), "Expected a ZoneInfo object"
    assert tz.key == "America/New_York", "Expected ZoneInfo key 'America/New_York'"
|
||||
|
||||
|
||||
def test_to_timezone_invalid_coordinates():
    """Test that to_timezone raises ValueError for out-of-range coordinates."""
    # Latitude/longitude outside the valid Earth ranges.
    with pytest.raises(ValueError, match="Invalid location"):
        to_timezone(100.0, 200.0, as_string=True)
|
82
tests/test_logutil.py
Normal file
82
tests/test_logutil.py
Normal file
@ -0,0 +1,82 @@
|
||||
"""Test Module for logutil Module."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
|
||||
import pytest
|
||||
|
||||
from akkudoktoreos.logutil import get_logger
|
||||
|
||||
# -----------------------------
|
||||
# get_logger
|
||||
# -----------------------------
|
||||
|
||||
|
||||
@pytest.fixture
def clean_up_log_file():
    """Provide the test log file name and remove the file after the test."""
    log_file = "test.log"
    yield log_file
    # Teardown: drop the log file created during the test, if any.
    if os.path.exists(log_file):
        os.remove(log_file)
|
||||
|
||||
|
||||
def test_get_logger_console_logging(clean_up_log_file):
    """Test logger creation with console logging only."""
    logger = get_logger("test_logger", logging_level="DEBUG")

    # Name and level must match what was requested.
    assert logger.name == "test_logger"
    assert logger.level == logging.DEBUG

    # Exactly one handler is attached: the console (stream) handler.
    assert len(logger.handlers) == 1
    assert isinstance(logger.handlers[0], logging.StreamHandler)
|
||||
|
||||
|
||||
def test_get_logger_file_logging(clean_up_log_file):
    """Test logger creation with both console and file logging."""
    logger = get_logger("test_logger", log_file="test.log", logging_level="WARNING")

    # Check logger name
    assert logger.name == "test_logger"

    # Check logger level
    assert logger.level == logging.WARNING

    # Check console AND file handlers are present
    # (original comment wrongly mentioned only the console handler).
    assert len(logger.handlers) == 2  # One for console and one for file
    assert isinstance(logger.handlers[0], logging.StreamHandler)
    assert isinstance(logger.handlers[1], RotatingFileHandler)

    # Check file existence
    assert os.path.exists("test.log")
|
||||
|
||||
|
||||
def test_get_logger_no_file_logging(clean_up_log_file):
    """Test logger creation without file logging."""
    logger = get_logger("test_logger")

    # Defaults: requested name, INFO level.
    assert logger.name == "test_logger"
    assert logger.level == logging.INFO

    # At least one console handler (pytest may attach additional handlers).
    assert len(logger.handlers) >= 1
    assert isinstance(logger.handlers[0], logging.StreamHandler)
|
||||
|
||||
|
||||
def test_get_logger_with_invalid_level(clean_up_log_file):
    """Test logger creation with an invalid logging level string."""
    logger = get_logger("test_logger", logging_level="INVALID")

    assert logger.name == "test_logger"
    # An unknown level name falls back to the DEBUG default.
    assert logger.level == logging.DEBUG
|
282
tests/test_pv_forecast.py
Normal file
282
tests/test_pv_forecast.py
Normal file
@ -0,0 +1,282 @@
|
||||
"""Test Module for PV Power Forecasting Module.
|
||||
|
||||
This test module is designed to verify the functionality of the `PVForecast` class
|
||||
and its methods in the `class_pv_forecast` module. The tests include validation for
|
||||
forecast data processing, updating AC power measurements, retrieving forecast data,
|
||||
and caching behavior.
|
||||
|
||||
Fixtures:
|
||||
sample_forecast_data: Provides sample forecast data in JSON format for testing.
|
||||
pv_forecast_instance: Provides an instance of `PVForecast` class with sample data loaded.
|
||||
|
||||
Test Cases:
|
||||
- test_generate_cache_filename: Verifies correct cache filename generation based on URL and date.
|
||||
- test_update_ac_power_measurement: Tests updating AC power measurement for a matching date.
|
||||
- test_update_ac_power_measurement_no_match: Ensures no updates occur when there is no matching date.
|
||||
- test_get_temperature_forecast_for_date: Tests retrieving the temperature forecast for a specific date.
|
||||
- test_get_pv_forecast_for_date_range: Verifies retrieval of AC power forecast for a specified date range.
|
||||
- test_get_forecast_dataframe: Ensures forecast data can be correctly converted into a Pandas DataFrame.
|
||||
- test_cache_loading: Tests loading forecast data from a cached file to ensure caching works as expected.
|
||||
|
||||
Usage:
|
||||
This test module uses `pytest` and requires the `akkudoktoreos.class_pv_forecast.py` module to be present.
|
||||
Run the tests using the command: `pytest test_pv_forecast.py`.
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from akkudoktoreos.class_pv_forecast import PVForecast, validate_pv_forecast_data
|
||||
from akkudoktoreos.datetimeutil import to_datetime
|
||||
|
||||
DIR_TESTDATA = Path(__file__).absolute().parent.joinpath("testdata")
|
||||
|
||||
FILE_TESTDATA_PV_FORECAST_INPUT_1 = DIR_TESTDATA.joinpath("pv_forecast_input_1.json")
|
||||
FILE_TESTDATA_PV_FORECAST_RESULT_1 = DIR_TESTDATA.joinpath("pv_forecast_result_1.txt")
|
||||
|
||||
|
||||
@pytest.fixture
def sample_forecast_data():
    """Fixture that returns sample forecast data loaded from the test data file."""
    with open(FILE_TESTDATA_PV_FORECAST_INPUT_1, "r") as f_in:
        return json.load(f_in)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_forecast_report():
    """Fixture that returns the expected sample forecast report text."""
    with open(FILE_TESTDATA_PV_FORECAST_RESULT_1, "r") as f_res:
        return f_res.read()
|
||||
|
||||
|
||||
@pytest.fixture
def sample_forecast_start(sample_forecast_data):
    """Fixture that returns the start date of the sample forecast data."""
    # Sanity-check the fixed values baked into the test data file.
    start_str = sample_forecast_data["values"][0][0]["datetime"]
    assert start_str == "2024-10-06T00:00:00.000+02:00"

    timezone_name = sample_forecast_data["meta"]["timezone"]
    assert timezone_name == "Europe/Berlin"

    # Convert to a naive datetime expressed in the data's own timezone.
    start = to_datetime(start_str, to_timezone=timezone_name, to_naiv=True)
    assert start == datetime(2024, 10, 6)
    return start
|
||||
|
||||
|
||||
@pytest.fixture
def pv_forecast_empty_instance():
    """Fixture that returns an empty instance of PVForecast."""
    instance = PVForecast()
    # An empty instance must not have a forecast start yet.
    assert instance.get_forecast_start() is None
    return instance
|
||||
|
||||
|
||||
@pytest.fixture
def pv_forecast_instance(sample_forecast_data, sample_forecast_start):
    """Fixture that returns a PVForecast instance preloaded with sample data."""
    return PVForecast(
        data=sample_forecast_data,
        forecast_start=sample_forecast_start,
        prediction_hours=48,
    )
|
||||
|
||||
|
||||
def test_validate_pv_forecast_data(sample_forecast_data):
    """Test validation of PV forecast data on sample data."""
    # Empty/unknown data is reported as None.
    assert validate_pv_forecast_data({}) is None

    # The sample file is recognized as Akkudoktor format.
    assert validate_pv_forecast_data(sample_forecast_data) == "Akkudoktor"
|
||||
|
||||
|
||||
def test_process_data(sample_forecast_data, sample_forecast_start):
    """Test data processing using sample data."""
    instance = PVForecast(forecast_start=sample_forecast_start)

    # The constructor must have set the requested forecast start.
    assert instance.get_forecast_start() == sample_forecast_start

    # No prediction horizon was given, so it stays unset.
    assert instance.prediction_hours is None

    # Loading the sample data must not raise.
    instance.process_data(data=sample_forecast_data)
|
||||
|
||||
|
||||
def test_update_ac_power_measurement(pv_forecast_instance, sample_forecast_start):
    """Test updating AC power measurement for a specific date."""
    start = pv_forecast_instance.get_forecast_start()
    assert start == sample_forecast_start

    # A matching date must be updated and reported as such.
    assert pv_forecast_instance.update_ac_power_measurement(start, 1000) is True
    records = pv_forecast_instance.get_forecast_data()
    assert records[0].ac_power_measurement == 1000
|
||||
|
||||
|
||||
def test_update_ac_power_measurement_no_match(pv_forecast_instance):
    """Test updating AC power measurement where no date matches."""
    # A date outside the forecast window must not update anything.
    no_match_date = datetime(2023, 10, 2, 1, 0, 0)
    assert not pv_forecast_instance.update_ac_power_measurement(no_match_date, 1000)
|
||||
|
||||
|
||||
def test_get_temperature_forecast_for_date(pv_forecast_instance, sample_forecast_start):
    """Test fetching temperature forecast for a specific date."""
    forecast_temps = pv_forecast_instance.get_temperature_forecast_for_date(sample_forecast_start)
    assert len(forecast_temps) == 24
    assert forecast_temps[0] == 7.0
    assert forecast_temps[1] == 6.5
    assert forecast_temps[2] == 6.0

    # Assure the function bails out if there is no timezone name available for
    # the system. The timezone is restored in a finally block so a failing
    # assertion cannot leak the mutated state into other tests sharing the fixture.
    tz_name = pv_forecast_instance._tz_name
    pv_forecast_instance._tz_name = None
    try:
        with pytest.raises(Exception) as exc_info:
            pv_forecast_instance.get_temperature_forecast_for_date(sample_forecast_start)
    finally:
        pv_forecast_instance._tz_name = tz_name
    assert (
        exc_info.value.args[0] == "Processing without PV system timezone info ist not implemented!"
    )
|
||||
|
||||
|
||||
def test_get_temperature_for_date_range(pv_forecast_instance, sample_forecast_start):
    """Test fetching temperature forecast for a specific date range."""
    end_date = sample_forecast_start + timedelta(hours=24)
    forecast_temps = pv_forecast_instance.get_temperature_for_date_range(
        sample_forecast_start, end_date
    )
    assert len(forecast_temps) == 48
    assert forecast_temps[0] == 7.0
    assert forecast_temps[1] == 6.5
    assert forecast_temps[2] == 6.0

    # Assure the function bails out if there is no timezone name available for
    # the system. The timezone is restored in a finally block so a failing
    # assertion cannot leak the mutated state into other tests sharing the fixture.
    tz_name = pv_forecast_instance._tz_name
    pv_forecast_instance._tz_name = None
    try:
        with pytest.raises(Exception) as exc_info:
            pv_forecast_instance.get_temperature_for_date_range(
                sample_forecast_start, end_date
            )
    finally:
        pv_forecast_instance._tz_name = tz_name
    assert (
        exc_info.value.args[0] == "Processing without PV system timezone info ist not implemented!"
    )
|
||||
|
||||
|
||||
def test_get_forecast_for_date_range(pv_forecast_instance, sample_forecast_start):
    """Test fetching AC power forecast for a specific date range."""
    end_date = sample_forecast_start + timedelta(hours=24)
    forecast = pv_forecast_instance.get_pv_forecast_for_date_range(sample_forecast_start, end_date)
    assert len(forecast) == 48
    assert forecast[0] == 0.0
    assert forecast[1] == 0.0
    assert forecast[2] == 0.0

    # Assure the function bails out if there is no timezone name available for
    # the system. The timezone is restored in a finally block so a failing
    # assertion cannot leak the mutated state into other tests sharing the fixture.
    tz_name = pv_forecast_instance._tz_name
    pv_forecast_instance._tz_name = None
    try:
        with pytest.raises(Exception) as exc_info:
            pv_forecast_instance.get_pv_forecast_for_date_range(
                sample_forecast_start, end_date
            )
    finally:
        pv_forecast_instance._tz_name = tz_name
    assert (
        exc_info.value.args[0] == "Processing without PV system timezone info ist not implemented!"
    )
|
||||
|
||||
|
||||
def test_get_forecast_dataframe(pv_forecast_instance):
    """Test converting forecast data to a DataFrame."""
    df = pv_forecast_instance.get_forecast_dataframe()

    assert len(df) == 288
    expected_columns = ["date_time", "dc_power", "ac_power", "windspeed_10m", "temperature"]
    assert list(df.columns) == expected_columns

    # Spot-check a few known values from the sample data.
    assert df.iloc[0]["dc_power"] == 0.0
    assert df.iloc[1]["ac_power"] == 0.0
    assert df.iloc[2]["temperature"] == 6.0
|
||||
|
||||
|
||||
def test_load_data_from_file(server, pv_forecast_empty_instance):
    """Test loading data from file."""
    # A valid test data file path must yield non-empty data.
    data = pv_forecast_empty_instance.load_data_from_file(FILE_TESTDATA_PV_FORECAST_INPUT_1)
    assert len(data) > 0
|
||||
|
||||
|
||||
def test_load_data_from_url(server, pv_forecast_empty_instance):
    """Test loading data from url."""
    # A valid endpoint of the test server yields non-empty data.
    url = f"{server}/gesamtlast_simple?year_energy=2000&"
    assert len(pv_forecast_empty_instance.load_data_from_url(url)) > 0

    # An unknown endpoint is reported as an error string.
    url = f"{server}/invalid?"
    data = pv_forecast_empty_instance.load_data_from_url(url)
    assert data == f"Failed to load data from `{url}`. Status Code: 404"
|
||||
|
||||
|
||||
def test_load_data_from_url_with_caching(
    server, pv_forecast_empty_instance, sample_forecast_data, sample_forecast_start
):
    """Test loading data from url with cache."""
    # A valid endpoint of the test server yields non-empty data.
    url = f"{server}/gesamtlast_simple?year_energy=2000&"
    assert len(pv_forecast_empty_instance.load_data_from_url_with_caching(url)) > 0

    # An unknown endpoint is reported as an error string.
    url = f"{server}/invalid?"
    data = pv_forecast_empty_instance.load_data_from_url_with_caching(url)
    assert data == f"Failed to load data from `{url}`. Status Code: 404"
|
||||
|
||||
|
||||
def test_report_ac_power_and_measurement(pv_forecast_instance, sample_forecast_report):
    """Test that the generated report matches the stored reference report."""
    report = pv_forecast_instance.report_ac_power_and_measurement()
    assert report == sample_forecast_report
|
||||
|
||||
|
||||
def test_timezone_behaviour(
    pv_forecast_instance, sample_forecast_report, sample_forecast_start, other_timezone
):
    """Test PVForecast in another timezone.

    The `other_timezone` fixture switches the process timezone; the assertions
    check that forecast access still behaves as in the default-timezone tests.
    (Removed an unused `current_time = datetime.now()` local.)
    """
    # Test updating AC power measurement for a specific date.
    date_time = pv_forecast_instance.get_forecast_start()
    assert date_time == sample_forecast_start
    updated = pv_forecast_instance.update_ac_power_measurement(date_time, 1000)
    assert updated is True
    forecast_data = pv_forecast_instance.get_forecast_data()
    assert forecast_data[0].ac_power_measurement == 1000

    # Test fetching temperature forecast for a specific date.
    forecast_temps = pv_forecast_instance.get_temperature_forecast_for_date(sample_forecast_start)
    assert len(forecast_temps) == 24
    assert forecast_temps[0] == 7.0
    assert forecast_temps[1] == 6.5
    assert forecast_temps[2] == 6.0

    # Test fetching AC power forecast.
    end_date = sample_forecast_start + timedelta(hours=24)
    forecast = pv_forecast_instance.get_pv_forecast_for_date_range(sample_forecast_start, end_date)
    assert len(forecast) == 48
    assert forecast[0] == 1000.0  # changed before by update_ac_power_measurement
    assert forecast[1] == 0.0
    assert forecast[2] == 0.0
|
1
tests/testdata/pv_forecast_input_1.json
vendored
Normal file
1
tests/testdata/pv_forecast_input_1.json
vendored
Normal file
File diff suppressed because one or more lines are too long
288
tests/testdata/pv_forecast_result_1.txt
vendored
Normal file
288
tests/testdata/pv_forecast_result_1.txt
vendored
Normal file
@ -0,0 +1,288 @@
|
||||
Zeit: 2024-10-06 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 08:00:00, DC: 29.083233405355106, AC: 23.266586724284085, Messwert: None, AC GET: 23.266586724284085
|
||||
Zeit: 2024-10-06 09:00:00, DC: 603.9155913762734, AC: 483.1324731010187, Messwert: None, AC GET: 483.1324731010187
|
||||
Zeit: 2024-10-06 10:00:00, DC: 1202.5733064798087, AC: 962.0586451838469, Messwert: None, AC GET: 962.0586451838469
|
||||
Zeit: 2024-10-06 11:00:00, DC: 3680.674142237336, AC: 2944.539313789869, Messwert: None, AC GET: 2944.539313789869
|
||||
Zeit: 2024-10-06 12:00:00, DC: 4757.349301221422, AC: 3805.8794409771376, Messwert: None, AC GET: 3805.8794409771376
|
||||
Zeit: 2024-10-06 13:00:00, DC: 4976.074631762431, AC: 3980.8597054099446, Messwert: None, AC GET: 3980.8597054099446
|
||||
Zeit: 2024-10-06 14:00:00, DC: 4661.218846907677, AC: 3728.975077526142, Messwert: None, AC GET: 3728.975077526142
|
||||
Zeit: 2024-10-06 15:00:00, DC: 3946.8020263136905, AC: 3157.4416210509526, Messwert: None, AC GET: 3157.4416210509526
|
||||
Zeit: 2024-10-06 16:00:00, DC: 2243.0039568971824, AC: 1794.4031655177462, Messwert: None, AC GET: 1794.4031655177462
|
||||
Zeit: 2024-10-06 17:00:00, DC: 2001.7063462507354, AC: 1601.3650770005884, Messwert: None, AC GET: 1601.3650770005884
|
||||
Zeit: 2024-10-06 18:00:00, DC: 1064.8644840801587, AC: 851.891587264127, Messwert: None, AC GET: 851.891587264127
|
||||
Zeit: 2024-10-06 19:00:00, DC: 213.62586203307436, AC: 170.9006896264595, Messwert: None, AC GET: 170.9006896264595
|
||||
Zeit: 2024-10-06 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-06 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 08:00:00, DC: 12.879067406412917, AC: 10.303253925130335, Messwert: None, AC GET: 10.303253925130335
|
||||
Zeit: 2024-10-07 09:00:00, DC: 262.9677324587079, AC: 210.37418596696637, Messwert: None, AC GET: 210.37418596696637
|
||||
Zeit: 2024-10-07 10:00:00, DC: 1055.8307755341434, AC: 844.6646204273148, Messwert: None, AC GET: 844.6646204273148
|
||||
Zeit: 2024-10-07 11:00:00, DC: 1047.9202921493936, AC: 838.3362337195149, Messwert: None, AC GET: 838.3362337195149
|
||||
Zeit: 2024-10-07 12:00:00, DC: 1974.588548831151, AC: 1579.6708390649208, Messwert: None, AC GET: 1579.6708390649208
|
||||
Zeit: 2024-10-07 13:00:00, DC: 3705.4090777906226, AC: 2964.327262232498, Messwert: None, AC GET: 2964.327262232498
|
||||
Zeit: 2024-10-07 14:00:00, DC: 4302.5339613230035, AC: 3442.0271690584027, Messwert: None, AC GET: 3442.0271690584027
|
||||
Zeit: 2024-10-07 15:00:00, DC: 3329.4322858356677, AC: 2663.5458286685343, Messwert: None, AC GET: 2663.5458286685343
|
||||
Zeit: 2024-10-07 16:00:00, DC: 2230.6158883385765, AC: 1784.4927106708612, Messwert: None, AC GET: 1784.4927106708612
|
||||
Zeit: 2024-10-07 17:00:00, DC: 1963.7588469904522, AC: 1571.0070775923618, Messwert: None, AC GET: 1571.0070775923618
|
||||
Zeit: 2024-10-07 18:00:00, DC: 893.4702395519014, AC: 714.7761916415211, Messwert: None, AC GET: 714.7761916415211
|
||||
Zeit: 2024-10-07 19:00:00, DC: 196.79652262455014, AC: 157.43721809964012, Messwert: None, AC GET: 157.43721809964012
|
||||
Zeit: 2024-10-07 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-07 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 08:00:00, DC: 10.196920114210368, AC: 8.157536091368295, Messwert: None, AC GET: 8.157536091368295
|
||||
Zeit: 2024-10-08 09:00:00, DC: 533.3040032153134, AC: 426.64320257225074, Messwert: None, AC GET: 426.64320257225074
|
||||
Zeit: 2024-10-08 10:00:00, DC: 1427.631719987348, AC: 1142.1053759898787, Messwert: None, AC GET: 1142.1053759898787
|
||||
Zeit: 2024-10-08 11:00:00, DC: 2290.204548762777, AC: 1832.163639010222, Messwert: None, AC GET: 1832.163639010222
|
||||
Zeit: 2024-10-08 12:00:00, DC: 3171.3560346447457, AC: 2537.0848277157966, Messwert: None, AC GET: 2537.0848277157966
|
||||
Zeit: 2024-10-08 13:00:00, DC: 4321.752228255224, AC: 3457.401782604179, Messwert: None, AC GET: 3457.401782604179
|
||||
Zeit: 2024-10-08 14:00:00, DC: 3827.6262786116013, AC: 3062.101022889281, Messwert: None, AC GET: 3062.101022889281
|
||||
Zeit: 2024-10-08 15:00:00, DC: 3160.087490010488, AC: 2528.0699920083907, Messwert: None, AC GET: 2528.0699920083907
|
||||
Zeit: 2024-10-08 16:00:00, DC: 2140.5172010214637, AC: 1712.4137608171711, Messwert: None, AC GET: 1712.4137608171711
|
||||
Zeit: 2024-10-08 17:00:00, DC: 1527.3895384054126, AC: 1221.9116307243303, Messwert: None, AC GET: 1221.9116307243303
|
||||
Zeit: 2024-10-08 18:00:00, DC: 871.7753606528443, AC: 697.4202885222755, Messwert: None, AC GET: 697.4202885222755
|
||||
Zeit: 2024-10-08 19:00:00, DC: 197.71223085186438, AC: 158.16978468149154, Messwert: None, AC GET: 158.16978468149154
|
||||
Zeit: 2024-10-08 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-08 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 08:00:00, DC: 2.5742724878738605, AC: 2.0594179902990883, Messwert: None, AC GET: 2.0594179902990883
|
||||
Zeit: 2024-10-09 09:00:00, DC: 138.89041623325699, AC: 111.1123329866056, Messwert: None, AC GET: 111.1123329866056
|
||||
Zeit: 2024-10-09 10:00:00, DC: 442.0646392614557, AC: 353.65171140916453, Messwert: None, AC GET: 353.65171140916453
|
||||
Zeit: 2024-10-09 11:00:00, DC: 901.4310408000806, AC: 721.1448326400645, Messwert: None, AC GET: 721.1448326400645
|
||||
Zeit: 2024-10-09 12:00:00, DC: 1343.817631464563, AC: 1075.0541051716505, Messwert: None, AC GET: 1075.0541051716505
|
||||
Zeit: 2024-10-09 13:00:00, DC: 1235.4817012708959, AC: 988.3853610167168, Messwert: None, AC GET: 988.3853610167168
|
||||
Zeit: 2024-10-09 14:00:00, DC: 918.466198606648, AC: 734.7729588853184, Messwert: None, AC GET: 734.7729588853184
|
||||
Zeit: 2024-10-09 15:00:00, DC: 520.5546485716002, AC: 416.4437188572802, Messwert: None, AC GET: 416.4437188572802
|
||||
Zeit: 2024-10-09 16:00:00, DC: 403.2086374448306, AC: 322.56690995586445, Messwert: None, AC GET: 322.56690995586445
|
||||
Zeit: 2024-10-09 17:00:00, DC: 269.0971481593191, AC: 215.27771852745528, Messwert: None, AC GET: 215.27771852745528
|
||||
Zeit: 2024-10-09 18:00:00, DC: 160.38853688287293, AC: 128.31082950629835, Messwert: None, AC GET: 128.31082950629835
|
||||
Zeit: 2024-10-09 19:00:00, DC: 35.94208390736801, AC: 28.753667125894406, Messwert: None, AC GET: 28.753667125894406
|
||||
Zeit: 2024-10-09 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-09 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 08:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 09:00:00, DC: 70.9936987722329, AC: 56.79495901778633, Messwert: None, AC GET: 56.79495901778633
|
||||
Zeit: 2024-10-10 10:00:00, DC: 208.63865710086668, AC: 166.91092568069337, Messwert: None, AC GET: 166.91092568069337
|
||||
Zeit: 2024-10-10 11:00:00, DC: 387.9199717226425, AC: 310.335977378114, Messwert: None, AC GET: 310.335977378114
|
||||
Zeit: 2024-10-10 12:00:00, DC: 1919.665578381796, AC: 1535.7324627054368, Messwert: None, AC GET: 1535.7324627054368
|
||||
Zeit: 2024-10-10 13:00:00, DC: 1103.2342605959298, AC: 882.5874084767438, Messwert: None, AC GET: 882.5874084767438
|
||||
Zeit: 2024-10-10 14:00:00, DC: 2628.0291700044304, AC: 2102.4233360035446, Messwert: None, AC GET: 2102.4233360035446
|
||||
Zeit: 2024-10-10 15:00:00, DC: 2325.6643189163906, AC: 1860.5314551331126, Messwert: None, AC GET: 1860.5314551331126
|
||||
Zeit: 2024-10-10 16:00:00, DC: 1619.6896864930636, AC: 1295.751749194451, Messwert: None, AC GET: 1295.751749194451
|
||||
Zeit: 2024-10-10 17:00:00, DC: 1238.7267357864635, AC: 990.9813886291707, Messwert: None, AC GET: 990.9813886291707
|
||||
Zeit: 2024-10-10 18:00:00, DC: 831.5919590566375, AC: 665.27356724531, Messwert: None, AC GET: 665.27356724531
|
||||
Zeit: 2024-10-10 19:00:00, DC: 153.0017240639474, AC: 122.40137925115792, Messwert: None, AC GET: 122.40137925115792
|
||||
Zeit: 2024-10-10 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-10 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 08:00:00, DC: 2.6222285230061244, AC: 2.0977828184049, Messwert: None, AC GET: 2.0977828184049
|
||||
Zeit: 2024-10-11 09:00:00, DC: 327.0591156174436, AC: 261.6472924939549, Messwert: None, AC GET: 261.6472924939549
|
||||
Zeit: 2024-10-11 10:00:00, DC: 1151.3464680423976, AC: 921.0771744339181, Messwert: None, AC GET: 921.0771744339181
|
||||
Zeit: 2024-10-11 11:00:00, DC: 3403.1305734949497, AC: 2722.5044587959596, Messwert: None, AC GET: 2722.5044587959596
|
||||
Zeit: 2024-10-11 12:00:00, DC: 4994.314478762743, AC: 3995.451583010195, Messwert: None, AC GET: 3995.451583010195
|
||||
Zeit: 2024-10-11 13:00:00, DC: 5121.461754652362, AC: 4097.1694037218895, Messwert: None, AC GET: 4097.1694037218895
|
||||
Zeit: 2024-10-11 14:00:00, DC: 3975.0670223226057, AC: 3180.0536178580846, Messwert: None, AC GET: 3180.0536178580846
|
||||
Zeit: 2024-10-11 15:00:00, DC: 3493.274881906432, AC: 2794.6199055251454, Messwert: None, AC GET: 2794.6199055251454
|
||||
Zeit: 2024-10-11 16:00:00, DC: 1744.5344793073837, AC: 1395.627583445907, Messwert: None, AC GET: 1395.627583445907
|
||||
Zeit: 2024-10-11 17:00:00, DC: 1405.205675341826, AC: 1124.164540273461, Messwert: None, AC GET: 1124.164540273461
|
||||
Zeit: 2024-10-11 18:00:00, DC: 749.0031661781884, AC: 599.2025329425508, Messwert: None, AC GET: 599.2025329425508
|
||||
Zeit: 2024-10-11 19:00:00, DC: 154.82154503859067, AC: 123.85723603087254, Messwert: None, AC GET: 123.85723603087254
|
||||
Zeit: 2024-10-11 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-11 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 08:00:00, DC: 2.647094615296928, AC: 2.1176756922375426, Messwert: None, AC GET: 2.1176756922375426
|
||||
Zeit: 2024-10-12 09:00:00, DC: 488.8155531584445, AC: 391.05244252675567, Messwert: None, AC GET: 391.05244252675567
|
||||
Zeit: 2024-10-12 10:00:00, DC: 1366.6831332414504, AC: 1093.3465065931605, Messwert: None, AC GET: 1093.3465065931605
|
||||
Zeit: 2024-10-12 11:00:00, DC: 2221.0044689711904, AC: 1776.803575176952, Messwert: None, AC GET: 1776.803575176952
|
||||
Zeit: 2024-10-12 12:00:00, DC: 4794.297312124362, AC: 3835.4378496994896, Messwert: None, AC GET: 3835.4378496994896
|
||||
Zeit: 2024-10-12 13:00:00, DC: 4291.706152495934, AC: 3433.364921996747, Messwert: None, AC GET: 3433.364921996747
|
||||
Zeit: 2024-10-12 14:00:00, DC: 3602.495502815165, AC: 2881.996402252132, Messwert: None, AC GET: 2881.996402252132
|
||||
Zeit: 2024-10-12 15:00:00, DC: 2889.920147477597, AC: 2311.9361179820776, Messwert: None, AC GET: 2311.9361179820776
|
||||
Zeit: 2024-10-12 16:00:00, DC: 1835.783463931089, AC: 1468.6267711448713, Messwert: None, AC GET: 1468.6267711448713
|
||||
Zeit: 2024-10-12 17:00:00, DC: 1206.9973161263288, AC: 965.5978529010631, Messwert: None, AC GET: 965.5978529010631
|
||||
Zeit: 2024-10-12 18:00:00, DC: 436.98494395762776, AC: 349.58795516610223, Messwert: None, AC GET: 349.58795516610223
|
||||
Zeit: 2024-10-12 19:00:00, DC: 67.60866005620485, AC: 54.086928044963884, Messwert: None, AC GET: 54.086928044963884
|
||||
Zeit: 2024-10-12 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-12 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 08:00:00, DC: 2.629333120803497, AC: 2.1034664966427976, Messwert: None, AC GET: 2.1034664966427976
|
||||
Zeit: 2024-10-13 09:00:00, DC: 360.35011327486654, AC: 288.2800906198932, Messwert: None, AC GET: 288.2800906198932
|
||||
Zeit: 2024-10-13 10:00:00, DC: 1042.6260475555746, AC: 834.1008380444597, Messwert: None, AC GET: 834.1008380444597
|
||||
Zeit: 2024-10-13 11:00:00, DC: 1394.2770921302458, AC: 1115.4216737041968, Messwert: None, AC GET: 1115.4216737041968
|
||||
Zeit: 2024-10-13 12:00:00, DC: 2399.8313632276004, AC: 1919.8650905820803, Messwert: None, AC GET: 1919.8650905820803
|
||||
Zeit: 2024-10-13 13:00:00, DC: 3958.1919497070767, AC: 3166.5535597656617, Messwert: None, AC GET: 3166.5535597656617
|
||||
Zeit: 2024-10-13 14:00:00, DC: 3367.4412350178764, AC: 2693.9529880143014, Messwert: None, AC GET: 2693.9529880143014
|
||||
Zeit: 2024-10-13 15:00:00, DC: 3169.290947155804, AC: 2535.432757724643, Messwert: None, AC GET: 2535.432757724643
|
||||
Zeit: 2024-10-13 16:00:00, DC: 1911.7740840246706, AC: 1529.4192672197366, Messwert: None, AC GET: 1529.4192672197366
|
||||
Zeit: 2024-10-13 17:00:00, DC: 1386.220592426786, AC: 1108.9764739414288, Messwert: None, AC GET: 1108.9764739414288
|
||||
Zeit: 2024-10-13 18:00:00, DC: 575.9733531838393, AC: 460.77868254707147, Messwert: None, AC GET: 460.77868254707147
|
||||
Zeit: 2024-10-13 19:00:00, DC: 78.56691565855094, AC: 62.85353252684076, Messwert: None, AC GET: 62.85353252684076
|
||||
Zeit: 2024-10-13 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-13 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 08:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 09:00:00, DC: 328.9765131894571, AC: 263.18121055156564, Messwert: None, AC GET: 263.18121055156564
|
||||
Zeit: 2024-10-14 10:00:00, DC: 1003.7648088236231, AC: 803.0118470588985, Messwert: None, AC GET: 803.0118470588985
|
||||
Zeit: 2024-10-14 11:00:00, DC: 1530.7257802155832, AC: 1224.5806241724667, Messwert: None, AC GET: 1224.5806241724667
|
||||
Zeit: 2024-10-14 12:00:00, DC: 1346.3074573320503, AC: 1077.0459658656403, Messwert: None, AC GET: 1077.0459658656403
|
||||
Zeit: 2024-10-14 13:00:00, DC: 1875.1615396721313, AC: 1500.1292317377051, Messwert: None, AC GET: 1500.1292317377051
|
||||
Zeit: 2024-10-14 14:00:00, DC: 1940.5354145494168, AC: 1552.4283316395336, Messwert: None, AC GET: 1552.4283316395336
|
||||
Zeit: 2024-10-14 15:00:00, DC: 1768.1679163173571, AC: 1414.5343330538858, Messwert: None, AC GET: 1414.5343330538858
|
||||
Zeit: 2024-10-14 16:00:00, DC: 1458.7871531001254, AC: 1167.0297224801004, Messwert: None, AC GET: 1167.0297224801004
|
||||
Zeit: 2024-10-14 17:00:00, DC: 1010.7950430626171, AC: 808.6360344500937, Messwert: None, AC GET: 808.6360344500937
|
||||
Zeit: 2024-10-14 18:00:00, DC: 468.9879780808202, AC: 375.1903824646562, Messwert: None, AC GET: 375.1903824646562
|
||||
Zeit: 2024-10-14 19:00:00, DC: 78.50914028438824, AC: 62.80731222751059, Messwert: None, AC GET: 62.80731222751059
|
||||
Zeit: 2024-10-14 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-14 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 08:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 09:00:00, DC: 400.8076493987202, AC: 320.64611951897615, Messwert: None, AC GET: 320.64611951897615
|
||||
Zeit: 2024-10-15 10:00:00, DC: 1135.8607353766422, AC: 908.6885883013138, Messwert: None, AC GET: 908.6885883013138
|
||||
Zeit: 2024-10-15 11:00:00, DC: 2075.472520835528, AC: 1660.3780166684228, Messwert: None, AC GET: 1660.3780166684228
|
||||
Zeit: 2024-10-15 12:00:00, DC: 3664.671214337204, AC: 2931.7369714697634, Messwert: None, AC GET: 2931.7369714697634
|
||||
Zeit: 2024-10-15 13:00:00, DC: 6061.0533141581545, AC: 4848.842651326524, Messwert: None, AC GET: 4848.842651326524
|
||||
Zeit: 2024-10-15 14:00:00, DC: 4141.306741623349, AC: 3313.0453932986793, Messwert: None, AC GET: 3313.0453932986793
|
||||
Zeit: 2024-10-15 15:00:00, DC: 3620.4523441068936, AC: 2896.361875285515, Messwert: None, AC GET: 2896.361875285515
|
||||
Zeit: 2024-10-15 16:00:00, DC: 1666.2167974329839, AC: 1332.973437946387, Messwert: None, AC GET: 1332.973437946387
|
||||
Zeit: 2024-10-15 17:00:00, DC: 1144.0387748455216, AC: 915.2310198764173, Messwert: None, AC GET: 915.2310198764173
|
||||
Zeit: 2024-10-15 18:00:00, DC: 620.723373108851, AC: 496.57869848708077, Messwert: None, AC GET: 496.57869848708077
|
||||
Zeit: 2024-10-15 19:00:00, DC: 132.65653839949636, AC: 106.1252307195971, Messwert: None, AC GET: 106.1252307195971
|
||||
Zeit: 2024-10-15 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-15 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 08:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 09:00:00, DC: 300.0660361917257, AC: 240.05282895338058, Messwert: None, AC GET: 240.05282895338058
|
||||
Zeit: 2024-10-16 10:00:00, DC: 868.5128047099465, AC: 694.8102437679572, Messwert: None, AC GET: 694.8102437679572
|
||||
Zeit: 2024-10-16 11:00:00, DC: 1555.4453842802168, AC: 1244.3563074241736, Messwert: None, AC GET: 1244.3563074241736
|
||||
Zeit: 2024-10-16 12:00:00, DC: 2675.3832192006007, AC: 2140.3065753604806, Messwert: None, AC GET: 2140.3065753604806
|
||||
Zeit: 2024-10-16 13:00:00, DC: 4191.900905711933, AC: 3353.520724569547, Messwert: None, AC GET: 3353.520724569547
|
||||
Zeit: 2024-10-16 14:00:00, DC: 3386.3457857410863, AC: 2709.076628592869, Messwert: None, AC GET: 2709.076628592869
|
||||
Zeit: 2024-10-16 15:00:00, DC: 2980.635337976421, AC: 2384.508270381137, Messwert: None, AC GET: 2384.508270381137
|
||||
Zeit: 2024-10-16 16:00:00, DC: 1728.6316869128443, AC: 1382.9053495302755, Messwert: None, AC GET: 1382.9053495302755
|
||||
Zeit: 2024-10-16 17:00:00, DC: 1143.3109041676573, AC: 914.6487233341259, Messwert: None, AC GET: 914.6487233341259
|
||||
Zeit: 2024-10-16 18:00:00, DC: 567.6444132249978, AC: 454.11553057999834, Messwert: None, AC GET: 454.11553057999834
|
||||
Zeit: 2024-10-16 19:00:00, DC: 116.3312759656061, AC: 93.06502077248489, Messwert: None, AC GET: 93.06502077248489
|
||||
Zeit: 2024-10-16 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-16 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 00:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 01:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 02:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 03:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 04:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 05:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 06:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 07:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 08:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 09:00:00, DC: 271.7219822789749, AC: 217.3775858231799, Messwert: None, AC GET: 217.3775858231799
|
||||
Zeit: 2024-10-17 10:00:00, DC: 809.4024309606523, AC: 647.5219447685218, Messwert: None, AC GET: 647.5219447685218
|
||||
Zeit: 2024-10-17 11:00:00, DC: 1387.6787429050005, AC: 1110.1429943240005, Messwert: None, AC GET: 1110.1429943240005
|
||||
Zeit: 2024-10-17 12:00:00, DC: 1990.9522710681556, AC: 1592.7618168545246, Messwert: None, AC GET: 1592.7618168545246
|
||||
Zeit: 2024-10-17 13:00:00, DC: 2147.642554452618, AC: 1718.1140435620944, Messwert: None, AC GET: 1718.1140435620944
|
||||
Zeit: 2024-10-17 14:00:00, DC: 2158.8258001478607, AC: 1727.0606401182886, Messwert: None, AC GET: 1727.0606401182886
|
||||
Zeit: 2024-10-17 15:00:00, DC: 2027.8192437466564, AC: 1622.2553949973253, Messwert: None, AC GET: 1622.2553949973253
|
||||
Zeit: 2024-10-17 16:00:00, DC: 1626.5734434020972, AC: 1301.2587547216779, Messwert: None, AC GET: 1301.2587547216779
|
||||
Zeit: 2024-10-17 17:00:00, DC: 1165.2889667744384, AC: 932.2311734195508, Messwert: None, AC GET: 932.2311734195508
|
||||
Zeit: 2024-10-17 18:00:00, DC: 573.52703976922, AC: 458.821631815376, Messwert: None, AC GET: 458.821631815376
|
||||
Zeit: 2024-10-17 19:00:00, DC: 102.43605685684926, AC: 81.94884548547941, Messwert: None, AC GET: 81.94884548547941
|
||||
Zeit: 2024-10-17 20:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 21:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 22:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
||||
Zeit: 2024-10-17 23:00:00, DC: 0, AC: 0, Messwert: None, AC GET: 0
|
Loading…
x
Reference in New Issue
Block a user