mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-11-21 04:46:31 +00:00
chore: improve doc generation and test (#762)
Some checks failed
docker-build / platform-excludes (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
Close stale pull requests/issues / Find Stale issues and PRs (push) Has been cancelled
Some checks failed
docker-build / platform-excludes (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
Close stale pull requests/issues / Find Stale issues and PRs (push) Has been cancelled
Improve documentation generation and add tests for documentation. Extend sphinx by todo directive. The configuration table is now split into several tables. The test is adapted accordingly. There is a new test that checks the docstrings to be compliant with the RST format as used by sphinx to create the documentation. We can not use Markdown in docstrings. The docstrings are adapted accordingly. An additional test checks that the documentation can be built with sphinx. This test takes very long and is only enabled in full run (aka ci) mode. Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
@@ -287,10 +287,10 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
||||
|
||||
Example:
|
||||
To initialize and access configuration attributes (only one instance is created):
|
||||
```python
|
||||
config_eos = ConfigEOS() # Always returns the same instance
|
||||
print(config_eos.prediction.hours) # Access a setting from the loaded configuration
|
||||
```
|
||||
.. code-block:: python
|
||||
|
||||
config_eos = ConfigEOS() # Always returns the same instance
|
||||
print(config_eos.prediction.hours) # Access a setting from the loaded configuration
|
||||
|
||||
"""
|
||||
|
||||
@@ -461,9 +461,12 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
||||
ValidationError: If the data contains invalid values for the defined fields.
|
||||
|
||||
Example:
|
||||
>>> config = get_config()
|
||||
>>> new_data = {"prediction": {"hours": 24}, "server": {"port": 8000}}
|
||||
>>> config.merge_settings_from_dict(new_data)
|
||||
.. code-block:: python
|
||||
|
||||
config = get_config()
|
||||
new_data = {"prediction": {"hours": 24}, "server": {"port": 8000}}
|
||||
config.merge_settings_from_dict(new_data)
|
||||
|
||||
"""
|
||||
self._setup(**merge_models(self, data))
|
||||
|
||||
@@ -518,8 +521,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
||||
The returned dictionary uses `backup_id` (suffix) as keys. The value for
|
||||
each key is a dictionary including:
|
||||
- ``storage_time``: The file modification timestamp in ISO-8601 format.
|
||||
- ``version``: Version information found in the backup file
|
||||
(defaults to ``"unknown"``).
|
||||
- ``version``: Version information found in the backup file (defaults to ``"unknown"``).
|
||||
|
||||
Returns:
|
||||
dict[str, dict[str, Any]]: Mapping of backup identifiers to metadata.
|
||||
|
||||
@@ -90,7 +90,10 @@ class CacheEnergyManagementStore(SingletonMixin):
|
||||
the application lifecycle.
|
||||
|
||||
Example:
|
||||
>>> cache = CacheEnergyManagementStore()
|
||||
.. code-block:: python
|
||||
|
||||
cache = CacheEnergyManagementStore()
|
||||
|
||||
"""
|
||||
if hasattr(self, "_initialized"):
|
||||
return
|
||||
@@ -112,7 +115,10 @@ class CacheEnergyManagementStore(SingletonMixin):
|
||||
AttributeError: If the cache object does not have the requested method.
|
||||
|
||||
Example:
|
||||
>>> result = cache.get("key")
|
||||
.. code-block:: python
|
||||
|
||||
result = cache.get("key")
|
||||
|
||||
"""
|
||||
# This will return a method of the target cache, or raise an AttributeError
|
||||
target_attr = getattr(self.cache, name)
|
||||
@@ -134,7 +140,10 @@ class CacheEnergyManagementStore(SingletonMixin):
|
||||
KeyError: If the key does not exist in the cache.
|
||||
|
||||
Example:
|
||||
>>> value = cache["user_data"]
|
||||
.. code-block:: python
|
||||
|
||||
value = cache["user_data"]
|
||||
|
||||
"""
|
||||
return CacheEnergyManagementStore.cache[key]
|
||||
|
||||
@@ -146,7 +155,10 @@ class CacheEnergyManagementStore(SingletonMixin):
|
||||
value (Any): The value to store.
|
||||
|
||||
Example:
|
||||
>>> cache["user_data"] = {"name": "Alice", "age": 30}
|
||||
.. code-block:: python
|
||||
|
||||
cache["user_data"] = {"name": "Alice", "age": 30}
|
||||
|
||||
"""
|
||||
CacheEnergyManagementStore.cache[key] = value
|
||||
|
||||
@@ -166,7 +178,10 @@ class CacheEnergyManagementStore(SingletonMixin):
|
||||
management system run).
|
||||
|
||||
Example:
|
||||
>>> cache.clear()
|
||||
.. code-block:: python
|
||||
|
||||
cache.clear()
|
||||
|
||||
"""
|
||||
if hasattr(self.cache, "clear") and callable(getattr(self.cache, "clear")):
|
||||
CacheEnergyManagementStore.cache.clear()
|
||||
@@ -248,12 +263,15 @@ class CacheFileStore(ConfigMixin, SingletonMixin):
|
||||
with their associated keys and dates.
|
||||
|
||||
Example:
|
||||
>>> cache_store = CacheFileStore()
|
||||
>>> cache_store.create('example_file')
|
||||
>>> cache_file = cache_store.get('example_file')
|
||||
>>> cache_file.write('Some data')
|
||||
>>> cache_file.seek(0)
|
||||
>>> print(cache_file.read()) # Output: 'Some data'
|
||||
.. code-block:: python
|
||||
|
||||
cache_store = CacheFileStore()
|
||||
cache_store.create('example_file')
|
||||
cache_file = cache_store.get('example_file')
|
||||
cache_file.write('Some data')
|
||||
cache_file.seek(0)
|
||||
print(cache_file.read()) # Output: 'Some data'
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
@@ -462,10 +480,13 @@ class CacheFileStore(ConfigMixin, SingletonMixin):
|
||||
file_obj: A file-like object representing the cache file.
|
||||
|
||||
Example:
|
||||
>>> cache_file = cache_store.create('example_file', suffix='.txt')
|
||||
>>> cache_file.write('Some cached data')
|
||||
>>> cache_file.seek(0)
|
||||
>>> print(cache_file.read()) # Output: 'Some cached data'
|
||||
.. code-block:: python
|
||||
|
||||
cache_file = cache_store.create('example_file', suffix='.txt')
|
||||
cache_file.write('Some cached data')
|
||||
cache_file.seek(0)
|
||||
print(cache_file.read()) # Output: 'Some cached data'
|
||||
|
||||
"""
|
||||
cache_file_key, until_datetime_dt, ttl_duration = self._generate_cache_file_key(
|
||||
key, until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
|
||||
@@ -514,7 +535,10 @@ class CacheFileStore(ConfigMixin, SingletonMixin):
|
||||
ValueError: If the key is already in store.
|
||||
|
||||
Example:
|
||||
>>> cache_store.set('example_file', io.BytesIO(b'Some binary data'))
|
||||
.. code-block:: python
|
||||
|
||||
cache_store.set('example_file', io.BytesIO(b'Some binary data'))
|
||||
|
||||
"""
|
||||
cache_file_key, until_datetime_dt, ttl_duration = self._generate_cache_file_key(
|
||||
key, until_datetime=until_datetime, until_date=until_date, with_ttl=with_ttl
|
||||
@@ -570,10 +594,13 @@ class CacheFileStore(ConfigMixin, SingletonMixin):
|
||||
file_obj: The file-like cache object, or None if no file is found.
|
||||
|
||||
Example:
|
||||
>>> cache_file = cache_store.get('example_file')
|
||||
>>> if cache_file:
|
||||
>>> cache_file.seek(0)
|
||||
>>> print(cache_file.read()) # Output: Cached data (if exists)
|
||||
.. code-block:: python
|
||||
|
||||
cache_file = cache_store.get('example_file')
|
||||
if cache_file:
|
||||
cache_file.seek(0)
|
||||
print(cache_file.read()) # Output: Cached data (if exists)
|
||||
|
||||
"""
|
||||
if until_datetime or until_date:
|
||||
until_datetime, _ttl_duration = self._until_datetime_by_options(
|
||||
@@ -852,13 +879,15 @@ def cache_in_file(
|
||||
A decorated function that caches its result in a temporary file.
|
||||
|
||||
Example:
|
||||
>>> from datetime import date
|
||||
>>> @cache_in_file(suffix='.txt')
|
||||
>>> def expensive_computation(until_date=None):
|
||||
>>> # Perform some expensive computation
|
||||
>>> return 'Some large result'
|
||||
>>>
|
||||
>>> result = expensive_computation(until_date=date.today())
|
||||
.. code-block:: python
|
||||
|
||||
from datetime import date
|
||||
@cache_in_file(suffix='.txt')
|
||||
def expensive_computation(until_date=None):
|
||||
# Perform some expensive computation
|
||||
return 'Some large result'
|
||||
|
||||
result = expensive_computation(until_date=date.today())
|
||||
|
||||
Notes:
|
||||
- The cache key is based on the function arguments after excluding those in `ignore_params`.
|
||||
|
||||
@@ -39,11 +39,12 @@ class ConfigMixin:
|
||||
config (ConfigEOS): Property to access the global EOS configuration.
|
||||
|
||||
Example:
|
||||
```python
|
||||
class MyEOSClass(ConfigMixin):
|
||||
def my_method(self):
|
||||
if self.config.myconfigval:
|
||||
```
|
||||
.. code-block:: python
|
||||
|
||||
class MyEOSClass(ConfigMixin):
|
||||
def my_method(self):
|
||||
if self.config.myconfigval:
|
||||
|
||||
"""
|
||||
|
||||
@classproperty
|
||||
@@ -78,12 +79,13 @@ class MeasurementMixin:
|
||||
measurement (Measurement): Property to access the global EOS measurement data.
|
||||
|
||||
Example:
|
||||
```python
|
||||
class MyOptimizationClass(MeasurementMixin):
|
||||
def analyze_mymeasurement(self):
|
||||
measurement_data = self.measurement.mymeasurement
|
||||
# Perform analysis
|
||||
```
|
||||
.. code-block:: python
|
||||
|
||||
class MyOptimizationClass(MeasurementMixin):
|
||||
def analyze_mymeasurement(self):
|
||||
measurement_data = self.measurement.mymeasurement
|
||||
# Perform analysis
|
||||
|
||||
"""
|
||||
|
||||
@classproperty
|
||||
@@ -118,12 +120,13 @@ class PredictionMixin:
|
||||
prediction (Prediction): Property to access the global EOS prediction data.
|
||||
|
||||
Example:
|
||||
```python
|
||||
class MyOptimizationClass(PredictionMixin):
|
||||
def analyze_myprediction(self):
|
||||
prediction_data = self.prediction.mypredictionresult
|
||||
# Perform analysis
|
||||
```
|
||||
.. code-block:: python
|
||||
|
||||
class MyOptimizationClass(PredictionMixin):
|
||||
def analyze_myprediction(self):
|
||||
prediction_data = self.prediction.mypredictionresult
|
||||
# Perform analysis
|
||||
|
||||
"""
|
||||
|
||||
@classproperty
|
||||
@@ -159,12 +162,13 @@ class EnergyManagementSystemMixin:
|
||||
ems (EnergyManagementSystem): Property to access the global EOS energy management system.
|
||||
|
||||
Example:
|
||||
```python
|
||||
class MyOptimizationClass(EnergyManagementSystemMixin):
|
||||
def analyze_myprediction(self):
|
||||
ems_data = self.ems.the_ems_method()
|
||||
# Perform analysis
|
||||
```
|
||||
.. code-block:: python
|
||||
|
||||
class MyOptimizationClass(EnergyManagementSystemMixin):
|
||||
def analyze_myprediction(self):
|
||||
ems_data = self.ems.the_ems_method()
|
||||
# Perform analysis
|
||||
|
||||
"""
|
||||
|
||||
@classproperty
|
||||
@@ -224,22 +228,25 @@ class SingletonMixin:
|
||||
- Avoid using `__init__` to reinitialize the singleton instance after it has been created.
|
||||
|
||||
Example:
|
||||
class MySingletonModel(SingletonMixin, PydanticBaseModel):
|
||||
name: str
|
||||
.. code-block:: python
|
||||
|
||||
# implement __init__ to avoid re-initialization of parent classes:
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
if hasattr(self, "_initialized"):
|
||||
return
|
||||
# Your initialisation here
|
||||
...
|
||||
super().__init__(*args, **kwargs)
|
||||
class MySingletonModel(SingletonMixin, PydanticBaseModel):
|
||||
name: str
|
||||
|
||||
instance1 = MySingletonModel(name="Instance 1")
|
||||
instance2 = MySingletonModel(name="Instance 2")
|
||||
# implement __init__ to avoid re-initialization of parent classes:
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
if hasattr(self, "_initialized"):
|
||||
return
|
||||
# Your initialisation here
|
||||
...
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
instance1 = MySingletonModel(name="Instance 1")
|
||||
instance2 = MySingletonModel(name="Instance 2")
|
||||
|
||||
assert instance1 is instance2 # True
|
||||
print(instance1.name) # Output: "Instance 1"
|
||||
|
||||
assert instance1 is instance2 # True
|
||||
print(instance1.name) # Output: "Instance 1"
|
||||
"""
|
||||
|
||||
_lock: ClassVar[threading.Lock] = threading.Lock()
|
||||
|
||||
@@ -432,20 +432,23 @@ class DataSequence(DataBase, MutableSequence):
|
||||
Derived classes have to provide their own records field with correct record type set.
|
||||
|
||||
Usage:
|
||||
# Example of creating, adding, and using DataSequence
|
||||
class DerivedSequence(DataSquence):
|
||||
records: List[DerivedDataRecord] = Field(default_factory=list, json_schema_extra={ "description": "List of data records" })
|
||||
.. code-block:: python
|
||||
|
||||
seq = DerivedSequence()
|
||||
seq.insert(DerivedDataRecord(date_time=datetime.now(), temperature=72))
|
||||
seq.insert(DerivedDataRecord(date_time=datetime.now(), temperature=75))
|
||||
# Example of creating, adding, and using DataSequence
|
||||
class DerivedSequence(DataSquence):
|
||||
records: List[DerivedDataRecord] = Field(default_factory=list, json_schema_extra={ "description": "List of data records" })
|
||||
|
||||
# Convert to JSON and back
|
||||
json_data = seq.to_json()
|
||||
new_seq = DerivedSequence.from_json(json_data)
|
||||
seq = DerivedSequence()
|
||||
seq.insert(DerivedDataRecord(date_time=datetime.now(), temperature=72))
|
||||
seq.insert(DerivedDataRecord(date_time=datetime.now(), temperature=75))
|
||||
|
||||
# Convert to JSON and back
|
||||
json_data = seq.to_json()
|
||||
new_seq = DerivedSequence.from_json(json_data)
|
||||
|
||||
# Convert to Pandas Series
|
||||
series = seq.key_to_series('temperature')
|
||||
|
||||
# Convert to Pandas Series
|
||||
series = seq.key_to_series('temperature')
|
||||
"""
|
||||
|
||||
# To be overloaded by derived classes.
|
||||
@@ -737,9 +740,12 @@ class DataSequence(DataBase, MutableSequence):
|
||||
**kwargs: Key-value pairs as keyword arguments
|
||||
|
||||
Examples:
|
||||
>>> update_value(date, 'temperature', 25.5)
|
||||
>>> update_value(date, {'temperature': 25.5, 'humidity': 80})
|
||||
>>> update_value(date, temperature=25.5, humidity=80)
|
||||
.. code-block:: python
|
||||
|
||||
update_value(date, 'temperature', 25.5)
|
||||
update_value(date, {'temperature': 25.5, 'humidity': 80})
|
||||
update_value(date, temperature=25.5, humidity=80)
|
||||
|
||||
"""
|
||||
# Process input arguments into a dictionary
|
||||
values: Dict[str, Any] = {}
|
||||
@@ -1378,15 +1384,18 @@ class DataImportMixin:
|
||||
"""Mixin class for import of generic data.
|
||||
|
||||
This class is designed to handle generic data provided in the form of a key-value dictionary.
|
||||
|
||||
- **Keys**: Represent identifiers from the record keys of a specific data.
|
||||
- **Values**: Are lists of data values starting at a specified `start_datetime`, where
|
||||
- **Values**: Are lists of data values starting at a specified start_datetime, where
|
||||
each value corresponds to a subsequent time interval (e.g., hourly).
|
||||
|
||||
Two special keys are handled. `start_datetime` may be used to defined the starting datetime of
|
||||
the values. `ìnterval` may be used to define the fixed time interval between two values.
|
||||
Two special keys are handled. start_datetime may be used to define the starting datetime of
|
||||
the values. interval may be used to define the fixed time interval between two values.
|
||||
|
||||
On import self.update_value(datetime, key, value) is called which has to be provided.
|
||||
Also self.ems_start_datetime may be necessary as a default in case start_datetime is not
|
||||
given.
|
||||
|
||||
On import `self.update_value(datetime, key, value)` is called which has to be provided.
|
||||
Also `self.ems_start_datetime` may be necessary as a default in case `start_datetime` is not given.
|
||||
"""
|
||||
|
||||
# Attributes required but defined elsewhere.
|
||||
@@ -1418,16 +1427,20 @@ class DataImportMixin:
|
||||
Behavior:
|
||||
- Skips invalid timestamps during DST spring forward transitions.
|
||||
- Includes both instances of repeated timestamps during DST fall back transitions.
|
||||
- Ensures the list contains exactly `value_count` entries.
|
||||
- Ensures the list contains exactly 'value_count' entries.
|
||||
|
||||
Example:
|
||||
>>> start_datetime = pendulum.datetime(2024, 11, 3, 0, 0, tz="America/New_York")
|
||||
>>> import_datetimes(start_datetime, 5)
|
||||
[(DateTime(2024, 11, 3, 0, 0, tzinfo=Timezone('America/New_York')), 0),
|
||||
(DateTime(2024, 11, 3, 1, 0, tzinfo=Timezone('America/New_York')), 1),
|
||||
(DateTime(2024, 11, 3, 1, 0, tzinfo=Timezone('America/New_York')), 1), # Repeated hour
|
||||
(DateTime(2024, 11, 3, 2, 0, tzinfo=Timezone('America/New_York')), 2),
|
||||
(DateTime(2024, 11, 3, 3, 0, tzinfo=Timezone('America/New_York')), 3)]
|
||||
.. code-block:: python
|
||||
|
||||
start_datetime = pendulum.datetime(2024, 11, 3, 0, 0, tz="America/New_York")
|
||||
import_datetimes(start_datetime, 5)
|
||||
|
||||
[(DateTime(2024, 11, 3, 0, 0, tzinfo=Timezone('America/New_York')), 0),
|
||||
(DateTime(2024, 11, 3, 1, 0, tzinfo=Timezone('America/New_York')), 1),
|
||||
(DateTime(2024, 11, 3, 1, 0, tzinfo=Timezone('America/New_York')), 1), # Repeated hour
|
||||
(DateTime(2024, 11, 3, 2, 0, tzinfo=Timezone('America/New_York')), 2),
|
||||
(DateTime(2024, 11, 3, 3, 0, tzinfo=Timezone('America/New_York')), 3)]
|
||||
|
||||
"""
|
||||
timestamps_with_indices: List[Tuple[DateTime, int]] = []
|
||||
|
||||
@@ -1665,17 +1678,18 @@ class DataImportMixin:
|
||||
JSONDecodeError: If the file content is not valid JSON.
|
||||
|
||||
Example:
|
||||
Given a JSON string with the following content:
|
||||
```json
|
||||
{
|
||||
"start_datetime": "2024-11-10 00:00:00"
|
||||
"interval": "30 minutes"
|
||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||
"other_xyz: [10.5, 11.0, 12.1],
|
||||
}
|
||||
```
|
||||
and `key_prefix = "load"`, only the "loadforecast_power_w" key will be processed even though
|
||||
both keys are in the record.
|
||||
Given a JSON string with the following content and `key_prefix = "load"`, only the
|
||||
"loadforecast_power_w" key will be processed even though both keys are in the record.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"start_datetime": "2024-11-10 00:00:00",
|
||||
"interval": "30 minutes",
|
||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||
"other_xyz: [10.5, 11.0, 12.1]
|
||||
}
|
||||
|
||||
"""
|
||||
# Try pandas dataframe with orient="split"
|
||||
try:
|
||||
@@ -1741,15 +1755,16 @@ class DataImportMixin:
|
||||
JSONDecodeError: If the file content is not valid JSON.
|
||||
|
||||
Example:
|
||||
Given a JSON file with the following content:
|
||||
```json
|
||||
{
|
||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||
"other_xyz: [10.5, 11.0, 12.1],
|
||||
}
|
||||
```
|
||||
and `key_prefix = "load"`, only the "loadforecast_power_w" key will be processed even though
|
||||
both keys are in the record.
|
||||
Given a JSON file with the following content and `key_prefix = "load"`, only the
|
||||
"loadforecast_power_w" key will be processed even though both keys are in the record.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||
"other_xyz: [10.5, 11.0, 12.1],
|
||||
}
|
||||
|
||||
"""
|
||||
with import_file_path.open("r", encoding="utf-8", newline=None) as import_file:
|
||||
import_str = import_file.read()
|
||||
@@ -1762,9 +1777,10 @@ class DataImportProvider(DataImportMixin, DataProvider):
|
||||
"""Abstract base class for data providers that import generic data.
|
||||
|
||||
This class is designed to handle generic data provided in the form of a key-value dictionary.
|
||||
|
||||
- **Keys**: Represent identifiers from the record keys of a specific data.
|
||||
- **Values**: Are lists of data values starting at a specified `start_datetime`, where
|
||||
each value corresponds to a subsequent time interval (e.g., hourly).
|
||||
each value corresponds to a subsequent time interval (e.g., hourly).
|
||||
|
||||
Subclasses must implement the logic for managing generic data based on the imported records.
|
||||
"""
|
||||
|
||||
@@ -12,14 +12,16 @@ class classproperty:
|
||||
the class rather than any instance of the class.
|
||||
|
||||
Example:
|
||||
class MyClass:
|
||||
_value = 42
|
||||
.. code-block:: python
|
||||
|
||||
@classproperty
|
||||
def value(cls):
|
||||
return cls._value
|
||||
class MyClass:
|
||||
_value = 42
|
||||
|
||||
print(MyClass.value) # Outputs: 42
|
||||
@classproperty
|
||||
def value(cls):
|
||||
return cls._value
|
||||
|
||||
print(MyClass.value) # Outputs: 42
|
||||
|
||||
Methods:
|
||||
__get__: Retrieves the value of the class property by calling the
|
||||
|
||||
@@ -6,10 +6,12 @@ These enhancements facilitate the use of Pydantic models in applications requiri
|
||||
datetime fields and consistent data serialization.
|
||||
|
||||
Key Features:
|
||||
|
||||
- Custom type adapter for `pendulum.DateTime` fields with automatic serialization to ISO 8601 strings.
|
||||
- Utility methods for converting models to and from dictionaries and JSON strings.
|
||||
- Validation tools for maintaining data consistency, including specialized support for
|
||||
pandas DataFrames and Series with datetime indexes.
|
||||
|
||||
"""
|
||||
|
||||
import inspect
|
||||
@@ -157,16 +159,19 @@ class PydanticModelNestedValueMixin:
|
||||
or an invalid transition is made (such as an attribute on a non-model).
|
||||
|
||||
Example:
|
||||
class Address(PydanticBaseModel):
|
||||
city: str
|
||||
.. code-block:: python
|
||||
|
||||
class User(PydanticBaseModel):
|
||||
name: str
|
||||
address: Address
|
||||
class Address(PydanticBaseModel):
|
||||
city: str
|
||||
|
||||
class User(PydanticBaseModel):
|
||||
name: str
|
||||
address: Address
|
||||
|
||||
user = User(name="Alice", address=Address(city="NY"))
|
||||
user._validate_path_structure("address/city") # OK
|
||||
user._validate_path_structure("address/zipcode") # Raises ValueError
|
||||
|
||||
user = User(name="Alice", address=Address(city="NY"))
|
||||
user._validate_path_structure("address/city") # OK
|
||||
user._validate_path_structure("address/zipcode") # Raises ValueError
|
||||
"""
|
||||
path_elements = path.strip("/").split("/")
|
||||
# The model we are currently working on
|
||||
@@ -264,18 +269,19 @@ class PydanticModelNestedValueMixin:
|
||||
IndexError: If a list index is out of bounds or invalid.
|
||||
|
||||
Example:
|
||||
```python
|
||||
class Address(PydanticBaseModel):
|
||||
city: str
|
||||
.. code-block:: python
|
||||
|
||||
class User(PydanticBaseModel):
|
||||
name: str
|
||||
address: Address
|
||||
class Address(PydanticBaseModel):
|
||||
city: str
|
||||
|
||||
class User(PydanticBaseModel):
|
||||
name: str
|
||||
address: Address
|
||||
|
||||
user = User(name="Alice", address=Address(city="New York"))
|
||||
city = user.get_nested_value("address/city")
|
||||
print(city) # Output: "New York"
|
||||
|
||||
user = User(name="Alice", address=Address(city="New York"))
|
||||
city = user.get_nested_value("address/city")
|
||||
print(city) # Output: "New York"
|
||||
```
|
||||
"""
|
||||
path_elements = path.strip("/").split("/")
|
||||
model: Any = self
|
||||
@@ -318,22 +324,23 @@ class PydanticModelNestedValueMixin:
|
||||
TypeError: If a missing field cannot be initialized.
|
||||
|
||||
Example:
|
||||
```python
|
||||
class Address(PydanticBaseModel):
|
||||
city: Optional[str]
|
||||
.. code-block:: python
|
||||
|
||||
class User(PydanticBaseModel):
|
||||
name: str
|
||||
address: Optional[Address]
|
||||
settings: Optional[Dict[str, Any]]
|
||||
class Address(PydanticBaseModel):
|
||||
city: Optional[str]
|
||||
|
||||
user = User(name="Alice", address=None, settings=None)
|
||||
user.set_nested_value("address/city", "Los Angeles")
|
||||
user.set_nested_value("settings/theme", "dark")
|
||||
class User(PydanticBaseModel):
|
||||
name: str
|
||||
address: Optional[Address]
|
||||
settings: Optional[Dict[str, Any]]
|
||||
|
||||
user = User(name="Alice", address=None, settings=None)
|
||||
user.set_nested_value("address/city", "Los Angeles")
|
||||
user.set_nested_value("settings/theme", "dark")
|
||||
|
||||
print(user.address.city) # Output: "Los Angeles"
|
||||
print(user.settings) # Output: {'theme': 'dark'}
|
||||
|
||||
print(user.address.city) # Output: "Los Angeles"
|
||||
print(user.settings) # Output: {'theme': 'dark'}
|
||||
```
|
||||
"""
|
||||
path = path.strip("/")
|
||||
# Store old value (if possible)
|
||||
@@ -753,18 +760,21 @@ class PydanticBaseModel(PydanticModelNestedValueMixin, BaseModel):
|
||||
gracefully by returning an empty dictionary.
|
||||
|
||||
Examples:
|
||||
>>> class User(Base):
|
||||
... name: str = Field(
|
||||
... json_schema_extra={"description": "User name"}
|
||||
... )
|
||||
...
|
||||
>>> field = User.model_fields["name"]
|
||||
>>> User.get_field_extra_dict(field)
|
||||
{'description': 'User name'}
|
||||
.. code-block:: python
|
||||
|
||||
class User(Base):
|
||||
name: str = Field(
|
||||
json_schema_extra={"description": "User name"}
|
||||
)
|
||||
|
||||
field = User.model_fields["name"]
|
||||
User.get_field_extra_dict(field)
|
||||
{'description': 'User name'}
|
||||
|
||||
missing = User.model_fields.get("unknown", None)
|
||||
User.get_field_extra_dict(missing) if missing else {}
|
||||
{}
|
||||
|
||||
>>> missing = User.model_fields.get("unknown", None)
|
||||
>>> User.get_field_extra_dict(missing) if missing else {}
|
||||
{}
|
||||
"""
|
||||
if model_field is None:
|
||||
return {}
|
||||
@@ -873,12 +883,15 @@ class PydanticDateTimeData(RootModel):
|
||||
- All value lists must have the same length
|
||||
|
||||
Example:
|
||||
{
|
||||
"start_datetime": "2024-01-01 00:00:00", # optional
|
||||
"interval": "1 Hour", # optional
|
||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||
"load_min": [18.5, 19.0, 20.1]
|
||||
}
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"start_datetime": "2024-01-01 00:00:00", # optional
|
||||
"interval": "1 Hour", # optional
|
||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||
"load_min": [18.5, 19.0, 20.1]
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
root: Dict[str, Union[str, List[Union[float, int, str, None]]]]
|
||||
@@ -1275,9 +1288,12 @@ class PydanticDateTimeSeries(PydanticBaseModel):
|
||||
ValueError: If series index is not datetime type.
|
||||
|
||||
Example:
|
||||
>>> dates = pd.date_range('2024-01-01', periods=3)
|
||||
>>> s = pd.Series([1.1, 2.2, 3.3], index=dates)
|
||||
>>> model = PydanticDateTimeSeries.from_series(s)
|
||||
.. code-block:: python
|
||||
|
||||
dates = pd.date_range('2024-01-01', periods=3)
|
||||
s = pd.Series([1.1, 2.2, 3.3], index=dates)
|
||||
model = PydanticDateTimeSeries.from_series(s)
|
||||
|
||||
"""
|
||||
index = pd.Index([to_datetime(dt, as_string=True, in_timezone=tz) for dt in series.index])
|
||||
series.index = index
|
||||
|
||||
@@ -171,25 +171,28 @@ class Battery:
|
||||
|
||||
Two **exclusive** modes:
|
||||
|
||||
Mode 1:
|
||||
- `wh is not None` and `charge_factor == 0`
|
||||
→ The raw requested charge energy is `wh` (pre-efficiency).
|
||||
→ If remaining capacity is insufficient, charging is automatically limited.
|
||||
→ No exception is raised due to capacity limits.
|
||||
**Mode 1:**
|
||||
|
||||
Mode 2:
|
||||
- `wh is None` and `charge_factor > 0`
|
||||
→ The raw requested energy is `max_charge_power_w * charge_factor`.
|
||||
→ If the request exceeds remaining capacity, the algorithm tries to
|
||||
find a lower charge_factor that is compatible. If such a charge factor
|
||||
exists, this hour’s charge_factor is replaced.
|
||||
→ If no charge factor can accommodate charging, the request is ignored
|
||||
(`(0.0, 0.0)` is returned) and a penalty is applied elsewhere.
|
||||
- `wh is not None` and `charge_factor == 0`
|
||||
- The raw requested charge energy is `wh` (pre-efficiency).
|
||||
- If remaining capacity is insufficient, charging is automatically limited.
|
||||
- No exception is raised due to capacity limits.
|
||||
|
||||
**Mode 2:**
|
||||
|
||||
- `wh is None` and `charge_factor > 0`
|
||||
- The raw requested energy is `max_charge_power_w * charge_factor`.
|
||||
- If the request exceeds remaining capacity, the algorithm tries to find a lower
|
||||
`charge_factor` that is compatible. If such a charge factor exists, this hour’s
|
||||
`charge_factor` is replaced.
|
||||
- If no charge factor can accommodate charging, the request is ignored (``(0.0, 0.0)`` is
|
||||
returned) and a penalty is applied elsewhere.
|
||||
|
||||
Charging is constrained by:
|
||||
• Available SoC headroom (max_soc_wh − soc_wh)
|
||||
• max_charge_power_w
|
||||
• charging_efficiency
|
||||
|
||||
- Available SoC headroom (``max_soc_wh − soc_wh``)
|
||||
- ``max_charge_power_w``
|
||||
- ``charging_efficiency``
|
||||
|
||||
Args:
|
||||
wh (float | None):
|
||||
|
||||
@@ -212,15 +212,14 @@ class GeneticSolution(ConfigMixin, GeneticParametersBaseModel):
|
||||
discharge_allowed (bool): Whether discharging is permitted.
|
||||
|
||||
Returns:
|
||||
tuple[BatteryOperationMode, float]:
|
||||
A tuple containing:
|
||||
tuple[BatteryOperationMode, float]: A tuple containing
|
||||
- `BatteryOperationMode`: the representative high-level operation mode.
|
||||
- `float`: the operation factor corresponding to the active signal.
|
||||
|
||||
Notes:
|
||||
- The mapping prioritizes AC charge > DC charge > discharge.
|
||||
- Multiple strategies can produce the same low-level signals; this function
|
||||
returns a representative mode based on a defined priority order.
|
||||
returns a representative mode based on a defined priority order.
|
||||
"""
|
||||
# (0,0,0) → Nothing allowed
|
||||
if ac_charge <= 0.0 and dc_charge <= 0.0 and not discharge_allowed:
|
||||
|
||||
@@ -70,20 +70,23 @@ class PredictionSequence(DataSequence):
|
||||
Derived classes have to provide their own records field with correct record type set.
|
||||
|
||||
Usage:
|
||||
# Example of creating, adding, and using PredictionSequence
|
||||
class DerivedSequence(PredictionSquence):
|
||||
records: List[DerivedPredictionRecord] = Field(default_factory=list, json_schema_extra={ "description": "List of prediction records" })
|
||||
.. code-block:: python
|
||||
|
||||
seq = DerivedSequence()
|
||||
seq.insert(DerivedPredictionRecord(date_time=datetime.now(), temperature=72))
|
||||
seq.insert(DerivedPredictionRecord(date_time=datetime.now(), temperature=75))
|
||||
# Example of creating, adding, and using PredictionSequence
|
||||
class DerivedSequence(PredictionSquence):
|
||||
records: List[DerivedPredictionRecord] = Field(default_factory=list, json_schema_extra={ "description": "List of prediction records" })
|
||||
|
||||
# Convert to JSON and back
|
||||
json_data = seq.to_json()
|
||||
new_seq = DerivedSequence.from_json(json_data)
|
||||
seq = DerivedSequence()
|
||||
seq.insert(DerivedPredictionRecord(date_time=datetime.now(), temperature=72))
|
||||
seq.insert(DerivedPredictionRecord(date_time=datetime.now(), temperature=75))
|
||||
|
||||
# Convert to JSON and back
|
||||
json_data = seq.to_json()
|
||||
new_seq = DerivedSequence.from_json(json_data)
|
||||
|
||||
# Convert to Pandas Series
|
||||
series = seq.key_to_series('temperature')
|
||||
|
||||
# Convert to Pandas Series
|
||||
series = seq.key_to_series('temperature')
|
||||
"""
|
||||
|
||||
# To be overloaded by derived classes.
|
||||
@@ -224,9 +227,10 @@ class PredictionImportProvider(PredictionProvider, DataImportProvider):
|
||||
"""Abstract base class for prediction providers that import prediction data.
|
||||
|
||||
This class is designed to handle prediction data provided in the form of a key-value dictionary.
|
||||
|
||||
- **Keys**: Represent identifiers from the record keys of a specific prediction.
|
||||
- **Values**: Are lists of prediction values starting at a specified `start_datetime`, where
|
||||
each value corresponds to a subsequent time interval (e.g., hourly).
|
||||
each value corresponds to a subsequent time interval (e.g., hourly).
|
||||
|
||||
Subclasses must implement the logic for managing prediction data based on the imported records.
|
||||
"""
|
||||
|
||||
@@ -12,51 +12,53 @@ Classes:
|
||||
PVForecastAkkudoktor: Primary class to manage PV power forecasts, handle data retrieval, caching, and integration with Akkudoktor.net.
|
||||
|
||||
Example:
|
||||
# Set up the configuration with necessary fields for URL generation
|
||||
settings_data = {
|
||||
"general": {
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
},
|
||||
"prediction": {
|
||||
"hours": 48,
|
||||
"historic_hours": 24,
|
||||
},
|
||||
"pvforecast": {
|
||||
"provider": "PVForecastAkkudoktor",
|
||||
"planes": [
|
||||
{
|
||||
"peakpower": 5.0,
|
||||
"surface_azimuth": 170,
|
||||
"surface_tilt": 7,
|
||||
"userhorizon": [20, 27, 22, 20],
|
||||
"inverter_paco": 10000,
|
||||
},
|
||||
{
|
||||
"peakpower": 4.8,
|
||||
"surface_azimuth": 90,
|
||||
"surface_tilt": 7,
|
||||
"userhorizon": [30, 30, 30, 50],
|
||||
"inverter_paco": 10000,
|
||||
}
|
||||
]
|
||||
.. code-block:: python
|
||||
|
||||
# Set up the configuration with necessary fields for URL generation
|
||||
settings_data = {
|
||||
"general": {
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.405,
|
||||
},
|
||||
"prediction": {
|
||||
"hours": 48,
|
||||
"historic_hours": 24,
|
||||
},
|
||||
"pvforecast": {
|
||||
"provider": "PVForecastAkkudoktor",
|
||||
"planes": [
|
||||
{
|
||||
"peakpower": 5.0,
|
||||
"surface_azimuth": 170,
|
||||
"surface_tilt": 7,
|
||||
"userhorizon": [20, 27, 22, 20],
|
||||
"inverter_paco": 10000,
|
||||
},
|
||||
{
|
||||
"peakpower": 4.8,
|
||||
"surface_azimuth": 90,
|
||||
"surface_tilt": 7,
|
||||
"userhorizon": [30, 30, 30, 50],
|
||||
"inverter_paco": 10000,
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Create the config instance from the provided data
|
||||
config = PVForecastAkkudoktorSettings(**settings_data)
|
||||
# Create the config instance from the provided data
|
||||
config = PVForecastAkkudoktorSettings(**settings_data)
|
||||
|
||||
# Initialize the forecast object with the generated configuration
|
||||
forecast = PVForecastAkkudoktor(settings=config)
|
||||
# Initialize the forecast object with the generated configuration
|
||||
forecast = PVForecastAkkudoktor(settings=config)
|
||||
|
||||
# Get an actual forecast
|
||||
forecast.update_data()
|
||||
# Get an actual forecast
|
||||
forecast.update_data()
|
||||
|
||||
# Update the AC power measurement for a specific date and time
|
||||
forecast.update_value(to_datetime(None, to_maxtime=False), "pvforecastakkudoktor_ac_power_measured", 1000.0)
|
||||
# Update the AC power measurement for a specific date and time
|
||||
forecast.update_value(to_datetime(None, to_maxtime=False), "pvforecastakkudoktor_ac_power_measured", 1000.0)
|
||||
|
||||
# Report the DC and AC power forecast along with AC measurements
|
||||
print(forecast.report_ac_power_and_measurement())
|
||||
# Report the DC and AC power forecast along with AC measurements
|
||||
print(forecast.report_ac_power_and_measurement())
|
||||
|
||||
Attributes:
|
||||
hours (int): Number of hours into the future to forecast. Default is 48.
|
||||
|
||||
@@ -117,17 +117,25 @@ class WeatherClearOutside(WeatherProvider):
|
||||
|
||||
Workflow:
|
||||
1. **Retrieve Web Content**: Uses a helper method to fetch or retrieve cached ClearOutside HTML content.
|
||||
|
||||
2. **Extract Forecast Date and Timezone**:
|
||||
- Parses the forecast's start and end dates and the UTC offset from the "Generated" header.
|
||||
- Parses the forecast's start and end dates and the UTC offset from the "Generated"
|
||||
header.
|
||||
|
||||
3. **Extract Weather Data**:
|
||||
- For each day in the 7-day forecast, the function finds detailed weather parameters
|
||||
and associates values for each hour.
|
||||
- Parameters include cloud cover, temperature, humidity, visibility, and precipitation type, among others.
|
||||
and associates values for each hour.
|
||||
- Parameters include cloud cover, temperature, humidity, visibility, and
|
||||
precipitation type, among others.
|
||||
|
||||
4. **Irradiance Calculation**:
|
||||
- Calculates irradiance (GHI, DNI, DHI) values using cloud cover data and the `pvlib` library.
|
||||
- Calculates irradiance (GHI, DNI, DHI) values using cloud cover data and the
|
||||
`pvlib` library.
|
||||
|
||||
5. **Store Data**:
|
||||
- Combines all hourly data into `WeatherDataRecord` objects, with keys
|
||||
standardized according to `WeatherDataRecord` attributes.
|
||||
standardized according to `WeatherDataRecord` attributes.
|
||||
|
||||
"""
|
||||
# Get ClearOutside web content - either from site or cached
|
||||
response = self._request_forecast(force_update=force_update) # type: ignore
|
||||
|
||||
Reference in New Issue
Block a user