Add Documentation 2 (#334)
Add documentation that covers:
- configuration
- prediction

Add Python scripts that support automatic documentation generation for configuration data defined with pydantic.

Adapt the EOS configuration to provide more methods for the REST API and for automatic documentation generation.

Adapt the REST API to allow the EOS configuration file to be loaded and saved.

Sort the REST API endpoints when generating the openapi markdown for the docs.

Move logutil to core/logging to allow logging to be configured by the standard config.

Make Akkudoktor predictions always start extraction of prediction data at the start of the day. Previously, extraction started at the current hour. This supports code that assumes prediction data starts at the start of the day.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
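The automatic documentation generation mentioned above reads field metadata straight from the pydantic models. A minimal sketch of that idea (editor's illustration, not the script added by this commit; ExampleSettings and settings_to_markdown are made-up names, while logging_level_default mirrors the field added below):

from typing import Optional

from pydantic import BaseModel, Field


class ExampleSettings(BaseModel):
    """Stand-in for an EOS settings model."""

    logging_level_default: Optional[str] = Field(
        default=None, description="EOS default logging level."
    )


def settings_to_markdown(model: type[BaseModel]) -> str:
    """Render one markdown table row per configuration field."""
    lines = [
        "| Name | Type | Default | Description |",
        "| ---- | ---- | ------- | ----------- |",
    ]
    for name, field in model.model_fields.items():
        lines.append(
            f"| {name} | {field.annotation} | {field.default} | {field.description or ''} |"
        )
    return "\n".join(lines)


print(settings_to_markdown(ExampleSettings))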
@@ -16,7 +16,7 @@ from typing import Any, ClassVar, Dict, Optional, Type
 from pendulum import DateTime
 from pydantic import computed_field
 
-from akkudoktoreos.utils.logutil import get_logger
+from akkudoktoreos.core.logging import get_logger
 
 logger = get_logger(__name__)
 
@@ -31,13 +31,13 @@ from pydantic import (
 )
 
 from akkudoktoreos.core.coreabc import ConfigMixin, SingletonMixin, StartMixin
+from akkudoktoreos.core.logging import get_logger
 from akkudoktoreos.core.pydantic import (
     PydanticBaseModel,
     PydanticDateTimeData,
     PydanticDateTimeDataFrame,
 )
 from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_duration
-from akkudoktoreos.utils.logutil import get_logger
 
 logger = get_logger(__name__)
 
@@ -583,20 +583,48 @@ class DataSequence(DataBase, MutableSequence):
         # Sort the list by datetime after adding/updating
         self.sort_by_datetime()
 
-    def update_value(self, date: DateTime, key: str, value: Any) -> None:
-        """Updates a specific value in the data record for a given date.
-
-        If a record for the date exists, updates the specified attribute with the new value.
-        Otherwise, appends a new record with the given value and maintains chronological order.
+    @overload
+    def update_value(self, date: DateTime, key: str, value: Any) -> None: ...
+
+    @overload
+    def update_value(self, date: DateTime, values: Dict[str, Any]) -> None: ...
+
+    def update_value(self, date: DateTime, *args: Any, **kwargs: Any) -> None:
+        """Updates specific values in the data record for a given date.
+
+        If a record for the date exists, updates the specified attributes with the new values.
+        Otherwise, appends a new record with the given values and maintains chronological order.
 
         Args:
-            date (datetime): The date for which the weather value is to be added or updated.
-            key (str): The attribute name to be updated.
-            value: The new value to set for the specified attribute.
+            date (datetime): The date for which the values are to be added or updated.
+            key (str), value (Any): Single key-value pair to update
+                OR
+            values (Dict[str, Any]): Dictionary of key-value pairs to update
+                OR
+            **kwargs: Key-value pairs as keyword arguments
+
+        Examples:
+            >>> update_value(date, 'temperature', 25.5)
+            >>> update_value(date, {'temperature': 25.5, 'humidity': 80})
+            >>> update_value(date, temperature=25.5, humidity=80)
         """
-        self._validate_key_writable(key)
+        # Process input arguments into a dictionary
+        values: Dict[str, Any] = {}
+        if len(args) == 2:  # Single key-value pair
+            values[args[0]] = args[1]
+        elif len(args) == 1 and isinstance(args[0], dict):  # Dictionary input
+            values.update(args[0])
+        elif len(args) > 0:  # Invalid number of arguments
+            raise ValueError("Expected either 2 arguments (key, value) or 1 dictionary argument")
+        values.update(kwargs)  # Add any keyword arguments
+
+        # Validate all keys are writable
+        for key in values:
+            self._validate_key_writable(key)
+
         # Ensure datetime objects are normalized
         date = to_datetime(date, to_maxtime=False)
 
         # Check if a record with the given date already exists
         for record in self.records:
             if not isinstance(record.date_time, DateTime):
@@ -604,12 +632,13 @@ class DataSequence(DataBase, MutableSequence):
                     f"Record date '{record.date_time}' is not a datetime, but a `{type(record.date_time).__name__}`."
                 )
             if compare_datetimes(record.date_time, date).equal:
-                # Update the DataRecord with the new value for the specified key
-                setattr(record, key, value)
+                # Update the DataRecord with all new values
+                for key, value in values.items():
+                    setattr(record, key, value)
                 break
         else:
             # Create a new record and append to the list
-            record = self.record_class()(date_time=date, **{key: value})
+            record = self.record_class()(date_time=date, **values)
             self.records.append(record)
         # Sort the list by datetime after adding/updating
         self.sort_by_datetime()
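Isolated from the class, the argument-normalization logic introduced above collapses all three documented call styles into one update dict (editor's sketch mirroring the diff, not EOS code; normalize_updates is an illustrative name):

from typing import Any, Dict


def normalize_updates(*args: Any, **kwargs: Any) -> Dict[str, Any]:
    """Collapse (key, value), dict, and keyword arguments into one update dict."""
    values: Dict[str, Any] = {}
    if len(args) == 2:  # single key-value pair
        values[args[0]] = args[1]
    elif len(args) == 1 and isinstance(args[0], dict):  # dictionary input
        values.update(args[0])
    elif len(args) > 0:
        raise ValueError("Expected either 2 arguments (key, value) or 1 dictionary argument")
    values.update(kwargs)  # keyword arguments are merged in last
    return values


assert normalize_updates("temperature", 25.5) == {"temperature": 25.5}
assert normalize_updates({"temperature": 25.5, "humidity": 80}) == {"temperature": 25.5, "humidity": 80}
assert normalize_updates(temperature=25.5, humidity=80) == {"temperature": 25.5, "humidity": 80}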
@@ -841,7 +870,7 @@ class DataSequence(DataBase, MutableSequence):
         if start_index == 0:
             # No value before start
             # Add dummy value
-            dates.insert(0, dates[0] - interval)
+            dates.insert(0, start_datetime - interval)
             values.insert(0, values[0])
         elif start_index > 1:
             # Truncate all values before latest value before start_datetime
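The one-line change above anchors the padding timestamp at the requested window start instead of at the first recorded date. A tiny illustration of the difference (editor's sketch with made-up dates, not EOS code):

from datetime import datetime, timedelta

interval = timedelta(hours=1)
start_datetime = datetime(2025, 1, 1, 0, 0)

# The first recorded value lies well after the requested start of the window.
dates = [datetime(2025, 1, 1, 3, 0)]
values = [10.0]

# Old: dates[0] - interval -> 02:00, still after start_datetime, so the window start stays uncovered.
# New: start_datetime - interval -> 2024-12-31 23:00, so the padded value covers the window start.
dates.insert(0, start_datetime - interval)
values.insert(0, values[0])
print(dates[0], values[0])  # 2024-12-31 23:00:00 10.0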
@@ -7,12 +7,12 @@ from pydantic import ConfigDict, Field, computed_field, field_validator, model_v
 from typing_extensions import Self
 
 from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin, SingletonMixin
+from akkudoktoreos.core.logging import get_logger
 from akkudoktoreos.core.pydantic import PydanticBaseModel
 from akkudoktoreos.devices.battery import Battery
 from akkudoktoreos.devices.generic import HomeAppliance
 from akkudoktoreos.devices.inverter import Inverter
 from akkudoktoreos.utils.datetimeutil import to_datetime
-from akkudoktoreos.utils.logutil import get_logger
 from akkudoktoreos.utils.utils import NumpyEncoder
 
 logger = get_logger(__name__)
src/akkudoktoreos/core/logabc.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+"""Abstract and base classes for logging."""
+
+import logging
+
+
+def logging_str_to_level(level_str: str) -> int:
+    """Convert log level string to logging level."""
+    if level_str == "DEBUG":
+        level = logging.DEBUG
+    elif level_str == "INFO":
+        level = logging.INFO
+    elif level_str == "WARNING":
+        level = logging.WARNING
+    elif level_str == "CRITICAL":
+        level = logging.CRITICAL
+    elif level_str == "ERROR":
+        level = logging.ERROR
+    else:
+        raise ValueError(f"Unknown logging level: {level_str}")
+    return level
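Usage of the helper is straightforward (editor's sketch):

import logging

from akkudoktoreos.core.logabc import logging_str_to_level

assert logging_str_to_level("DEBUG") == logging.DEBUG
assert logging_str_to_level("WARNING") == logging.WARNING
# Unknown names raise ValueError, e.g. logging_str_to_level("VERBOSE")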
src/akkudoktoreos/core/logging.py (new file, 91 lines)
@@ -0,0 +1,91 @@
+"""Utility functions for handling logging tasks.
+
+Functions:
+----------
+- get_logger: Creates and configures a logger with console and optional rotating file logging.
+
+Example usage:
+--------------
+# Logger setup
+>>> logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
+>>> logger.info("Logging initialized.")
+
+Notes:
+------
+- The logger supports rotating log files to prevent excessive log file size.
+"""
+
+import logging as pylogging
+import os
+from logging.handlers import RotatingFileHandler
+from typing import Optional
+
+from akkudoktoreos.core.logabc import logging_str_to_level
+
+
+def get_logger(
+    name: str,
+    log_file: Optional[str] = None,
+    logging_level: Optional[str] = None,
+    max_bytes: int = 5000000,
+    backup_count: int = 5,
+) -> pylogging.Logger:
+    """Creates and configures a logger with a given name.
+
+    The logger supports logging to both the console and an optional log file. File logging is
+    handled by a rotating file handler to prevent excessive log file size.
+
+    Args:
+        name (str): The name of the logger, typically `__name__` from the calling module.
+        log_file (Optional[str]): Path to the log file for file logging. If None, no file logging is done.
+        logging_level (Optional[str]): Logging level (e.g., "INFO", "DEBUG"). Defaults to "INFO".
+        max_bytes (int): Maximum size in bytes for log file before rotation. Defaults to 5 MB.
+        backup_count (int): Number of backup log files to keep. Defaults to 5.
+
+    Returns:
+        logging.Logger: Configured logger instance.
+
+    Example:
+        logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
+        logger.info("Application started")
+    """
+    # Create a logger with the specified name
+    logger = pylogging.getLogger(name)
+    logger.propagate = True
+    if logging_level is not None:
+        level = logging_str_to_level(logging_level)
+        logger.setLevel(level)
+
+    # The log message format
+    formatter = pylogging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+
+    # Prevent loggers from being added multiple times
+    # There may already be a logger from pytest
+    if not logger.handlers:
+        # Create a console handler with a standard output stream
+        console_handler = pylogging.StreamHandler()
+        if logging_level is not None:
+            console_handler.setLevel(level)
+        console_handler.setFormatter(formatter)
+
+        # Add the console handler to the logger
+        logger.addHandler(console_handler)
+
+    if log_file and len(logger.handlers) < 2:  # We assume a console logger to be the first logger
+        # If a log file path is specified, create a rotating file handler
+
+        # Ensure the log directory exists
+        log_dir = os.path.dirname(log_file)
+        if log_dir and not os.path.exists(log_dir):
+            os.makedirs(log_dir)
+
+        # Create a rotating file handler
+        file_handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
+        if logging_level is not None:
+            file_handler.setLevel(level)
+        file_handler.setFormatter(formatter)
+
+        # Add the file handler to the logger
+        logger.addHandler(file_handler)
+
+    return logger
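Typical usage follows the pattern used throughout this commit; the file path and level below are just examples (editor's sketch):

from akkudoktoreos.core.logging import get_logger

# Console plus rotating file logging at DEBUG level ("app.log" is an example path):
logger = get_logger(__name__, log_file="app.log", logging_level="DEBUG")
logger.info("Logging initialized.")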
src/akkudoktoreos/core/logsettings.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+"""Settings for logging.
+
+Kept in an extra module to avoid cyclic dependencies on package import.
+"""
+
+import logging
+import os
+from typing import Optional
+
+from pydantic import Field, computed_field, field_validator
+
+from akkudoktoreos.config.configabc import SettingsBaseModel
+from akkudoktoreos.core.logabc import logging_str_to_level
+
+
+class LoggingCommonSettings(SettingsBaseModel):
+    """Common settings for logging."""
+
+    logging_level_default: Optional[str] = Field(
+        default=None, description="EOS default logging level."
+    )
+
+    # Validators
+    @field_validator("logging_level_default", mode="after")
+    @classmethod
+    def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
+        if isinstance(value, str) and value.upper() == "NONE":
+            value = None
+        if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
+            # Take default logging level from special environment variable
+            value = env_level
+        if value is None:
+            return None
+        level = logging_str_to_level(value)
+        logging.getLogger().setLevel(level)
+        return value
+
+    # Computed fields
+    @computed_field  # type: ignore[prop-decorator]
+    @property
+    def logging_level_root(self) -> str:
+        """Root logger logging level."""
+        level = logging.getLogger().getEffectiveLevel()
+        level_name = logging.getLevelName(level)
+        return level_name
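How the validator and the computed field above interact, in isolation (editor's sketch; assumes LoggingCommonSettings can be instantiated on its own like a plain pydantic model):

import logging

from akkudoktoreos.core.logsettings import LoggingCommonSettings

settings = LoggingCommonSettings(logging_level_default="WARNING")
# The field validator converts the string and applies it to the root logger ...
assert logging.getLogger().getEffectiveLevel() == logging.WARNING
# ... and the computed field reports the root logger's effective level back as a name.
assert settings.logging_level_root == "WARNING"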
@@ -275,6 +275,7 @@ class PydanticDateTimeDataFrame(PydanticBaseModel):
         )
 
+    @field_validator("tz")
     @classmethod
     def validate_timezone(cls, v: Optional[str]) -> Optional[str]:
         """Validate that the timezone is valid."""
         if v is not None:
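The body of validate_timezone is cut off at the end of this hunk. As a general illustration only (an editor's sketch, not necessarily the commit's implementation), a timezone name can be validated like this:

from typing import Optional
from zoneinfo import ZoneInfo


def check_timezone(v: Optional[str]) -> Optional[str]:
    """Raise if the timezone name is unknown; pass None through unchanged."""
    if v is not None:
        ZoneInfo(v)  # raises zoneinfo.ZoneInfoNotFoundError for unknown names
    return v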