mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2026-03-15 02:56:17 +00:00
feat: add fixed electricity prediction with time window support (#930)
Some checks failed
Bump Version / Bump Version Workflow (push) Has been cancelled
docker-build / platform-excludes (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
Close stale pull requests/issues / Find Stale issues and PRs (push) Has been cancelled
Some checks failed
Bump Version / Bump Version Workflow (push) Has been cancelled
docker-build / platform-excludes (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
Close stale pull requests/issues / Find Stale issues and PRs (push) Has been cancelled
Add a fixed electricity prediction that supports prices per time window.
The time windows may be flexibly defined by day or date.
The prediction documentation is updated to also cover the ElecPriceFixed
provider.
The feature includes several changes that are not directly related to the
electricity price prediction implementation but are necessary to keep
EOS running properly and to test and document the changes.
* feat: add value time windows
Add time windows with an associated float value.
* feat: harden eos measurements endpoints error detection and reporting
Cover more errors that may be raised during endpoint access. Report the
errors including trace information to ease debugging.
* feat: extend server configuration to cover all arguments
Make the argument controlled options also available in server configuration.
* fix: eos config configuration by cli arguments
Move the command line argument handling to config eos so that it is
executed whenever eos config is rebuilt or reset.
* chore: extend measurement endpoint system test
* chore: refactor time windows
Move time windows to configabc as they are only used in configurations.
Also move all tests to test_configabc.
* chore: provide config update errors in eosdash with summarized error text
If there is an update error provide the error text as a summary. On click
provide the full error text.
* chore: force eosdash ip address and port in makefile dev run
Ensure eosdash ip address and port are correctly set for development runs.
Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
@@ -32,6 +32,7 @@ from akkudoktoreos.core.decorators import classproperty
|
||||
from akkudoktoreos.core.emsettings import (
|
||||
EnergyManagementCommonSettings,
|
||||
)
|
||||
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
||||
from akkudoktoreos.core.logsettings import LoggingCommonSettings
|
||||
from akkudoktoreos.core.pydantic import PydanticModelNestedValueMixin, merge_models
|
||||
from akkudoktoreos.core.version import __version__
|
||||
@@ -44,6 +45,7 @@ from akkudoktoreos.prediction.load import LoadCommonSettings
|
||||
from akkudoktoreos.prediction.prediction import PredictionCommonSettings
|
||||
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||
from akkudoktoreos.prediction.weather import WeatherCommonSettings
|
||||
from akkudoktoreos.server.rest.cli import cli_argument_parser
|
||||
from akkudoktoreos.server.server import ServerCommonSettings
|
||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_timezone
|
||||
from akkudoktoreos.utils.utils import UtilsCommonSettings
|
||||
@@ -421,6 +423,62 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
||||
- It ensures that a fallback to a default configuration file is always possible.
|
||||
"""
|
||||
|
||||
def lazy_config_cli_settings() -> dict:
    """CLI settings.

    This function runs at **instance creation**, not class definition. Ensures if ConfigEOS
    is recreated this function is run.

    Returns:
        dict: Nested settings dictionary (e.g. ``{"server": {"host": ...}}``) built
            from the recognized command line arguments. Arguments not given on the
            command line are omitted; unknown arguments are ignored.
    """
    # parse_known_args tolerates unrelated argv entries (e.g. test-runner flags);
    # the unknown-argument list is intentionally discarded.
    args, args_unknown = cli_argument_parser().parse_known_args()  # defaults to sys.ARGV

    # Initialize nested settings dictionary
    settings: dict[str, Any] = {}

    # Helper function to set nested dictionary values
    def set_nested(dict_obj: dict[str, Any], path: str, value: Any) -> None:
        """Set a value in a nested dictionary using dot notation path."""
        parts = path.split(".")
        current = dict_obj
        # Walk (creating as needed) every path component except the last.
        for part in parts[:-1]:
            if part not in current:
                current[part] = {}
            current = current[part]
        current[parts[-1]] = value

    # Server host
    if args.host is not None:
        set_nested(settings, "server.host", args.host)
        logger.debug(f"CLI arg: server.host set to {args.host}")

    # Server port
    if args.port is not None:
        set_nested(settings, "server.port", args.port)
        logger.debug(f"CLI arg: server.port set to {args.port}")

    # Server startup_eosdash
    if args.startup_eosdash is not None:
        set_nested(settings, "server.startup_eosdash", args.startup_eosdash)
        logger.debug(f"CLI arg: server.startup_eosdash set to {args.startup_eosdash}")

    # Logging level (skip if "none" as that means don't change)
    if args.log_level is not None and args.log_level.lower() != "none":
        log_level = args.log_level.upper()
        # Only accept levels known to the project's logging setup; anything
        # else is reported and dropped rather than raising.
        if log_level in LOGGING_LEVELS:
            set_nested(settings, "logging.console_level", log_level)
            logger.debug(f"CLI arg: logging.console_level set to {log_level}")
        else:
            logger.warning(f"Invalid log level '{args.log_level}' ignored")

    if args.run_as_user is not None:
        set_nested(settings, "server.run_as_user", args.run_as_user)
        logger.debug(f"CLI arg: server.run_as_user set to {args.run_as_user}")

    if args.reload is not None:
        set_nested(settings, "server.reload", args.reload)
        logger.debug(f"CLI arg: server.reload set to {args.reload}")

    return settings
|
||||
|
||||
def lazy_config_file_settings() -> dict:
|
||||
"""Config file settings.
|
||||
|
||||
@@ -561,7 +619,8 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
||||
# The settings are all lazyly evaluated at instance creation time to allow for
|
||||
# runtime configuration.
|
||||
setting_sources = [
|
||||
lazy_config_file_settings, # Prio high
|
||||
lazy_config_cli_settings, # Prio high
|
||||
lazy_config_file_settings,
|
||||
lazy_init_settings,
|
||||
lazy_env_settings,
|
||||
lazy_dotenv_settings,
|
||||
|
||||
@@ -1,8 +1,21 @@
|
||||
"""Abstract and base classes for configuration."""
|
||||
|
||||
from typing import Any, ClassVar
|
||||
import calendar
|
||||
from typing import Any, ClassVar, Iterator, Optional, Union
|
||||
|
||||
import numpy as np
|
||||
import pendulum
|
||||
from babel.dates import get_day_names
|
||||
from pydantic import Field, field_serializer, field_validator, model_validator
|
||||
|
||||
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||
from akkudoktoreos.utils.datetimeutil import (
|
||||
Date,
|
||||
DateTime,
|
||||
Duration,
|
||||
Time,
|
||||
to_duration,
|
||||
)
|
||||
|
||||
|
||||
class SettingsBaseModel(PydanticBaseModel):
|
||||
@@ -10,3 +23,801 @@ class SettingsBaseModel(PydanticBaseModel):
|
||||
|
||||
# EOS configuration - set by ConfigEOS
|
||||
config: ClassVar[Any] = None
|
||||
|
||||
|
||||
class TimeWindow(SettingsBaseModel):
    """Model defining a daily or date time window with optional localization support.

    Represents a time interval starting at `start_time` and lasting for `duration`.
    Can restrict applicability to a specific day of the week or a specific calendar date.
    Supports day names in multiple languages via locale-aware parsing.

    Timezone contract:

    ``start_time`` is always **naive** (no ``tzinfo``). It is interpreted as a
    local wall-clock time in whatever timezone the caller's ``date_time`` or
    ``reference_date`` carries. When those arguments are timezone-aware the
    window boundaries are evaluated in that timezone; when they are naive,
    arithmetic is performed as-is (no timezone conversion occurs).

    ``date``, being a calendar ``Date`` object, is inherently timezone-free.

    This design avoids the ambiguity that arises when a stored ``start_time``
    carries its own timezone that differs from the caller's timezone, and keeps
    the model serialisable without timezone state.
    """

    # Wall-clock start of the window; naivety is enforced by the validator below.
    start_time: Time = Field(
        ...,
        json_schema_extra={
            "description": (
                "Naive start time of the time window (time of day, no timezone). "
                "Interpreted in the timezone of the datetime passed to contains() "
                "or earliest_start_time()."
            ),
            "examples": [
                "00:00:00",
            ],
        },
    )
    # Length of the window; flexible input formats are normalized by transform_to_duration.
    duration: Duration = Field(
        ...,
        json_schema_extra={
            "description": "Duration of the time window starting from `start_time`.",
            "examples": [
                "2 hours",
            ],
        },
    )
    # Normalized to an int (0=Monday .. 6=Sunday) by validate_day_of_week_with_locale.
    day_of_week: Optional[Union[int, str]] = Field(
        default=None,
        json_schema_extra={
            "description": (
                "Optional day of the week restriction. "
                "Can be specified as integer (0=Monday to 6=Sunday) or localized weekday name. "
                "If None, applies every day unless `date` is set."
            ),
            "examples": [
                None,
            ],
        },
    )
    date: Optional[Date] = Field(
        default=None,
        json_schema_extra={
            "description": (
                "Optional specific calendar date for the time window. "
                "Naive — matched against the local date of the datetime passed to contains(). "
                "Overrides `day_of_week` if set."
            ),
            "examples": [
                None,
            ],
        },
    )
    locale: Optional[str] = Field(
        default=None,
        json_schema_extra={
            "description": (
                "Locale used to parse weekday names in `day_of_week` when given as string. "
                "If not set, Pendulum's default locale is used. "
                "Examples: 'en', 'de', 'fr', etc."
            ),
            "examples": [
                None,
            ],
        },
    )

    @field_validator("start_time", mode="after")
    @classmethod
    def require_naive_start_time(cls, value: Time) -> Time:
        """Strip timezone from ``start_time`` if present, emitting a debug message.

        ``start_time`` must be naive: it is interpreted as wall-clock time in
        the timezone of the ``date_time`` / ``reference_date`` supplied at call
        time. The project's ``to_time`` helper may silently attach a timezone
        during deserialisation; rather than rejecting such values the validator
        strips the timezone and logs a debug message so the behaviour is
        transparent without breaking normal construction.

        Args:
            value: The ``Time`` value to validate.

        Returns:
            A naive ``Time`` with the same hour / minute / second / microsecond
            but no ``tzinfo``.
        """
        if value.tzinfo is not None:
            # Imported locally to keep the module namespace unchanged.
            import logging

            logging.getLogger(__name__).debug(
                "TimeWindow.start_time received an aware Time (%s); "
                "stripping timezone '%s'. start_time is always interpreted "
                "as wall-clock time in the timezone of the datetime passed "
                "to contains() / earliest_start_time() / latest_start_time().",
                value,
                value.tzinfo,
            )
            value = value.replace(tzinfo=None)
        return value

    @field_validator("duration", mode="before")
    @classmethod
    def transform_to_duration(cls, value: Any) -> Duration:
        """Converts various duration formats into Duration.

        Args:
            value: The value to convert to Duration.

        Returns:
            Duration: The converted Duration object.
        """
        return to_duration(value)

    @model_validator(mode="after")
    def validate_day_of_week_with_locale(self) -> "TimeWindow":
        """Validates and normalizes the `day_of_week` field using the specified locale.

        This method supports both integer (0–6) and string inputs for ``day_of_week``.
        String inputs are matched first against English weekday names (case-insensitive),
        and then against localized weekday names using the provided ``locale``.

        If a valid match is found, ``day_of_week`` is converted to its corresponding
        integer value (0 for Monday through 6 for Sunday).

        Returns:
            TimeWindow: The validated instance with ``day_of_week`` normalized to an integer.

        Raises:
            ValueError: If ``day_of_week`` is an invalid integer (not in 0–6),
                or an unrecognized string (not matching English or localized names),
                or of an unsupported type.
        """
        if self.day_of_week is None:
            return self

        if isinstance(self.day_of_week, int):
            if not 0 <= self.day_of_week <= 6:
                raise ValueError("day_of_week must be in 0 (Monday) to 6 (Sunday)")
            return self

        if isinstance(self.day_of_week, str):
            # Try matching against English names first (lowercase)
            english_days = {name.lower(): i for i, name in enumerate(calendar.day_name)}
            lowercase_value = self.day_of_week.lower()
            if lowercase_value in english_days:
                self.day_of_week = english_days[lowercase_value]
                return self

            # Try localized names
            if self.locale:
                localized_days = {
                    get_day_names("wide", locale=self.locale)[i].lower(): i for i in range(7)
                }
                if lowercase_value in localized_days:
                    self.day_of_week = localized_days[lowercase_value]
                    return self

            raise ValueError(
                f"Invalid weekday name '{self.day_of_week}' for locale '{self.locale}'. "
                f"Expected English names (monday–sunday) or localized names."
            )

        raise ValueError(f"Invalid type for day_of_week: {type(self.day_of_week)}")

    @field_serializer("duration")
    def serialize_duration(self, value: Duration) -> str:
        """Serialize duration to string."""
        return str(value)

    def _window_start_end(self, reference_date: DateTime) -> tuple[DateTime, DateTime]:
        """Get the actual start and end datetimes for the time window on a given date.

        ``start_time`` is naive and is interpreted as a wall-clock time in
        the timezone of ``reference_date``. When ``reference_date`` is
        timezone-aware the resulting window boundaries carry the same timezone;
        when it is naive the arithmetic is performed without timezone conversion.

        Args:
            reference_date: The reference date on which to calculate the window.
                May be timezone-aware or naive.

        Returns:
            tuple[DateTime, DateTime]: Start and end datetimes for the time window,
                in the same timezone as ``reference_date``.
        """
        # start_time is always naive: just replace the time components on
        # reference_date directly. The result inherits reference_date's timezone
        # (or lack thereof) automatically.
        start = reference_date.replace(
            hour=self.start_time.hour,
            minute=self.start_time.minute,
            second=self.start_time.second,
            microsecond=self.start_time.microsecond,
        )
        end = start + self.duration
        return start, end

    def contains(self, date_time: DateTime, duration: Optional[Duration] = None) -> bool:
        """Check whether a datetime (and optional duration) fits within the time window.

        ``start_time`` is naive and is interpreted as wall-clock time in the
        timezone of ``date_time``. Day-of-week and date constraints are
        evaluated against ``date_time`` after any timezone conversion has
        been applied.

        Args:
            date_time: The datetime to test. May be timezone-aware or naive.
            duration: An optional duration that must fit entirely within the
                time window starting from ``date_time``.

        Returns:
            bool: True if the datetime (and optional duration) is fully
                contained in the time window, False otherwise.
        """
        # Date and weekday constraints are checked against date_time as-is;
        # since start_time is naive it is always interpreted in date_time's tz.
        if self.date and date_time.date() != self.date:
            return False

        if self.day_of_week is not None and date_time.day_of_week != self.day_of_week:
            return False

        start, end = self._window_start_end(date_time)

        # Half-open interval: the window's end instant itself is excluded.
        if not (start <= date_time < end):
            return False

        if duration is not None:
            return date_time + duration <= end

        return True

    def earliest_start_time(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> Optional[DateTime]:
        """Get the earliest datetime that allows a duration to fit within the time window.

        Args:
            duration: The duration that needs to fit within the window.
            reference_date: The date to check for the time window. Defaults to today.

        Returns:
            The earliest start time for the duration, or None if it doesn't fit.
        """
        if reference_date is None:
            reference_date = pendulum.today()

        if self.date and reference_date.date() != self.date:
            return None

        if self.day_of_week is not None and reference_date.day_of_week != self.day_of_week:
            return None

        if duration > self.duration:
            return None

        # Duration fits, so starting right at the window start always works.
        window_start, _ = self._window_start_end(reference_date)
        return window_start

    def latest_start_time(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> Optional[DateTime]:
        """Get the latest datetime that allows a duration to fit within the time window.

        Args:
            duration: The duration that needs to fit within the window.
            reference_date: The date to check for the time window. Defaults to today.

        Returns:
            The latest start time for the duration, or None if it doesn't fit.
        """
        if reference_date is None:
            reference_date = pendulum.today()

        if self.date and reference_date.date() != self.date:
            return None

        if self.day_of_week is not None and reference_date.day_of_week != self.day_of_week:
            return None

        if duration > self.duration:
            return None

        window_start, window_end = self._window_start_end(reference_date)
        latest_start = window_end - duration

        if latest_start < window_start:
            return None

        return latest_start

    def can_fit_duration(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> bool:
        """Check if a duration can fit within the time window on a given date.

        Args:
            duration: The duration to check.
            reference_date: The date to check for the time window. Defaults to today.

        Returns:
            bool: True if the duration can fit, False otherwise.
        """
        return self.earliest_start_time(duration, reference_date) is not None

    def available_duration(self, reference_date: Optional[DateTime] = None) -> Optional[Duration]:
        """Get the total available duration for the time window on a given date.

        Args:
            reference_date: The date to check for the time window. Defaults to today.

        Returns:
            The available duration, or None if the date doesn't match constraints.
        """
        if reference_date is None:
            reference_date = pendulum.today()

        if self.date and reference_date.date() != self.date:
            return None

        if self.day_of_week is not None and reference_date.day_of_week != self.day_of_week:
            return None

        return self.duration
|
||||
|
||||
|
||||
class TimeWindowSequence(SettingsBaseModel):
    """Model representing a sequence of time windows with collective operations.

    Manages multiple TimeWindow objects and provides methods to work with them
    as a cohesive unit for scheduling and availability checking.
    """

    windows: list[TimeWindow] = Field(
        default_factory=list,
        json_schema_extra={"description": "List of TimeWindow objects that make up this sequence."},
    )

    def __iter__(self) -> Iterator[TimeWindow]:
        """Allow iteration over the time windows."""
        return iter(self.windows)

    def __len__(self) -> int:
        """Return the number of time windows in the sequence."""
        return len(self.windows)

    def __getitem__(self, index: int) -> TimeWindow:
        """Allow indexing into the time windows."""
        return self.windows[index]

    def contains(self, date_time: DateTime, duration: Optional[Duration] = None) -> bool:
        """Check if any time window in the sequence contains the given datetime and duration.

        Args:
            date_time: The datetime to test.
            duration: An optional duration that must fit entirely within one of the time windows.

        Returns:
            bool: True if any time window contains the datetime (and optional duration), False if no windows.
        """
        return any(window.contains(date_time, duration) for window in self.windows)

    def earliest_start_time(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> Optional[DateTime]:
        """Get the earliest datetime across all windows that allows a duration to fit.

        Args:
            duration: The duration that needs to fit within a window.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            The earliest start time across all windows, or None if no window can fit the duration.
        """
        if not self.windows:
            return None

        if reference_date is None:
            reference_date = pendulum.today()

        earliest_times = [
            t
            for window in self.windows
            if (t := window.earliest_start_time(duration, reference_date)) is not None
        ]
        return min(earliest_times) if earliest_times else None

    def latest_start_time(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> Optional[DateTime]:
        """Get the latest datetime across all windows that allows a duration to fit.

        Args:
            duration: The duration that needs to fit within a window.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            The latest start time across all windows, or None if no window can fit the duration.
        """
        if not self.windows:
            return None

        if reference_date is None:
            reference_date = pendulum.today()

        latest_times = [
            t
            for window in self.windows
            if (t := window.latest_start_time(duration, reference_date)) is not None
        ]
        return max(latest_times) if latest_times else None

    def can_fit_duration(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> bool:
        """Check if the duration can fit within any time window in the sequence.

        Args:
            duration: The duration to check.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            bool: True if any window can fit the duration, False if no windows.
        """
        return any(window.can_fit_duration(duration, reference_date) for window in self.windows)

    def available_duration(self, reference_date: Optional[DateTime] = None) -> Optional[Duration]:
        """Get the total available duration across all applicable windows.

        Args:
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            The sum of available durations from all applicable windows, or None if no windows apply.
        """
        if not self.windows:
            return None

        if reference_date is None:
            reference_date = pendulum.today()

        durations = [
            d
            for window in self.windows
            if (d := window.available_duration(reference_date)) is not None
        ]
        if not durations:
            return None
        # sum() with a zero-Duration start keeps the result a Duration instance.
        return sum(durations, Duration())

    def get_applicable_windows(self, reference_date: Optional[DateTime] = None) -> list[TimeWindow]:
        """Get all windows that apply to the given reference date.

        Args:
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            List of TimeWindow objects that apply to the reference date.
        """
        if reference_date is None:
            reference_date = pendulum.today()

        return [
            window
            for window in self.windows
            if window.available_duration(reference_date) is not None
        ]

    def find_windows_for_duration(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> list[TimeWindow]:
        """Find all windows that can accommodate the given duration.

        Args:
            duration: The duration that needs to fit.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            List of TimeWindow objects that can fit the duration.
        """
        if reference_date is None:
            reference_date = pendulum.today()

        return [
            window for window in self.windows if window.can_fit_duration(duration, reference_date)
        ]

    def get_all_possible_start_times(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> list[tuple[DateTime, DateTime, TimeWindow]]:
        """Get all possible start time ranges for a duration across all windows.

        Args:
            duration: The duration that needs to fit.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            List of tuples containing (earliest_start, latest_start, window) for each
            window that can accommodate the duration.
        """
        if reference_date is None:
            reference_date = pendulum.today()

        result = []
        for window in self.windows:
            earliest = window.earliest_start_time(duration, reference_date)
            latest = window.latest_start_time(duration, reference_date)
            if earliest is not None and latest is not None:
                result.append((earliest, latest, window))
        return result

    def to_array(
        self,
        start_datetime: DateTime,
        end_datetime: DateTime,
        interval: Duration,
        dropna: bool = True,
        boundary: str = "context",
        align_to_interval: bool = True,
    ) -> np.ndarray:
        """Return a 1-D NumPy array indicating window coverage over a time grid.

        The time grid is constructed from ``start_datetime`` to ``end_datetime``
        (exclusive) in steps of ``interval``, matching the ``key_to_array``
        signature used by the prediction store. Each element is ``1.0`` when
        the corresponding step falls inside any window in this sequence, and
        ``0.0`` otherwise.

        Parameters mirror ``key_to_array`` so that ``to_array`` can be used as
        a drop-in source in the same contexts:

        Args:
            start_datetime: First step of the time grid (inclusive).
            end_datetime: Upper bound of the time grid (exclusive).
            interval: Fixed step size between consecutive grid points. Must be
                strictly positive.
            dropna: Unused for ``TimeWindowSequence`` (no NaN values are
                produced — every step is either ``0.0`` or ``1.0``). Accepted
                for signature compatibility.
            boundary: Controls range enforcement. Only ``"context"`` is
                currently supported; the output is always clipped to
                ``[start_datetime, end_datetime)``.
            align_to_interval: When ``True``, ``start_datetime`` is floored to
                the nearest interval boundary in wall-clock time before
                generating the grid (e.g. 08:10 with a 1-hour interval becomes
                08:00). The timezone (or naivety) of ``start_datetime`` is
                preserved exactly — no UTC conversion is performed. When
                ``False``, ``start_datetime`` is used as-is.

        Returns:
            ``np.ndarray`` of shape ``(n_steps,)`` with ``dtype=float64``.
            ``1.0`` at position ``i`` means step ``i`` is inside a window;
            ``0.0`` means it is not.

        Raises:
            ValueError: If ``boundary`` is not ``"context"``, or if ``interval``
                is zero or negative.
        """
        if boundary != "context":
            raise ValueError(f"Unsupported boundary {boundary!r}. Only 'context' is supported.")

        interval_s = interval.total_seconds()

        # Guard against an infinite loop: a zero or negative interval would
        # never advance `current` past `end_datetime` in the grid loop below.
        if interval_s <= 0:
            raise ValueError(f"interval must be strictly positive, got {interval!r}.")

        if align_to_interval:
            # Floor purely in wall-clock seconds so the timezone (or naivety)
            # of start_datetime is never touched and no UTC conversion occurs.
            # This is correct regardless of the machine's local timezone.
            wall_s = (
                start_datetime.hour * 3600
                + start_datetime.minute * 60
                + start_datetime.second
                + start_datetime.microsecond / 1_000_000
            )
            remainder_s = wall_s % interval_s
            if remainder_s:
                start_datetime = start_datetime.subtract(seconds=remainder_s)

        result = []
        current = start_datetime
        while current < end_datetime:
            result.append(1.0 if self.contains(current) else 0.0)
            current = current.add(seconds=interval_s)

        return np.array(result, dtype=np.float64)

    def add_window(self, window: TimeWindow) -> None:
        """Add a new time window to the sequence.

        Args:
            window: The TimeWindow to add.
        """
        self.windows.append(window)

    def remove_window(self, index: int) -> TimeWindow:
        """Remove a time window from the sequence by index.

        Args:
            index: The index of the window to remove.

        Returns:
            The removed TimeWindow.

        Raises:
            IndexError: If the index is out of range.
        """
        # Explicit check keeps the documented error message stable even if
        # list.pop's own message wording changes.
        if not self.windows:
            raise IndexError("pop from empty list")
        return self.windows.pop(index)

    def clear_windows(self) -> None:
        """Remove all windows from the sequence."""
        self.windows.clear()

    def sort_windows_by_start_time(self, reference_date: Optional[DateTime] = None) -> None:
        """Sort the windows by their start time on the given reference date.

        Windows that don't apply to the reference date are placed at the end.

        Args:
            reference_date: The date to use for sorting. Defaults to today.
        """
        if not self.windows:
            return

        if reference_date is None:
            reference_date = pendulum.today()

        def sort_key(window: TimeWindow) -> tuple[int, DateTime]:
            # (0, start) sorts applicable windows first by start time;
            # (1, reference_date) pushes non-applicable windows to the end.
            start_time = window.earliest_start_time(Duration(), reference_date)
            if start_time is None:
                return (1, reference_date)
            return (0, start_time)

        self.windows.sort(key=sort_key)
|
||||
|
||||
|
||||
class ValueTimeWindow(TimeWindow):
    """Value applicable during a specific time window.

    This model extends `TimeWindow` by associating a value with the defined time interval.
    """

    # Non-negative value (e.g. a price) that applies while the window is
    # active; None means the window carries no value.
    value: Optional[float] = Field(
        default=None,
        ge=0,
        json_schema_extra={
            "description": ("Value applicable during this time window."),
            "examples": [0.288],
        },
    )
|
||||
|
||||
|
||||
class ValueTimeWindowSequence(TimeWindowSequence):
    """Sequence of value time windows.

    This model specializes `TimeWindowSequence` to ensure that all
    contained windows are instances of `ValueTimeWindow`.
    It provides the full set of sequence operations (containment checks,
    availability, start time calculations) for value windows.
    """

    # Narrowed from the base class: every entry must be a ValueTimeWindow.
    windows: list[ValueTimeWindow] = Field(
        default_factory=list,
        json_schema_extra={
            "description": (
                "Ordered list of value time windows. "
                "Each window defines a time interval and an associated value."
            ),
        },
    )

    def get_value_for_datetime(self, dt: DateTime) -> float:
        """Get value for a specific datetime.

        Args:
            dt: Datetime to get value for.

        Returns:
            float: value or 0.0 if no window matches.
        """
        # First matching window wins. NOTE: a matching window whose value is
        # None also yields 0.0 here (the `or` falls through).
        for window in self.windows:
            if window.contains(dt):
                return window.value or 0.0
        return 0.0

    def to_array(
        self,
        start_datetime: DateTime,
        end_datetime: DateTime,
        interval: Duration,
        dropna: bool = True,
        boundary: str = "context",
        align_to_interval: bool = True,
    ) -> np.ndarray:
        """Return a 1-D NumPy array of window values over a time grid.

        The time grid is constructed from ``start_datetime`` to ``end_datetime``
        (exclusive) in steps of ``interval``, matching the ``key_to_array``
        signature used by the prediction store. Each element holds the
        ``value`` of the first matching window at that step, ``0.0`` when no
        window matches, or ``NaN`` when the matching window has ``value=None``
        and ``dropna=False``.

        When ``dropna=True`` steps whose matching window has ``value=None`` are
        omitted from the output entirely (the array is shorter than the full
        grid), consistent with the ``key_to_array`` ``dropna`` contract.

        Parameters mirror ``key_to_array`` so that ``to_array`` can be used as
        a drop-in source in the same contexts:

        Args:
            start_datetime: First step of the time grid (inclusive).
            end_datetime: Upper bound of the time grid (exclusive).
            interval: Fixed step size between consecutive grid points.
            dropna: When ``True``, steps whose matching window carries
                ``value=None`` are dropped from the output array. When
                ``False``, those steps emit ``NaN``.
            boundary: Controls range enforcement. Only ``"context"`` is
                currently supported; the output is always clipped to
                ``[start_datetime, end_datetime)``.
            align_to_interval: When ``True``, ``start_datetime`` is floored to
                the nearest interval boundary in wall-clock time before
                generating the grid (e.g. 08:10 with a 1-hour interval becomes
                08:00). The timezone (or naivety) of ``start_datetime`` is
                preserved exactly — no UTC conversion is performed. When
                ``False``, ``start_datetime`` is used as-is.

        Returns:
            ``np.ndarray`` of shape ``(n_steps,)`` with ``dtype=float64``.
            Positive values are window values; ``0.0`` means no window matched;
            ``NaN`` means a window matched but its value was ``None`` (only
            when ``dropna=False``).

        Raises:
            ValueError: If ``boundary`` is not ``"context"``.
        """
        if boundary != "context":
            raise ValueError(f"Unsupported boundary {boundary!r}. Only 'context' is supported.")

        interval_s = interval.total_seconds()

        if align_to_interval and interval_s > 0:
            # Floor purely in wall-clock seconds so the timezone (or naivety)
            # of start_datetime is never touched and no UTC conversion occurs.
            # This is correct regardless of the machine's local timezone.
            wall_s = (
                start_datetime.hour * 3600
                + start_datetime.minute * 60
                + start_datetime.second
                + start_datetime.microsecond / 1_000_000
            )
            remainder_s = wall_s % interval_s
            if remainder_s:
                start_datetime = start_datetime.subtract(seconds=remainder_s)

        result = []
        current = start_datetime
        while current < end_datetime:
            # Resolve the first matching window for this grid step.
            step_value: Optional[float] = None
            matched = False
            for window in self.windows:
                if window.contains(current):
                    step_value = window.value
                    matched = True
                    break

            if not matched:
                result.append(0.0)
            elif step_value is None:
                if not dropna:
                    result.append(float("nan"))
                # else: omit this step entirely (dropna=True)
            else:
                result.append(step_value)

            current = current.add(seconds=interval_s)

        return np.array(result, dtype=np.float64)
|
||||
|
||||
@@ -680,6 +680,10 @@ class DataSequence(DataABC, DatabaseRecordProtocolMixin[DataRecord]):
|
||||
The matching DataRecord, the nearest DataRecord within the specified time window
|
||||
if no exact match exists, or ``None`` if no suitable record is found.
|
||||
"""
|
||||
# Ensure target_datetime is a datetime object
|
||||
if not isinstance(target_datetime, DateTime):
|
||||
target_datetime = to_datetime(target_datetime)
|
||||
|
||||
# Ensure datetime objects are normalized
|
||||
db_target = DatabaseTimestamp.from_datetime(target_datetime)
|
||||
|
||||
@@ -702,6 +706,10 @@ class DataSequence(DataABC, DatabaseRecordProtocolMixin[DataRecord]):
|
||||
Raises:
|
||||
ValueError: If ``time_window`` is negative.
|
||||
"""
|
||||
# Ensure target_datetime is a datetime object
|
||||
if not isinstance(target_datetime, DateTime):
|
||||
target_datetime = to_datetime(target_datetime)
|
||||
|
||||
# Ensure datetime objects are normalized
|
||||
db_target = DatabaseTimestamp.from_datetime(target_datetime)
|
||||
|
||||
@@ -780,6 +788,10 @@ class DataSequence(DataABC, DatabaseRecordProtocolMixin[DataRecord]):
|
||||
for key in values:
|
||||
self._validate_key_writable(key)
|
||||
|
||||
# Ensure date is a datetime object
|
||||
if not isinstance(date, DateTime):
|
||||
date = to_datetime(date)
|
||||
|
||||
# Ensure datetime objects are normalized
|
||||
db_target = DatabaseTimestamp.from_datetime(date)
|
||||
|
||||
@@ -1083,6 +1095,8 @@ class DataSequence(DataABC, DatabaseRecordProtocolMixin[DataRecord]):
|
||||
interval = to_duration("1 hour")
|
||||
resample_freq = "1h"
|
||||
else:
|
||||
# Ensure interval is normalized
|
||||
interval = to_duration(interval)
|
||||
resample_freq = to_duration(interval, as_string="pandas")
|
||||
|
||||
# Extend window for context resampling
|
||||
|
||||
@@ -9,13 +9,13 @@ from loguru import logger
|
||||
from numpydantic import NDArray, Shape
|
||||
from pydantic import Field, computed_field, field_validator, model_validator
|
||||
|
||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||
from akkudoktoreos.config.configabc import SettingsBaseModel, TimeWindowSequence
|
||||
from akkudoktoreos.core.cache import CacheFileStore
|
||||
from akkudoktoreos.core.coreabc import ConfigMixin, SingletonMixin
|
||||
from akkudoktoreos.core.emplan import ResourceStatus
|
||||
from akkudoktoreos.core.pydantic import ConfigDict, PydanticBaseModel
|
||||
from akkudoktoreos.devices.devicesabc import DevicesBaseSettings
|
||||
from akkudoktoreos.utils.datetimeutil import DateTime, TimeWindowSequence, to_datetime
|
||||
from akkudoktoreos.utils.datetimeutil import DateTime, to_datetime
|
||||
|
||||
# Default charge rates for battery
|
||||
BATTERY_DEFAULT_CHARGE_RATES: list[float] = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
|
||||
|
||||
@@ -1,13 +1,8 @@
|
||||
import numpy as np
|
||||
|
||||
from akkudoktoreos.config.configabc import TimeWindow, TimeWindowSequence
|
||||
from akkudoktoreos.optimization.genetic.geneticdevices import HomeApplianceParameters
|
||||
from akkudoktoreos.utils.datetimeutil import (
|
||||
TimeWindow,
|
||||
TimeWindowSequence,
|
||||
to_datetime,
|
||||
to_duration,
|
||||
to_time,
|
||||
)
|
||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration, to_time
|
||||
|
||||
|
||||
class HomeAppliance:
|
||||
|
||||
@@ -4,8 +4,8 @@ from typing import Optional
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from akkudoktoreos.config.configabc import TimeWindowSequence
|
||||
from akkudoktoreos.optimization.genetic.geneticabc import GeneticParametersBaseModel
|
||||
from akkudoktoreos.utils.datetimeutil import TimeWindowSequence
|
||||
|
||||
|
||||
class DeviceParameters(GeneticParametersBaseModel):
|
||||
|
||||
@@ -8,6 +8,7 @@ from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
|
||||
from akkudoktoreos.prediction.elecpriceenergycharts import (
|
||||
ElecPriceEnergyChartsCommonSettings,
|
||||
)
|
||||
from akkudoktoreos.prediction.elecpricefixed import ElecPriceFixedCommonSettings
|
||||
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
|
||||
|
||||
|
||||
@@ -37,6 +38,7 @@ class ElecPriceCommonSettings(SettingsBaseModel):
|
||||
"examples": ["ElecPriceAkkudoktor"],
|
||||
},
|
||||
)
|
||||
|
||||
charges_kwh: Optional[float] = Field(
|
||||
default=None,
|
||||
ge=0,
|
||||
@@ -45,6 +47,7 @@ class ElecPriceCommonSettings(SettingsBaseModel):
|
||||
"examples": [0.21],
|
||||
},
|
||||
)
|
||||
|
||||
vat_rate: Optional[float] = Field(
|
||||
default=1.19,
|
||||
ge=0,
|
||||
@@ -54,6 +57,11 @@ class ElecPriceCommonSettings(SettingsBaseModel):
|
||||
},
|
||||
)
|
||||
|
||||
elecpricefixed: ElecPriceFixedCommonSettings = Field(
|
||||
default_factory=ElecPriceFixedCommonSettings,
|
||||
json_schema_extra={"description": "Fixed electricity price provider settings."},
|
||||
)
|
||||
|
||||
elecpriceimport: ElecPriceImportCommonSettings = Field(
|
||||
default_factory=ElecPriceImportCommonSettings,
|
||||
json_schema_extra={"description": "Import provider settings."},
|
||||
|
||||
111
src/akkudoktoreos/prediction/elecpricefixed.py
Normal file
111
src/akkudoktoreos/prediction/elecpricefixed.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""Provides fixed price electricity price data."""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from loguru import logger
|
||||
from pydantic import Field
|
||||
|
||||
from akkudoktoreos.config.configabc import (
|
||||
SettingsBaseModel,
|
||||
ValueTimeWindowSequence,
|
||||
)
|
||||
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
|
||||
from akkudoktoreos.utils.datetimeutil import to_duration
|
||||
|
||||
|
||||
class ElecPriceFixedCommonSettings(SettingsBaseModel):
    """Common configuration settings for fixed electricity pricing.

    This model defines a fixed electricity price schedule using a sequence
    of time windows. Each window specifies a time interval and the electricity
    price applicable during that interval.
    """

    # Price schedule expressed as value time windows; an empty sequence
    # means no fixed pricing is configured.
    time_windows: ValueTimeWindowSequence = Field(
        default_factory=ValueTimeWindowSequence,
        json_schema_extra={
            "description": (
                "Sequence of time windows defining the fixed "
                "price schedule. If not provided, no fixed pricing is applied."
            ),
            "examples": [
                {
                    "windows": [
                        {"start_time": "00:00", "duration": "8 hours", "value": 0.288},
                        {"start_time": "08:00", "duration": "16 hours", "value": 0.34},
                    ],
                }
            ],
        },
    )
|
||||
|
||||
|
||||
class ElecPriceFixed(ElecPriceProvider):
    """Fixed price electricity price data.

    ElecPriceFixed is a singleton-based class that retrieves electricity price data
    from a fixed schedule defined by time windows.

    The provider generates electricity prices based on the configured time windows
    at the optimization interval granularity. For each grid step in the forecast
    period, it determines which time window applies and assigns the corresponding
    price.

    Attributes:
        time_windows: Sequence of time windows with associated electricity prices
            (read from the EOS configuration).
    """

    @classmethod
    def provider_id(cls) -> str:
        """Return the unique identifier for the ElecPriceFixed provider."""
        return "ElecPriceFixed"

    def _update_data(self, force_update: Optional[bool] = False) -> None:
        """Update electricity price data from fixed schedule.

        Generates electricity prices based on the configured time windows
        at the optimization interval granularity. The price sequence starts
        synchronized to the wall clock at the next full interval boundary.

        Args:
            force_update: If True, forces update even if data exists.

        Raises:
            ValueError: If no time windows are configured.
        """
        import math  # local: NaN detection for windows without a value

        time_windows_seq = self.config.elecprice.elecpricefixed.time_windows

        if time_windows_seq is None or not time_windows_seq.windows:
            error_msg = "No time windows configured for fixed electricity price"
            logger.error(error_msg)
            raise ValueError(error_msg)

        start_datetime = self.ems_start_datetime
        interval_seconds = self.config.optimization.interval
        total_hours = self.config.prediction.hours
        interval = to_duration(interval_seconds)

        end_datetime = start_datetime.add(hours=total_hours)

        logger.debug(
            f"Generating fixed electricity prices for {total_hours} hours "
            f"starting at {start_datetime}"
        )

        # Replicate to_array's align_to_interval flooring locally so that the
        # timestamps written below coincide with the grid points the array is
        # generated for. to_array floors only its internal copy of
        # start_datetime; without this, every stored timestamp would be offset
        # by the sub-interval remainder whenever start is not on a boundary.
        if interval_seconds > 0:
            wall_s = (
                start_datetime.hour * 3600
                + start_datetime.minute * 60
                + start_datetime.second
                + start_datetime.microsecond / 1_000_000
            )
            remainder_s = wall_s % interval_seconds
            if remainder_s:
                start_datetime = start_datetime.subtract(seconds=remainder_s)

        # Build the full price array in one call — kWh values aligned to the
        # optimization grid. dropna=False keeps exactly one entry per grid
        # step (NaN when the matching window carries no value) so that the
        # index -> timestamp mapping below stays correct; with dropna=True the
        # array could be shorter than the grid and the indices would drift.
        prices_kwh = time_windows_seq.to_array(
            start_datetime=start_datetime,
            end_datetime=end_datetime,
            interval=interval,
            dropna=False,
            boundary="context",
            align_to_interval=True,
        )

        # Convert kWh → Wh and store one entry per interval step, skipping
        # steps whose matching window has no value.
        stored = 0
        for idx, price_kwh in enumerate(prices_kwh):
            if math.isnan(price_kwh):
                continue
            current_dt = start_datetime.add(seconds=idx * interval_seconds)
            self.update_value(current_dt, "elecprice_marketprice_wh", price_kwh / 1000.0)
            stored += 1

        logger.debug(f"Successfully generated {stored} fixed electricity price entries")
|
||||
@@ -33,6 +33,7 @@ from pydantic import Field
|
||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||
from akkudoktoreos.prediction.elecpriceakkudoktor import ElecPriceAkkudoktor
|
||||
from akkudoktoreos.prediction.elecpriceenergycharts import ElecPriceEnergyCharts
|
||||
from akkudoktoreos.prediction.elecpricefixed import ElecPriceFixed
|
||||
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImport
|
||||
from akkudoktoreos.prediction.feedintarifffixed import FeedInTariffFixed
|
||||
from akkudoktoreos.prediction.feedintariffimport import FeedInTariffImport
|
||||
@@ -72,6 +73,7 @@ class PredictionCommonSettings(SettingsBaseModel):
|
||||
# Initialize forecast providers, all are singletons.
|
||||
elecprice_akkudoktor = ElecPriceAkkudoktor()
|
||||
elecprice_energy_charts = ElecPriceEnergyCharts()
|
||||
elecprice_fixed = ElecPriceFixed()
|
||||
elecprice_import = ElecPriceImport()
|
||||
feedintariff_fixed = FeedInTariffFixed()
|
||||
feedintariff_import = FeedInTariffImport()
|
||||
@@ -91,6 +93,7 @@ def prediction_providers() -> list[
|
||||
Union[
|
||||
ElecPriceAkkudoktor,
|
||||
ElecPriceEnergyCharts,
|
||||
ElecPriceFixed,
|
||||
ElecPriceImport,
|
||||
FeedInTariffFixed,
|
||||
FeedInTariffImport,
|
||||
@@ -110,6 +113,7 @@ def prediction_providers() -> list[
|
||||
global \
|
||||
elecprice_akkudoktor, \
|
||||
elecprice_energy_charts, \
|
||||
elecprice_fixed, \
|
||||
elecprice_import, \
|
||||
feedintariff_fixed, \
|
||||
feedintariff_import, \
|
||||
@@ -128,6 +132,7 @@ def prediction_providers() -> list[
|
||||
return [
|
||||
elecprice_akkudoktor,
|
||||
elecprice_energy_charts,
|
||||
elecprice_fixed,
|
||||
elecprice_import,
|
||||
feedintariff_fixed,
|
||||
feedintariff_import,
|
||||
@@ -151,6 +156,7 @@ class Prediction(PredictionContainer):
|
||||
Union[
|
||||
ElecPriceAkkudoktor,
|
||||
ElecPriceEnergyCharts,
|
||||
ElecPriceFixed,
|
||||
ElecPriceImport,
|
||||
FeedInTariffFixed,
|
||||
FeedInTariffImport,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
import re
|
||||
from typing import Any, Callable, Optional, Union
|
||||
|
||||
from fasthtml.common import H1, Button, Div, Li, Select
|
||||
@@ -165,6 +166,44 @@ def ConfigButton(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any
|
||||
return Button(*c, submit=False, **kwargs)
|
||||
|
||||
|
||||
def UpdateError(error_text: str) -> Alert:
    """Renders a compact error with collapsible full detail.

    Extracts the short pydantic validation message (text after
    'validation error for ...') as the summary. Falls back to
    the first line if no match is found.

    Args:
        error_text: The full error string from a config update failure.

    Returns:
        Alert: A collapsible error element with error styling.
    """
    short = None
    match = re.search(r"validation error for [^\n]+\n([^\n]+)\n\s+([^\[]+)", error_text)
    if match:
        short = f"Validation error: {match.group(1).strip()}: {match.group(2).strip()}"
    if not short:
        # Guard against an empty error string: splitlines() returns [] and
        # indexing [0] would raise IndexError.
        lines = error_text.splitlines()
        short = lines[0].strip() if lines else "Unknown error"

    return Alert(
        Details(
            Summary(
                DivLAligned(
                    UkIcon("triangle-alert"),
                    P(short, cls="text-sm ml-2"),
                ),
                cls="list-none cursor-pointer",
            ),
            # Full original error text, collapsible and scrollable.
            Pre(
                Code(error_text, cls="language-python"),
                cls="rounded-lg bg-muted p-3 mt-2 max-h-[30vh] overflow-y-auto overflow-x-hidden whitespace-pre-wrap text-xs",
            ),
        ),
        cls=AlertT.error,
    )
|
||||
|
||||
|
||||
def make_config_update_form() -> Callable[[str, str], Grid]:
|
||||
"""Factory for a form that sets a single configuration value.
|
||||
|
||||
@@ -456,6 +495,199 @@ def make_config_update_map_form(
|
||||
return ConfigUpdateMapForm
|
||||
|
||||
|
||||
def make_config_update_time_windows_windows_form(
    value_description: Optional[str] = None,
) -> Callable[[str, str], Grid]:
    """Factory for a form that edits the windows field of a TimeWindowSequence.

    Args:
        value_description: If given, a numeric value field is included in the form
            and shown in the column header (e.g. "electricity_price_kwh [Amt/kWh]").
            If None, no value field is rendered.
    """

    def ConfigUpdateTimeWindowsWindowsForm(config_name: str, value: str) -> Grid:
        # Derive a DOM-safe id from the dotted config path.
        config_id = config_name.lower().replace(".", "-")

        # Parse the current windows list from the JSON-encoded config value;
        # any malformed input degrades to an empty list.
        try:
            parsed = json.loads(value)
            current_windows: list[dict] = parsed if isinstance(parsed, list) else []
        except (json.JSONDecodeError, AttributeError):
            current_windows = []

        DOW_LABELS = [
            "0 – Monday",
            "1 – Tuesday",
            "2 – Wednesday",
            "3 – Thursday",
            "4 – Friday",
            "5 – Saturday",
            "6 – Sunday",
        ]

        # ---- Existing windows rows ----
        window_rows = []
        for idx, win in enumerate(current_windows):
            start_time = win.get("start_time", "")
            duration = win.get("duration", "")
            dow = win.get("day_of_week")
            date_val = win.get("date")
            locale_val = win.get("locale")

            dow_str = f" dow={dow}" if dow is not None else ""
            date_str = f" date={date_val}" if date_val else ""
            locale_str = f" locale={locale_val}" if locale_val else ""

            if value_description is not None:
                val = win.get("value", "")
                val_str = f" | {val} {value_description}"
            else:
                val_str = ""

            label = f"{start_time} | {duration}{val_str}{dow_str}{date_str}{locale_str}"

            # The trash button submits the list with this row removed.
            # Double json.dumps: the outer call produces a quoted JS string
            # literal suitable for embedding in hx_vals' js: expression.
            remaining = [w for i, w in enumerate(current_windows) if i != idx]
            remaining_json = json.dumps(json.dumps(remaining))
            window_rows.append(
                DivHStacked(
                    ConfigButton(
                        UkIcon("trash-2"),
                        hx_put=request_url_for("/eosdash/configuration"),
                        hx_target="#page-content",
                        hx_swap="innerHTML",
                        hx_vals=f'js:{{ action: "update", key: "{config_name}", value: {remaining_json} }}',
                        cls="px-2 py-1",
                    ),
                    P(label, cls="ml-2 text-sm font-mono"),
                )
            )

        # ---- Column headers and inputs ----
        # 5 base columns (start, duration, dow, date, locale) + optional value.
        num_cols = 5 + (1 if value_description is not None else 0)

        header_cols = [
            P("start_time *", cls="text-xs text-muted-foreground font-semibold"),
            P("duration *", cls="text-xs text-muted-foreground font-semibold"),
        ]
        input_cols = [
            Input(
                placeholder="e.g. 08:00 Europe/Berlin",
                name=f"{config_id}_tw_start_time",
                cls="border rounded px-2 py-1 text-sm",
            ),
            Input(
                placeholder="e.g. 8 hours",
                name=f"{config_id}_tw_duration",
                cls="border rounded px-2 py-1 text-sm",
            ),
        ]

        if value_description is not None:
            header_cols.append(
                P(f"{value_description} *", cls="text-xs text-muted-foreground font-semibold")
            )
            input_cols.append(
                Input(
                    placeholder="e.g. 0.288",
                    name=f"{config_id}_tw_value",
                    type="number",
                    step="0.001",
                    cls="border rounded px-2 py-1 text-sm",
                )
            )

        header_cols += [
            P("day_of_week", cls="text-xs text-muted-foreground font-semibold"),
            P("date (YYYY-MM-DD)", cls="text-xs text-muted-foreground font-semibold"),
            P("locale", cls="text-xs text-muted-foreground font-semibold"),
        ]
        input_cols += [
            Select(
                Option("— any day —", value="", selected=True),
                *[Option(lbl, value=str(i)) for i, lbl in enumerate(DOW_LABELS)],
                name=f"{config_id}_tw_dow",
                cls="border rounded px-2 py-1 text-sm",
            ),
            Input(
                placeholder="e.g. 2025-12-24",
                name=f"{config_id}_tw_date",
                cls="border rounded px-2 py-1 text-sm",
            ),
            Input(
                placeholder="e.g. de",
                name=f"{config_id}_tw_locale",
                cls="border rounded px-2 py-1 text-sm",
            ),
        ]

        # ---- JS for Add button ----
        # The Add button collects the input values client-side, appends the
        # new window to the existing list and submits the full list as JSON.
        current_json = json.dumps(json.dumps(current_windows))
        if value_description is not None:
            val_js_read = f"const val = parseFloat(document.querySelector(\"[name='{config_id}_tw_value']\").value);"
            val_js_guard = "isNaN(val)"
            val_js_field = "value: val,"
        else:
            val_js_read = ""
            val_js_guard = "false"
            val_js_field = ""

        add_section = Grid(
            Grid(*header_cols, cols=num_cols),
            Grid(*input_cols, cols=num_cols),
            ConfigButton(
                UkIcon("plus"),
                " Add window",
                hx_put=request_url_for("/eosdash/configuration"),
                hx_target="#page-content",
                hx_swap="innerHTML",
                hx_vals=f"""js:{{
                    action: "update",
                    key: "{config_name}",
                    value: (() => {{
                        const start = document.querySelector("[name='{config_id}_tw_start_time']").value.trim();
                        const dur = document.querySelector("[name='{config_id}_tw_duration']").value.trim();
                        {val_js_read}
                        const dowRaw = document.querySelector("[name='{config_id}_tw_dow']").value;
                        const date = document.querySelector("[name='{config_id}_tw_date']").value.trim();
                        const locale = document.querySelector("[name='{config_id}_tw_locale']").value.trim();
                        if (!start || !dur || {val_js_guard}) return {current_json};
                        const newWin = {{
                            start_time: start,
                            duration: dur,
                            {val_js_field}
                            day_of_week: dowRaw !== "" ? parseInt(dowRaw) : null,
                            date: date !== "" ? date : null,
                            locale: locale !== "" ? locale : null,
                        }};
                        const existing = {json.dumps(current_windows)};
                        existing.push(newWin);
                        return JSON.stringify(existing);
                    }})()
                }}""",
            ),
            cols=1,
            cls="gap-2 mt-2",
        )

        return Grid(
            DivRAligned(P("update time windows")),
            Grid(
                *window_rows,
                P("Add new window", cls="text-sm font-semibold mt-3 mb-1"),
                P(
                    "* required | day_of_week: overridden by date if both set",
                    cls="text-xs text-muted-foreground mb-1",
                ),
                add_section,
                cols=1,
                cls="gap-1",
            ),
            id=f"{config_id}-update-time-windows-windows-form",
        )

    return ConfigUpdateTimeWindowsWindowsForm
|
||||
|
||||
|
||||
def ConfigCard(
|
||||
config_name: str,
|
||||
config_type: str,
|
||||
@@ -548,7 +780,7 @@ def ConfigCard(
|
||||
# Last error
|
||||
Grid(
|
||||
DivRAligned(P("update error")),
|
||||
TextView(update_error),
|
||||
UpdateError(update_error),
|
||||
)
|
||||
if update_error
|
||||
else None,
|
||||
|
||||
@@ -34,6 +34,7 @@ from akkudoktoreos.server.dash.components import (
|
||||
TextView,
|
||||
make_config_update_list_form,
|
||||
make_config_update_map_form,
|
||||
make_config_update_time_windows_windows_form,
|
||||
make_config_update_value_form,
|
||||
)
|
||||
from akkudoktoreos.server.dash.context import request_url_for
|
||||
@@ -730,6 +731,10 @@ def Configuration(
|
||||
update_form_factory = make_config_update_value_form(
|
||||
["OPTIMIZATION", "PREDICTION", "None"]
|
||||
)
|
||||
elif config["name"].endswith("elecpricefixed.time_windows.windows"):
|
||||
update_form_factory = make_config_update_time_windows_windows_form(
|
||||
value_description="electricity_price_kwh [Amt/kWh]"
|
||||
)
|
||||
|
||||
rows.append(
|
||||
ConfigCard(
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
@@ -15,7 +14,7 @@ import psutil
|
||||
import uvicorn
|
||||
from fastapi import Body, FastAPI
|
||||
from fastapi import Path as FastapiPath
|
||||
from fastapi import Query, Request
|
||||
from fastapi import Query, Request, status
|
||||
from fastapi.exceptions import HTTPException
|
||||
from fastapi.responses import (
|
||||
FileResponse,
|
||||
@@ -57,13 +56,13 @@ from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
|
||||
from akkudoktoreos.prediction.load import LoadCommonSettings
|
||||
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
||||
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||
from akkudoktoreos.server.rest.cli import cli_apply_args_to_config, cli_parse_args
|
||||
from akkudoktoreos.server.rest.error import create_error_page
|
||||
from akkudoktoreos.server.rest.starteosdash import supervise_eosdash
|
||||
from akkudoktoreos.server.retentionmanager import RetentionManager
|
||||
from akkudoktoreos.server.server import (
|
||||
drop_root_privileges,
|
||||
fix_data_directories_permissions,
|
||||
get_default_host,
|
||||
get_host_ip,
|
||||
wait_for_port_free,
|
||||
)
|
||||
@@ -458,12 +457,12 @@ def fastapi_config_revert_put(
|
||||
"""
|
||||
try:
|
||||
get_config().revert_settings(backup_id)
|
||||
return get_config()
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"Error on reverting of configuration: {e}",
|
||||
)
|
||||
return get_config()
|
||||
|
||||
|
||||
@app.put("/v1/config/file", tags=["config"])
|
||||
@@ -475,12 +474,12 @@ def fastapi_config_file_put() -> ConfigEOS:
|
||||
"""
|
||||
try:
|
||||
get_config().to_config_file()
|
||||
return get_config()
|
||||
except:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Cannot save configuration to file '{get_config().config_file_path}'.",
|
||||
)
|
||||
return get_config()
|
||||
|
||||
|
||||
@app.get("/v1/config", tags=["config"])
|
||||
@@ -490,7 +489,10 @@ def fastapi_config_get() -> ConfigEOS:
|
||||
Returns:
|
||||
configuration (ConfigEOS): The current configuration.
|
||||
"""
|
||||
return get_config()
|
||||
try:
|
||||
return get_config()
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=f"Error on configuration retrieval: {e}")
|
||||
|
||||
|
||||
@app.put("/v1/config", tags=["config"])
|
||||
@@ -509,9 +511,9 @@ def fastapi_config_put(settings: SettingsEOS) -> ConfigEOS:
|
||||
"""
|
||||
try:
|
||||
get_config().merge_settings(settings)
|
||||
return get_config()
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=f"Error on update of configuration: {e}")
|
||||
return get_config()
|
||||
|
||||
|
||||
@app.put("/v1/config/{path:path}", tags=["config"])
|
||||
@@ -534,6 +536,7 @@ def fastapi_config_put_key(
|
||||
"""
|
||||
try:
|
||||
get_config().set_nested_value(path, value)
|
||||
return get_config()
|
||||
except Exception as e:
|
||||
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||
raise HTTPException(
|
||||
@@ -541,8 +544,6 @@ def fastapi_config_put_key(
|
||||
detail=f"Error on update of configuration '{path}','{value}':\n{e}\n{trace}",
|
||||
)
|
||||
|
||||
return get_config()
|
||||
|
||||
|
||||
@app.get("/v1/config/{path:path}", tags=["config"])
|
||||
def fastapi_config_get_key(
|
||||
@@ -660,8 +661,17 @@ def fastapi_devices_status_put(
|
||||
|
||||
@app.get("/v1/measurement/keys", tags=["measurement"])
def fastapi_measurement_keys_get() -> list[str]:
    """Get a list of available measurement keys."""
    # NOTE: the docstring must precede the try block; a string expression
    # placed after `try:` is a no-op and the endpoint loses its docstring.
    try:
        return sorted(get_measurement().record_keys)
    except Exception as e:
        # Log unexpected errors with full trace to ease debugging.
        trace = "".join(traceback.TracebackException.from_exception(e).format())
        logger.exception("Unexpected error retrieving measurement keys")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Internal server error:\n{e}\n{trace}",
        )
|
||||
|
||||
|
||||
@app.get("/v1/measurement/series", tags=["measurement"])
def fastapi_measurement_series_get(
    key: Annotated[str, Query(description="Measurement key.")],
) -> PydanticDateTimeSeries:
    """Get the measurements of given key as series."""
    try:
        if key not in get_measurement().record_keys:
            # Use the status constant for consistency with the other
            # hardened measurement endpoints.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Key '{key}' is not available.",
            )
        pdseries = get_measurement().key_to_series(key=key)
        return PydanticDateTimeSeries.from_series(pdseries)
    except HTTPException:
        # Re-raise HTTP exceptions unchanged (e.g. the 404 above).
        raise
    except Exception as e:
        # Log unexpected errors with full trace to ease debugging.
        trace = "".join(traceback.TracebackException.from_exception(e).format())
        logger.exception(f"Unexpected error retrieving measurement: {key}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Internal server error:\n{e}\n{trace}",
        )
|
||||
|
||||
|
||||
@app.put("/v1/measurement/value", tags=["measurement"])
|
||||
@@ -682,19 +704,42 @@ def fastapi_measurement_value_put(
|
||||
value: Union[float | str],
|
||||
) -> PydanticDateTimeSeries:
|
||||
"""Merge the measurement of given key and value into EOS measurements at given datetime."""
|
||||
if key not in get_measurement().record_keys:
|
||||
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
|
||||
if isinstance(value, str):
|
||||
# Try to convert to float
|
||||
try:
|
||||
value = float(value)
|
||||
except:
|
||||
logger.debug(
|
||||
f'/v1/measurement/value key: {key} value: "{value}" - string value not convertable to float'
|
||||
try:
|
||||
if isinstance(value, str):
|
||||
try:
|
||||
value = float(value)
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Value '{value}' cannot be converted to float",
|
||||
)
|
||||
|
||||
if key not in get_measurement().record_keys:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Key '{key}' not found in measurements",
|
||||
)
|
||||
get_measurement().update_value(datetime, key, value)
|
||||
pdseries = get_measurement().key_to_series(key=key)
|
||||
return PydanticDateTimeSeries.from_series(pdseries)
|
||||
|
||||
try:
|
||||
dt = to_datetime(datetime)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid datetime '{datetime}': {e}",
|
||||
)
|
||||
|
||||
get_measurement().update_value(dt, key, value)
|
||||
pdseries = get_measurement().key_to_series(key=key)
|
||||
return PydanticDateTimeSeries.from_series(pdseries)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||
logger.exception(f"Unexpected error updating measurement: {datetime}, {key}, {value}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Internal server error:\n{e}\n{trace}",
|
||||
)
|
||||
|
||||
|
||||
@app.put("/v1/measurement/series", tags=["measurement"])
|
||||
@@ -702,26 +747,56 @@ def fastapi_measurement_series_put(
|
||||
key: Annotated[str, Query(description="Measurement key.")], series: PydanticDateTimeSeries
|
||||
) -> PydanticDateTimeSeries:
|
||||
"""Merge measurement given as series into given key."""
|
||||
if key not in get_measurement().record_keys:
|
||||
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
|
||||
pdseries = series.to_series() # make pandas series from PydanticDateTimeSeries
|
||||
get_measurement().key_from_series(key=key, series=pdseries)
|
||||
pdseries = get_measurement().key_to_series(key=key)
|
||||
return PydanticDateTimeSeries.from_series(pdseries)
|
||||
try:
|
||||
if key not in get_measurement().record_keys:
|
||||
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
|
||||
pdseries = series.to_series() # make pandas series from PydanticDateTimeSeries
|
||||
get_measurement().key_from_series(key=key, series=pdseries)
|
||||
pdseries = get_measurement().key_to_series(key=key)
|
||||
return PydanticDateTimeSeries.from_series(pdseries)
|
||||
except HTTPException:
|
||||
# Re-raise HTTP exceptions
|
||||
raise
|
||||
except Exception as e:
|
||||
# Log unexpected errors
|
||||
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||
logger.exception(f"Unexpected error updating measurement: {key}, {series}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Internal server error:\n{e}\n{trace}",
|
||||
)
|
||||
|
||||
|
||||
@app.put("/v1/measurement/dataframe", tags=["measurement"])
|
||||
def fastapi_measurement_dataframe_put(data: PydanticDateTimeDataFrame) -> None:
|
||||
"""Merge the measurement data given as dataframe into EOS measurements."""
|
||||
dataframe = data.to_dataframe()
|
||||
get_measurement().import_from_dataframe(dataframe)
|
||||
try:
|
||||
dataframe = data.to_dataframe()
|
||||
get_measurement().import_from_dataframe(dataframe)
|
||||
except Exception as e:
|
||||
# Log unexpected errors
|
||||
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||
logger.exception(f"Unexpected error updating measurement: {data}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Internal server error:\n{e}\n{trace}",
|
||||
)
|
||||
|
||||
|
||||
@app.put("/v1/measurement/data", tags=["measurement"])
|
||||
def fastapi_measurement_data_put(data: PydanticDateTimeData) -> None:
|
||||
"""Merge the measurement data given as datetime data into EOS measurements."""
|
||||
datetimedata = data.to_dict()
|
||||
get_measurement().import_from_dict(datetimedata)
|
||||
try:
|
||||
datetimedata = data.to_dict()
|
||||
get_measurement().import_from_dict(datetimedata)
|
||||
except Exception as e:
|
||||
# Log unexpected errors
|
||||
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||
logger.exception(f"Unexpected error updating measurement: {data}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Internal server error:\n{e}\n{trace}",
|
||||
)
|
||||
|
||||
|
||||
@app.get("/v1/prediction/providers", tags=["prediction"])
|
||||
@@ -1427,52 +1502,42 @@ def run_eos() -> None:
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
# get_config(init=True) creates the configuration
|
||||
# this should not be done before nor later
|
||||
# get_config(init=True) creates the configuration, includes also ARGV args.
|
||||
# This should not be done before nor later
|
||||
config_eos = get_config(init=True)
|
||||
|
||||
# set logging to what is in config
|
||||
# Set logging to what is in config
|
||||
logger.remove()
|
||||
logging_track_config(config_eos, "logging", None, None)
|
||||
|
||||
# make logger track logging changes in config
|
||||
# Make logger track logging changes in config
|
||||
config_eos.track_nested_value("/logging", logging_track_config)
|
||||
|
||||
# Set config to actual environment variable & config file content
|
||||
config_eos.reset_settings()
|
||||
# Ensure host and port config settings are at least set to default values
|
||||
if config_eos.server.host is None:
|
||||
config_eos.set_nested_value("server/host", get_default_host())
|
||||
if config_eos.server.port is None:
|
||||
config_eos.set_nested_value("server/port", 8503)
|
||||
|
||||
# add arguments to config
|
||||
args: argparse.Namespace
|
||||
args_unknown: list[str]
|
||||
args, args_unknown = cli_parse_args()
|
||||
cli_apply_args_to_config(args)
|
||||
if config_eos.server.port is None: # make mypy happy
|
||||
raise RuntimeError("server.port is None despite default setup")
|
||||
|
||||
# prepare runtime arguments
|
||||
if args:
|
||||
run_as_user = args.run_as_user
|
||||
# Setup EOS reload for development
|
||||
if args.reload is None:
|
||||
reload = False
|
||||
else:
|
||||
logger.debug(f"reload set by argument to {args.reload}")
|
||||
reload = args.reload
|
||||
else:
|
||||
run_as_user = None
|
||||
if config_eos.server.eosdash_host is None:
|
||||
config_eos.set_nested_value("server/eosdash_host", config_eos.server.host)
|
||||
if config_eos.server.eosdash_port is None:
|
||||
config_eos.set_nested_value("server/eosdash_port", config_eos.server.port + 1)
|
||||
|
||||
# Switch data directories ownership to user
|
||||
fix_data_directories_permissions(run_as_user=run_as_user)
|
||||
fix_data_directories_permissions(run_as_user=config_eos.server.run_as_user)
|
||||
|
||||
# Switch privileges to run_as_user
|
||||
drop_root_privileges(run_as_user=run_as_user)
|
||||
drop_root_privileges(run_as_user=config_eos.server.run_as_user)
|
||||
|
||||
# Init the other singletons (besides config_eos)
|
||||
singletons_init()
|
||||
|
||||
# Wait for EOS port to be free - e.g. in case of restart
|
||||
port = config_eos.server.port
|
||||
if port is None:
|
||||
port = 8503
|
||||
wait_for_port_free(port, timeout=120, waiting_app_name="EOS")
|
||||
wait_for_port_free(config_eos.server.port, timeout=120, waiting_app_name="EOS")
|
||||
|
||||
# Normalize log_level to uvicorn log level
|
||||
VALID_UVICORN_LEVELS = {"critical", "error", "warning", "info", "debug", "trace"}
|
||||
@@ -1486,15 +1551,21 @@ def run_eos() -> None:
|
||||
elif uv_log_level not in VALID_UVICORN_LEVELS:
|
||||
uv_log_level = "info" # fallback
|
||||
|
||||
logger.info(f"Starting EOS server on {config_eos.server.host}:{config_eos.server.port}")
|
||||
if config_eos.server.startup_eosdash:
|
||||
logger.info(
|
||||
f"EOSdash will be available at {config_eos.server.eosdash_host}:{config_eos.server.eosdash_port}"
|
||||
)
|
||||
|
||||
try:
|
||||
# Let uvicorn run the fastAPI app
|
||||
uvicorn.run(
|
||||
"akkudoktoreos.server.eos:app",
|
||||
host=str(config_eos.server.host),
|
||||
port=port,
|
||||
port=config_eos.server.port,
|
||||
log_level=uv_log_level,
|
||||
access_log=True, # Fix server access logging to True
|
||||
reload=reload,
|
||||
reload=config_eos.server.reload,
|
||||
proxy_headers=True,
|
||||
forwarded_allow_ips="*",
|
||||
)
|
||||
@@ -1504,12 +1575,7 @@ def run_eos() -> None:
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Parse command-line arguments and start the EOS server with the specified options.
|
||||
|
||||
This function sets up the argument parser to accept command-line arguments for
|
||||
host, port, log_level, access_log, and reload. It uses default values from the
|
||||
config_eos module if arguments are not provided. After parsing the arguments,
|
||||
it starts the EOS server with the specified configurations.
|
||||
"""Start the EOS server with the specified options.
|
||||
|
||||
Command-line Arguments:
|
||||
--host (str): Host for the EOS server (default: value from config).
|
||||
|
||||
@@ -485,6 +485,10 @@ def run_eosdash() -> None:
|
||||
elif uv_log_level not in VALID_UVICORN_LEVELS:
|
||||
uv_log_level = "info" # fallback
|
||||
|
||||
logger.info(
|
||||
f"Starting EOSdash server on {config_eosdash['eosdash_host']}:{config_eosdash['eosdash_port']}"
|
||||
)
|
||||
|
||||
try:
|
||||
uvicorn.run(
|
||||
"akkudoktoreos.server.eosdash:app",
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
import argparse
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from akkudoktoreos.core.coreabc import get_config
|
||||
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
||||
from akkudoktoreos.server.server import get_default_host
|
||||
from akkudoktoreos.utils.stringutil import str2bool
|
||||
|
||||
|
||||
@@ -71,79 +66,3 @@ def cli_parse_args(
|
||||
"""
|
||||
args, args_unknown = cli_argument_parser().parse_known_args(argv)
|
||||
return args, args_unknown
|
||||
|
||||
|
||||
def cli_apply_args_to_config(args: argparse.Namespace) -> None:
|
||||
"""Apply parsed CLI arguments to the EOS configuration.
|
||||
|
||||
This function updates the EOS configuration with values provided via
|
||||
the command line. For each parameter, the precedence is:
|
||||
|
||||
CLI argument > existing config value > default value
|
||||
|
||||
Currently handled arguments:
|
||||
|
||||
- log_level: Updates "logging/console_level" in config.
|
||||
- host: Updates "server/host" in config.
|
||||
- port: Updates "server/port" in config.
|
||||
- startup_eosdash: Updates "server/startup_eosdash" in config.
|
||||
- eosdash_host/port: Initialized if EOSdash is enabled and not already set.
|
||||
|
||||
Args:
|
||||
args: Parsed command-line arguments from argparse.
|
||||
"""
|
||||
config_eos = get_config()
|
||||
|
||||
# Setup parameters from args, config_eos and default
|
||||
# Remember parameters in config
|
||||
|
||||
# Setup EOS logging level - first to have the other logging messages logged
|
||||
if args.log_level is not None:
|
||||
log_level = args.log_level.upper()
|
||||
# Ensure log_level from command line is in config settings
|
||||
if log_level in LOGGING_LEVELS:
|
||||
# Setup console logging level using nested value
|
||||
# - triggers logging configuration by logging_track_config
|
||||
config_eos.set_nested_value("logging/console_level", log_level)
|
||||
logger.debug(f"logging/console_level configuration set by argument to {log_level}")
|
||||
|
||||
# Setup EOS server host
|
||||
if args.host:
|
||||
host = args.host
|
||||
logger.debug(f"server/host configuration set by argument to {host}")
|
||||
elif config_eos.server.host:
|
||||
host = config_eos.server.host
|
||||
else:
|
||||
host = get_default_host()
|
||||
# Ensure host from command line is in config settings
|
||||
config_eos.set_nested_value("server/host", host)
|
||||
|
||||
# Setup EOS server port
|
||||
if args.port:
|
||||
port = args.port
|
||||
logger.debug(f"server/port configuration set by argument to {port}")
|
||||
elif config_eos.server.port:
|
||||
port = config_eos.server.port
|
||||
else:
|
||||
port = 8503
|
||||
# Ensure port from command line is in config settings
|
||||
config_eos.set_nested_value("server/port", port)
|
||||
|
||||
# Setup EOSdash startup
|
||||
if args.startup_eosdash is not None:
|
||||
# Ensure startup_eosdash from command line is in config settings
|
||||
config_eos.set_nested_value("server/startup_eosdash", args.startup_eosdash)
|
||||
logger.debug(
|
||||
f"server/startup_eosdash configuration set by argument to {args.startup_eosdash}"
|
||||
)
|
||||
|
||||
if config_eos.server.startup_eosdash:
|
||||
# Ensure EOSdash host and port config settings are at least set to default values
|
||||
|
||||
# Setup EOS server host
|
||||
if config_eos.server.eosdash_host is None:
|
||||
config_eos.set_nested_value("server/eosdash_host", host)
|
||||
|
||||
# Setup EOS server host
|
||||
if config_eos.server.eosdash_port is None:
|
||||
config_eos.set_nested_value("server/eosdash_port", port + 1)
|
||||
|
||||
@@ -309,8 +309,14 @@ async def supervise_eosdash() -> None:
|
||||
config_eos = get_config()
|
||||
|
||||
# Skip if EOSdash not configured to start
|
||||
if not getattr(config_eos.server, "startup_eosdash", False):
|
||||
startup_eosdash = config_eos.server.startup_eosdash
|
||||
if not startup_eosdash:
|
||||
logger.debug(
|
||||
f"EOSdash subprocess not monitored - startup_eosdash not set: '{startup_eosdash}'"
|
||||
)
|
||||
return
|
||||
else:
|
||||
logger.debug(f"EOSdash subprocess monitored - startup_eosdash set: '{startup_eosdash}'")
|
||||
|
||||
host = config_eos.server.eosdash_host
|
||||
port = config_eos.server.eosdash_port
|
||||
|
||||
@@ -374,6 +374,31 @@ class ServerCommonSettings(SettingsBaseModel):
|
||||
],
|
||||
},
|
||||
)
|
||||
run_as_user: Optional[str] = Field(
|
||||
default=None,
|
||||
json_schema_extra={
|
||||
"description": (
|
||||
"The name of the target user to switch to. If ``None`` (default), the current "
|
||||
"effective user is used and no privilege change is attempted."
|
||||
),
|
||||
"examples": [
|
||||
None,
|
||||
"user",
|
||||
],
|
||||
},
|
||||
)
|
||||
reload: Optional[bool] = Field(
|
||||
default=False,
|
||||
json_schema_extra={
|
||||
"description": (
|
||||
"Enable server auto-reload for debugging or development. Default is False. "
|
||||
"Monitors the package directory for changes and reloads the server."
|
||||
),
|
||||
"examples": [
|
||||
True,
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
@field_validator("host", "eosdash_host", mode="before")
|
||||
def validate_server_host(cls, value: Optional[str]) -> Optional[str]:
|
||||
@@ -386,3 +411,13 @@ class ServerCommonSettings(SettingsBaseModel):
|
||||
if value is not None and not (1024 <= value <= 49151):
|
||||
raise ValueError("Server port number must be between 1024 and 49151.")
|
||||
return value
|
||||
|
||||
@field_validator("run_as_user")
|
||||
def validate_user(cls, value: Optional[str]) -> Optional[str]:
|
||||
if value is not None:
|
||||
# Resolve target user info
|
||||
try:
|
||||
pw_record = pwd.getpwnam(value)
|
||||
except KeyError:
|
||||
raise ValueError(f"User '{value}' does not exist.")
|
||||
return value
|
||||
|
||||
@@ -10,7 +10,7 @@ Features:
|
||||
- Convert durations from strings or numerics into `pendulum.Duration`.
|
||||
- Infer timezone from UTC offset or geolocation.
|
||||
- Support for custom output formats (ISO 8601, UTC normalized, or user-specified formats).
|
||||
- Makes pendulum types usable in pydantic models using `pydantic_extra_types.pendulum_dt`
|
||||
- Makes pendulum types usable in pydantic models using `pydantic_extra_types.pe ndulum_dt`
|
||||
and the `Time` class.
|
||||
|
||||
Types:
|
||||
@@ -19,7 +19,6 @@ Types:
|
||||
- `DateTime`: Pendulum's timezone-aware datetime type.
|
||||
- `Date`: Pendulum's date type.
|
||||
- `Duration`: Pendulum's representation of a time delta.
|
||||
- `TimeWindow`: Daily or specific date time window with optional localization support.
|
||||
|
||||
Functions:
|
||||
----------
|
||||
@@ -45,22 +44,15 @@ Usage Examples:
|
||||
See each function's docstring for detailed argument options and examples.
|
||||
"""
|
||||
|
||||
import calendar
|
||||
import datetime
|
||||
import re
|
||||
from typing import Any, Iterator, List, Literal, Optional, Tuple, Union, overload
|
||||
from typing import Any, List, Literal, Optional, Tuple, Union, overload
|
||||
|
||||
import pendulum
|
||||
from babel.dates import get_day_names
|
||||
from loguru import logger
|
||||
from pendulum.tz.timezone import Timezone
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
Field,
|
||||
GetCoreSchemaHandler,
|
||||
field_serializer,
|
||||
field_validator,
|
||||
model_validator,
|
||||
)
|
||||
from pydantic_core import core_schema
|
||||
from pydantic_extra_types.pendulum_dt import ( # make pendulum types pydantic
|
||||
@@ -830,627 +822,6 @@ def to_time(
|
||||
raise ValueError(f"Invalid time value: {value!r} of type: {type(value)}") from e
|
||||
|
||||
|
||||
class TimeWindow(BaseModel):
|
||||
"""Model defining a daily or specific date time window with optional localization support.
|
||||
|
||||
Represents a time interval starting at `start_time` and lasting for `duration`.
|
||||
Can restrict applicability to a specific day of the week or a specific calendar date.
|
||||
Supports day names in multiple languages via locale-aware parsing.
|
||||
"""
|
||||
|
||||
start_time: Time = Field(
|
||||
..., json_schema_extra={"description": "Start time of the time window (time of day)."}
|
||||
)
|
||||
duration: Duration = Field(
|
||||
...,
|
||||
json_schema_extra={
|
||||
"description": "Duration of the time window starting from `start_time`."
|
||||
},
|
||||
)
|
||||
day_of_week: Optional[Union[int, str]] = Field(
|
||||
default=None,
|
||||
json_schema_extra={
|
||||
"description": (
|
||||
"Optional day of the week restriction. "
|
||||
"Can be specified as integer (0=Monday to 6=Sunday) or localized weekday name. "
|
||||
"If None, applies every day unless `date` is set."
|
||||
)
|
||||
},
|
||||
)
|
||||
date: Optional[Date] = Field(
|
||||
default=None,
|
||||
json_schema_extra={
|
||||
"description": (
|
||||
"Optional specific calendar date for the time window. Overrides `day_of_week` if set."
|
||||
)
|
||||
},
|
||||
)
|
||||
locale: Optional[str] = Field(
|
||||
default=None,
|
||||
json_schema_extra={
|
||||
"description": (
|
||||
"Locale used to parse weekday names in `day_of_week` when given as string. "
|
||||
"If not set, Pendulum's default locale is used. "
|
||||
"Examples: 'en', 'de', 'fr', etc."
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@field_validator("duration", mode="before")
|
||||
@classmethod
|
||||
def transform_to_duration(cls, value: Any) -> Duration:
|
||||
"""Converts various duration formats into Duration.
|
||||
|
||||
Args:
|
||||
value: The value to convert to Duration.
|
||||
|
||||
Returns:
|
||||
Duration: The converted Duration object.
|
||||
"""
|
||||
return to_duration(value)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_day_of_week_with_locale(self) -> "TimeWindow":
|
||||
"""Validates and normalizes the `day_of_week` field using the specified locale.
|
||||
|
||||
This method supports both integer (0–6) and string inputs for `day_of_week`.
|
||||
String inputs are matched first against English weekday names (case-insensitive),
|
||||
and then against localized weekday names using the provided `locale`.
|
||||
|
||||
If a valid match is found, `day_of_week` is converted to its corresponding
|
||||
integer value (0 for Monday through 6 for Sunday).
|
||||
|
||||
Returns:
|
||||
TimeWindow: The validated instance with `day_of_week` normalized to an integer.
|
||||
|
||||
Raises:
|
||||
ValueError: If `day_of_week` is an invalid integer (not in 0–6),
|
||||
or an unrecognized string (not matching English or localized names),
|
||||
or of an unsupported type.
|
||||
"""
|
||||
if self.day_of_week is None:
|
||||
return self
|
||||
|
||||
if isinstance(self.day_of_week, int):
|
||||
if not 0 <= self.day_of_week <= 6:
|
||||
raise ValueError("day_of_week must be in 0 (Monday) to 6 (Sunday)")
|
||||
return self
|
||||
|
||||
if isinstance(self.day_of_week, str):
|
||||
# Try matching against English names first (lowercase)
|
||||
english_days = {name.lower(): i for i, name in enumerate(calendar.day_name)}
|
||||
lowercase_value = self.day_of_week.lower()
|
||||
if lowercase_value in english_days:
|
||||
self.day_of_week = english_days[lowercase_value]
|
||||
return self
|
||||
|
||||
# Try localized names
|
||||
if self.locale:
|
||||
localized_days = {
|
||||
get_day_names("wide", locale=self.locale)[i].lower(): i for i in range(7)
|
||||
}
|
||||
if lowercase_value in localized_days:
|
||||
self.day_of_week = localized_days[lowercase_value]
|
||||
return self
|
||||
|
||||
raise ValueError(
|
||||
f"Invalid weekday name '{self.day_of_week}' for locale '{self.locale}'. "
|
||||
f"Expected English names (monday–sunday) or localized names."
|
||||
)
|
||||
|
||||
raise ValueError(f"Invalid type for day_of_week: {type(self.day_of_week)}")
|
||||
|
||||
@field_serializer("duration")
|
||||
def serialize_duration(self, value: Duration) -> str:
|
||||
"""Serialize duration to string."""
|
||||
return str(value)
|
||||
|
||||
def _window_start_end(self, reference_date: DateTime) -> tuple[DateTime, DateTime]:
|
||||
"""Get the actual start and end datetimes for the time window on a given date.
|
||||
|
||||
This method computes the concrete start and end datetimes of the configured
|
||||
time window for a specific date, taking into account timezone information.
|
||||
|
||||
Handles timezone-aware and naive `DateTime` and `Time` objects:
|
||||
- If both `reference_date` and `start_time` have timezones but differ,
|
||||
`start_time` is converted to the timezone of `reference_date`.
|
||||
- If only one has a timezone, the other inherits it.
|
||||
- If both are naive, UTC is assumed for both.
|
||||
|
||||
Args:
|
||||
reference_date: The reference date on which to calculate the window.
|
||||
|
||||
Returns:
|
||||
tuple[DateTime, DateTime]: A tuple containing the start and end datetimes
|
||||
for the time window, both timezone-aware.
|
||||
"""
|
||||
ref_tz = reference_date.timezone
|
||||
start_tz = self.start_time.tzinfo
|
||||
|
||||
# --- Timezone resolution logic ---
|
||||
if ref_tz and start_tz:
|
||||
# Both aware: align start_time to reference_date's tz
|
||||
if ref_tz != start_tz:
|
||||
start_time = self.start_time.in_timezone(ref_tz)
|
||||
else:
|
||||
start_time = self.start_time
|
||||
elif ref_tz and not start_tz:
|
||||
# Only reference_date aware → assume same tz for time
|
||||
start_time = self.start_time.replace_timezone(ref_tz)
|
||||
elif not ref_tz and start_tz:
|
||||
# Only start_time aware → apply its tz to reference_date
|
||||
reference_date = reference_date.replace(tzinfo=start_tz)
|
||||
start_time = self.start_time
|
||||
else:
|
||||
# Both naive → default to UTC
|
||||
reference_date = reference_date.replace(tzinfo="UTC")
|
||||
start_time = self.start_time.replace_timezone("UTC")
|
||||
|
||||
# --- Build window start ---
|
||||
start = reference_date.replace(
|
||||
hour=start_time.hour,
|
||||
minute=start_time.minute,
|
||||
second=start_time.second,
|
||||
microsecond=start_time.microsecond,
|
||||
)
|
||||
|
||||
# --- Compute window end ---
|
||||
end = start + self.duration
|
||||
return start, end
|
||||
|
||||
def contains(self, date_time: DateTime, duration: Optional[Duration] = None) -> bool:
|
||||
"""Check whether a datetime (and optional duration) fits within the time window.
|
||||
|
||||
This method checks if a given datetime `date_time` lies within the start time and duration
|
||||
defined by the `TimeWindow`. If `duration` is provided, it also ensures that
|
||||
the full duration starting at `date_time` ends before or at the end of the time window.
|
||||
|
||||
Handles timezone-aware and naive datetimes:
|
||||
- If both `date_time` and `start_time` are timezone-aware but differ → align `start_time`
|
||||
to `date_time`’s timezone.
|
||||
- If only one has a timezone → assign it to the other.
|
||||
- If both are naive → assume UTC for both.
|
||||
|
||||
If `day_of_week` or `date` are specified in the time window, the method will also
|
||||
ensure that `date_time` falls on the correct day or matches the exact date.
|
||||
|
||||
Args:
|
||||
date_time: The datetime to test.
|
||||
duration: An optional duration that must fit entirely within the time window
|
||||
starting from `date_time`.
|
||||
|
||||
Returns:
|
||||
bool: True if the datetime (and optional duration) is fully contained in the
|
||||
time window, False otherwise.
|
||||
"""
|
||||
start_time = self.start_time # work on a local copy to avoid mutating self
|
||||
start_tz = getattr(start_time, "tzinfo", None)
|
||||
ref_tz = date_time.timezone
|
||||
|
||||
# --- Handle timezone logic ---
|
||||
if ref_tz and start_tz:
|
||||
# Both aware but different → align start_time to date_time's timezone
|
||||
if ref_tz != start_tz:
|
||||
start_time = start_time.in_timezone(ref_tz)
|
||||
elif ref_tz and not start_tz:
|
||||
# Only date_time aware → assign its timezone to start_time
|
||||
start_time = start_time.replace_timezone(ref_tz)
|
||||
elif not ref_tz and start_tz:
|
||||
# Only start_time aware → assign its timezone to date_time
|
||||
date_time = date_time.replace(tzinfo=start_tz)
|
||||
else:
|
||||
# Both naive → assume UTC
|
||||
date_time = date_time.replace(tzinfo="UTC")
|
||||
start_time = start_time.replace_timezone("UTC")
|
||||
|
||||
# --- Date and weekday constraints ---
|
||||
if self.date and date_time.date() != self.date:
|
||||
return False
|
||||
|
||||
if self.day_of_week is not None and date_time.day_of_week != self.day_of_week:
|
||||
return False
|
||||
|
||||
# --- Compute window start and end for this date ---
|
||||
start, end = self._window_start_end(date_time)
|
||||
|
||||
# --- Check containment ---
|
||||
if not (start <= date_time < end):
|
||||
return False
|
||||
|
||||
if duration is not None:
|
||||
date_time_end = date_time + duration
|
||||
return date_time_end <= end
|
||||
|
||||
return True
|
||||
|
||||
def earliest_start_time(
|
||||
self, duration: Duration, reference_date: Optional[DateTime] = None
|
||||
) -> Optional[DateTime]:
|
||||
"""Get the earliest datetime that allows a duration to fit within the time window.
|
||||
|
||||
Args:
|
||||
duration: The duration that needs to fit within the window.
|
||||
reference_date: The date to check for the time window. Defaults to today.
|
||||
|
||||
Returns:
|
||||
The earliest start time for the duration, or None if it doesn't fit.
|
||||
"""
|
||||
if reference_date is None:
|
||||
reference_date = pendulum.today()
|
||||
|
||||
# Check if the reference date matches our constraints
|
||||
if self.date and reference_date.date() != self.date:
|
||||
return None
|
||||
|
||||
if self.day_of_week is not None and reference_date.day_of_week != self.day_of_week:
|
||||
return None
|
||||
|
||||
# Check if the duration can fit within the time window
|
||||
if duration > self.duration:
|
||||
return None
|
||||
|
||||
window_start, window_end = self._window_start_end(reference_date)
|
||||
|
||||
# The earliest start time is simply the window start time
|
||||
return window_start
|
||||
|
||||
def latest_start_time(
|
||||
self, duration: Duration, reference_date: Optional[DateTime] = None
|
||||
) -> Optional[DateTime]:
|
||||
"""Get the latest datetime that allows a duration to fit within the time window.
|
||||
|
||||
Args:
|
||||
duration: The duration that needs to fit within the window.
|
||||
reference_date: The date to check for the time window. Defaults to today.
|
||||
|
||||
Returns:
|
||||
The latest start time for the duration, or None if it doesn't fit.
|
||||
"""
|
||||
if reference_date is None:
|
||||
reference_date = pendulum.today()
|
||||
|
||||
# Check if the reference date matches our constraints
|
||||
if self.date and reference_date.date() != self.date:
|
||||
return None
|
||||
|
||||
if self.day_of_week is not None and reference_date.day_of_week != self.day_of_week:
|
||||
return None
|
||||
|
||||
# Check if the duration can fit within the time window
|
||||
if duration > self.duration:
|
||||
return None
|
||||
|
||||
window_start, window_end = self._window_start_end(reference_date)
|
||||
|
||||
# The latest start time is the window end minus the duration
|
||||
latest_start = window_end - duration
|
||||
|
||||
# Ensure the latest start time is not before the window start
|
||||
if latest_start < window_start:
|
||||
return None
|
||||
|
||||
return latest_start
|
||||
|
||||
def can_fit_duration(
|
||||
self, duration: Duration, reference_date: Optional[DateTime] = None
|
||||
) -> bool:
|
||||
"""Check if a duration can fit within the time window on a given date.
|
||||
|
||||
Args:
|
||||
duration: The duration to check.
|
||||
reference_date: The date to check for the time window. Defaults to today.
|
||||
|
||||
Returns:
|
||||
bool: True if the duration can fit, False otherwise.
|
||||
"""
|
||||
return self.earliest_start_time(duration, reference_date) is not None
|
||||
|
||||
def available_duration(self, reference_date: Optional[DateTime] = None) -> Optional[Duration]:
|
||||
"""Get the total available duration for the time window on a given date.
|
||||
|
||||
Args:
|
||||
reference_date: The date to check for the time window. Defaults to today.
|
||||
|
||||
Returns:
|
||||
The available duration, or None if the date doesn't match constraints.
|
||||
"""
|
||||
if reference_date is None:
|
||||
reference_date = pendulum.today()
|
||||
|
||||
if self.date and reference_date.date() != self.date:
|
||||
return None
|
||||
|
||||
if self.day_of_week is not None and reference_date.day_of_week != self.day_of_week:
|
||||
return None
|
||||
|
||||
return self.duration
|
||||
|
||||
|
||||
class TimeWindowSequence(BaseModel):
    """Model representing a sequence of time windows with collective operations.

    Wraps a list of TimeWindow objects and exposes sequence-style access
    (iteration, len, indexing) plus aggregate scheduling queries such as the
    earliest/latest start time and the total available duration on a date.
    """

    # Underlying windows; None is normalized to an empty list by the validator
    # and by model_post_init.
    windows: Optional[list[TimeWindow]] = Field(
        default_factory=list,
        json_schema_extra={"description": "List of TimeWindow objects that make up this sequence."},
    )

    @field_validator("windows")
    @classmethod
    def validate_windows(cls, v: Optional[list[TimeWindow]]) -> list[TimeWindow]:
        """Validate windows and convert None to empty list."""
        return [] if v is None else v

    def model_post_init(self, __context: Any) -> None:
        """Ensure windows is always a list after initialization."""
        if self.windows is None:
            self.windows = []

    @staticmethod
    def _resolve_reference(reference_date: Optional[DateTime]) -> DateTime:
        """Return the given reference date, falling back to today when None."""
        return pendulum.today() if reference_date is None else reference_date

    def __iter__(self) -> Iterator[TimeWindow]:
        """Allow iteration over the time windows."""
        return iter(self.windows or [])

    def __len__(self) -> int:
        """Return the number of time windows in the sequence."""
        return len(self.windows or [])

    def __getitem__(self, index: int) -> TimeWindow:
        """Allow indexing into the time windows."""
        if not self.windows:
            raise IndexError("list index out of range")
        return self.windows[index]

    def contains(self, date_time: DateTime, duration: Optional[Duration] = None) -> bool:
        """Check if any time window in the sequence contains the given datetime and duration.

        Args:
            date_time: The datetime to test.
            duration: An optional duration that must fit entirely within one of the time windows.

        Returns:
            bool: True if any time window contains the datetime (and optional duration), False if no windows.
        """
        return any(win.contains(date_time, duration) for win in self.windows or [])

    def earliest_start_time(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> Optional[DateTime]:
        """Get the earliest datetime across all windows that allows a duration to fit.

        Args:
            duration: The duration that needs to fit within a window.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            The earliest start time across all windows, or None if no window can fit the duration.
        """
        if not self.windows:
            return None
        ref = self._resolve_reference(reference_date)
        starts = [
            start
            for start in (win.earliest_start_time(duration, ref) for win in self.windows)
            if start is not None
        ]
        return min(starts, default=None)

    def latest_start_time(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> Optional[DateTime]:
        """Get the latest datetime across all windows that allows a duration to fit.

        Args:
            duration: The duration that needs to fit within a window.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            The latest start time across all windows, or None if no window can fit the duration.
        """
        if not self.windows:
            return None
        ref = self._resolve_reference(reference_date)
        starts = [
            start
            for start in (win.latest_start_time(duration, ref) for win in self.windows)
            if start is not None
        ]
        return max(starts, default=None)

    def can_fit_duration(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> bool:
        """Check if the duration can fit within any time window in the sequence.

        Args:
            duration: The duration to check.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            bool: True if any window can fit the duration, False if no windows.
        """
        if not self.windows:
            return False
        return any(win.can_fit_duration(duration, reference_date) for win in self.windows)

    def available_duration(self, reference_date: Optional[DateTime] = None) -> Optional[Duration]:
        """Get the total available duration across all applicable windows.

        Args:
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            The sum of available durations from all applicable windows, or None if no windows apply.
        """
        if not self.windows:
            return None
        ref = self._resolve_reference(reference_date)
        # Windows that do not apply on ref report None and are excluded.
        applicable = [
            avail
            for avail in (win.available_duration(ref) for win in self.windows)
            if avail is not None
        ]
        if not applicable:
            return None
        return sum(applicable, Duration())

    def get_applicable_windows(self, reference_date: Optional[DateTime] = None) -> list[TimeWindow]:
        """Get all windows that apply to the given reference date.

        Args:
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            List of TimeWindow objects that apply to the reference date.
        """
        if not self.windows:
            return []
        ref = self._resolve_reference(reference_date)
        return [win for win in self.windows if win.available_duration(ref) is not None]

    def find_windows_for_duration(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> list[TimeWindow]:
        """Find all windows that can accommodate the given duration.

        Args:
            duration: The duration that needs to fit.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            List of TimeWindow objects that can fit the duration.
        """
        if not self.windows:
            return []
        ref = self._resolve_reference(reference_date)
        return [win for win in self.windows if win.can_fit_duration(duration, ref)]

    def get_all_possible_start_times(
        self, duration: Duration, reference_date: Optional[DateTime] = None
    ) -> list[tuple[DateTime, DateTime, TimeWindow]]:
        """Get all possible start time ranges for a duration across all windows.

        Args:
            duration: The duration that needs to fit.
            reference_date: The date to check for the time windows. Defaults to today.

        Returns:
            List of tuples containing (earliest_start, latest_start, window) for each
            window that can accommodate the duration.
        """
        if not self.windows:
            return []
        ref = self._resolve_reference(reference_date)
        ranges: list[tuple[DateTime, DateTime, TimeWindow]] = []
        for win in self.windows:
            first = win.earliest_start_time(duration, ref)
            last = win.latest_start_time(duration, ref)
            # Only windows where the duration fits yield both bounds.
            if first is None or last is None:
                continue
            ranges.append((first, last, win))
        return ranges

    def add_window(self, window: TimeWindow) -> None:
        """Add a new time window to the sequence.

        Args:
            window: The TimeWindow to add.
        """
        if self.windows is None:
            self.windows = []
        self.windows.append(window)

    def remove_window(self, index: int) -> TimeWindow:
        """Remove a time window from the sequence by index.

        Args:
            index: The index of the window to remove.

        Returns:
            The removed TimeWindow.

        Raises:
            IndexError: If the index is out of range.
        """
        if not self.windows:
            raise IndexError("pop from empty list")
        return self.windows.pop(index)

    def clear_windows(self) -> None:
        """Remove all windows from the sequence."""
        if self.windows is not None:
            self.windows.clear()

    def sort_windows_by_start_time(self, reference_date: Optional[DateTime] = None) -> None:
        """Sort the windows by their start time on the given reference date.

        Windows that don't apply to the reference date are placed at the end.

        Args:
            reference_date: The date to use for sorting. Defaults to today.
        """
        if not self.windows:
            return
        ref = self._resolve_reference(reference_date)

        def sort_key(window: TimeWindow) -> tuple[int, DateTime]:
            """Sort key: (priority, start_time); priority 0 = applicable, 1 = not."""
            start = window.earliest_start_time(Duration(), ref)
            # Non-applicable windows sort last and carry a dummy timestamp.
            return (1, ref) if start is None else (0, start)

        self.windows.sort(key=sort_key)
|
||||
|
||||
|
||||
@overload
|
||||
def to_datetime(
|
||||
date_input: Optional[Any] = None,
|
||||
@@ -1782,7 +1153,7 @@ def to_duration(
|
||||
elif isinstance(input_value, str):
|
||||
# first try pendulum.parse
|
||||
try:
|
||||
parsed = pendulum.parse(input_value)
|
||||
parsed = pendulum.parse(input_value, strict=False)
|
||||
if isinstance(parsed, pendulum.Duration):
|
||||
duration = parsed # Already a duration
|
||||
else:
|
||||
|
||||
Reference in New Issue
Block a user