mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2026-01-01 00:06:18 +00:00
feat: add Home Assistant and NodeRED adapters (#764)
Adapters for Home Assistant and NodeRED integration are added. Akkudoktor-EOS can now be run as Home Assistant add-on and standalone. As Home Assistant add-on EOS uses ingress to fully integrate the EOSdash dashboard in Home Assistant. The fix includes several bug fixes that are not directly related to the adapter implementation but are necessary to keep EOS running properly and to test and document the changes. * fix: development version scheme The development versioning scheme is adaptet to fit to docker and home assistant expectations. The new scheme is x.y.z and x.y.z.dev<hash>. Hash is only digits as expected by home assistant. Development version is appended by .dev as expected by docker. * fix: use mean value in interval on resampling for array When downsampling data use the mean value of all values within the new sampling interval. * fix: default battery ev soc and appliance wh Make the genetic simulation return default values for the battery SoC, electric vehicle SoC and appliance load if these assets are not used. * fix: import json string Strip outer quotes from JSON strings on import to be compliant to json.loads() expectation. * fix: default interval definition for import data Default interval must be defined in lowercase human definition to be accepted by pendulum. * fix: clearoutside schema change * feat: add adapters for integrations Adapters for Home Assistant and NodeRED integration are added. Akkudoktor-EOS can now be run as Home Assistant add-on and standalone. As Home Assistant add-on EOS uses ingress to fully integrate the EOSdash dashboard in Home Assistant. * feat: allow eos to be started with root permissions and drop priviledges Home assistant starts all add-ons with root permissions. Eos now drops root permissions if an applicable user is defined by paramter --run_as_user. The docker image defines the user eos to be used. 
* feat: make eos supervise and monitor EOSdash Eos now not only starts EOSdash but also monitors EOSdash during runtime and restarts EOSdash on fault. EOSdash logging is captured by EOS and forwarded to the EOS log to provide better visibility. * feat: add duration to string conversion Make to_duration to also return the duration as string on request. * chore: Use info logging to report missing optimization parameters In parameter preparation for automatic optimization an error was logged for missing paramters. Log is now down using the info level. * chore: make EOSdash use the EOS data directory for file import/ export EOSdash use the EOS data directory for file import/ export by default. This allows to use the configuration import/ export function also within docker images. * chore: improve EOSdash config tab display Improve display of JSON code and add more forms for config value update. * chore: make docker image file system layout similar to home assistant Only use /data directory for persistent data. This is handled as a docker volume. The /data volume is mapped to ~/.local/share/net.akkudoktor.eos if using docker compose. * chore: add home assistant add-on development environment Add VSCode devcontainer and task definition for home assistant add-on development. * chore: improve documentation
This commit is contained in:
39
.devcontainer/devcontainer.json
Normal file
39
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
{
|
||||||
|
"name": "Devcontainer for Akkudoktor-EOS add-on repository",
|
||||||
|
"image": "ghcr.io/home-assistant/devcontainer:2-addons",
|
||||||
|
"appPort": ["7123:8123", "7357:4357", "8503:8503", "8504:8504"],
|
||||||
|
"remoteUser": "root",
|
||||||
|
"postStartCommand": "bash -c 'echo \"127.0.0.1 $(hostname)\" >> /etc/hosts' && bash devcontainer_bootstrap",
|
||||||
|
"runArgs": [
|
||||||
|
"-e",
|
||||||
|
"GIT_EDITOR=code --wait",
|
||||||
|
"--privileged",
|
||||||
|
"--hostname=homeassistant"
|
||||||
|
],
|
||||||
|
"containerEnv": {
|
||||||
|
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
|
||||||
|
},
|
||||||
|
"workspaceFolder": "/mnt/supervisor/addons/local/${localWorkspaceFolderBasename}",
|
||||||
|
"workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached",
|
||||||
|
"customizations": {
|
||||||
|
"vscode": {
|
||||||
|
"extensions": ["timonwong.shellcheck", "esbenp.prettier-vscode"],
|
||||||
|
"settings": {
|
||||||
|
"terminal.integrated.profiles.linux": {
|
||||||
|
"zsh": {
|
||||||
|
"path": "/usr/bin/zsh"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"terminal.integrated.defaultProfile.linux": "zsh",
|
||||||
|
"editor.formatOnPaste": false,
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.formatOnType": true,
|
||||||
|
"files.trimTrailingWhitespace": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"mounts": [
|
||||||
|
"type=volume,target=/var/lib/docker",
|
||||||
|
"type=volume,target=/mnt/supervisor"
|
||||||
|
]
|
||||||
|
}
|
||||||
22
.env
22
.env
@@ -1,5 +1,21 @@
|
|||||||
EOS_VERSION=main
|
# Default environment settings
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Docker Compose defaults
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Host data directory for EOS (Linux / macOS)
|
||||||
|
# Can be overridden by environment variables (e.g. PowerShell on Windows)
|
||||||
|
DOCKER_COMPOSE_DATA_DIR=${HOME}/.local/share/net.akkudoktor.eos
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Image / build
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
VERSION=0.2.0.dev70048701
|
||||||
|
PYTHON_VERSION=3.13.9
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Ports
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
EOS_SERVER__PORT=8503
|
EOS_SERVER__PORT=8503
|
||||||
EOS_SERVER__EOSDASH_PORT=8504
|
EOS_SERVER__EOSDASH_PORT=8504
|
||||||
|
|
||||||
PYTHON_VERSION=3.12.6
|
|
||||||
|
|||||||
4
.github/workflows/bump-version.yml
vendored
4
.github/workflows/bump-version.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
|||||||
- name: Update files and commit
|
- name: Update files and commit
|
||||||
run: |
|
run: |
|
||||||
# Define files to update
|
# Define files to update
|
||||||
UPDATE_FILES="haaddon/config.yaml"
|
UPDATE_FILES="config.yaml .env"
|
||||||
|
|
||||||
# Call general Python version replacement script
|
# Call general Python version replacement script
|
||||||
python scripts/update_version.py "${{ steps.calc.outputs.version }}" $UPDATE_FILES
|
python scripts/update_version.py "${{ steps.calc.outputs.version }}" $UPDATE_FILES
|
||||||
@@ -62,6 +62,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
git commit -m "chore: bump version to ${{ steps.calc.outputs.version }}"
|
git commit -m "chore: bump version to ${{ steps.calc.outputs.version }}"
|
||||||
git push
|
git push
|
||||||
|
fi
|
||||||
|
|
||||||
# --- Step 6: Create release tag ---
|
# --- Step 6: Create release tag ---
|
||||||
- name: Create release tag if it does not exist
|
- name: Create release tag if it does not exist
|
||||||
@@ -97,3 +98,4 @@ jobs:
|
|||||||
else
|
else
|
||||||
git commit -m "chore: bump dev version to ${VERSION_BASE}"
|
git commit -m "chore: bump dev version to ${VERSION_BASE}"
|
||||||
git push
|
git push
|
||||||
|
fi
|
||||||
|
|||||||
58
.vscode/tasks.json
vendored
Normal file
58
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
{
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"label": "Start Home Assistant",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisor_run",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Start Addon",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "ha addons stop \"local_${input:addonName}\"; ha addons start \"local_${input:addonName}\"; docker logs --follow \"addon_local_${input:addonName}\"",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": false
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": [],
|
||||||
|
"runOptions": {
|
||||||
|
"reevaluateOnRerun": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Rebuild and Start Addon",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "ha addons rebuild \"local_${input:addonName}\"; ha addons start \"local_${input:addonName}\"; docker logs --follow \"addon_local_${input:addonName}\"",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": false
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"inputs": [
|
||||||
|
{
|
||||||
|
"id": "addonName",
|
||||||
|
"type": "pickString",
|
||||||
|
"description": "Name of addon (to add your addon to this list, please edit .vscode/tasks.json)",
|
||||||
|
"options": ["eos"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
98
CHANGELOG.md
98
CHANGELOG.md
@@ -5,6 +5,104 @@ All notable changes to the akkudoktoreos project will be documented in this file
|
|||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## 0.3.0 (2025-12-??)
|
||||||
|
|
||||||
|
Adapters for Home Assistant and NodeRed integration are added. These adapters
|
||||||
|
provide a simplified interface to these HEMS besides the standard REST interface.
|
||||||
|
Akkudoktor-EOS can now be run as Home Assistant add-on and standalone.
|
||||||
|
As Home Assistant add-on EOS uses ingress to fully integrate the EOSdash dashboard
|
||||||
|
in Home Assistant.
|
||||||
|
|
||||||
|
In addition, bugs were fixed and new features were added.
|
||||||
|
|
||||||
|
### Feat
|
||||||
|
|
||||||
|
- add adapters for integrations
|
||||||
|
|
||||||
|
Adapters for Home Assistant and NodeRED integration are added.
|
||||||
|
Akkudoktor-EOS can now be run as Home Assistant add-on and standalone.
|
||||||
|
|
||||||
|
As Home Assistant add-on EOS uses ingress to fully integrate the EOSdash dashboard
|
||||||
|
in Home Assistant.
|
||||||
|
|
||||||
|
- allow eos to be started with root permissions and drop priviledges
|
||||||
|
|
||||||
|
Home assistant starts all add-ons with root permissions. Eos now drops
|
||||||
|
root permissions if an applicable user is defined by paramter --run_as_user.
|
||||||
|
The docker image defines the user eos to be used.
|
||||||
|
|
||||||
|
- make eos supervise and monitor EOSdash
|
||||||
|
|
||||||
|
Eos now not only starts EOSdash but also monitors EOSdash during runtime
|
||||||
|
and restarts EOSdash on fault. EOSdash logging is captured by EOS
|
||||||
|
and forwarded to the EOS log to provide better visibility.
|
||||||
|
|
||||||
|
- add duration to string conversion
|
||||||
|
|
||||||
|
Make to_duration to also return the duration as string on request.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- development version scheme
|
||||||
|
|
||||||
|
The development versioning scheme is adaptet to fit to docker and
|
||||||
|
home assistant expectations. The new scheme is x.y.z and x.y.z.dev<hash>.
|
||||||
|
Hash is only digits as expected by home assistant. Development version
|
||||||
|
is appended by .dev as expected by docker.
|
||||||
|
|
||||||
|
- use mean value in interval on resampling for array
|
||||||
|
|
||||||
|
When downsampling data use the mean value of all values within the new
|
||||||
|
sampling interval.
|
||||||
|
|
||||||
|
- default battery ev soc and appliance wh
|
||||||
|
|
||||||
|
Make the genetic simulation return default values for the
|
||||||
|
battery SoC, electric vehicle SoC and appliance load if these
|
||||||
|
assets are not used.
|
||||||
|
|
||||||
|
- import json string
|
||||||
|
|
||||||
|
Strip outer quotes from JSON strings on import to be compliant to json.loads()
|
||||||
|
expectation.
|
||||||
|
|
||||||
|
- default interval definition for import data
|
||||||
|
|
||||||
|
Default interval must be defined in lowercase human definition to
|
||||||
|
be accepted by pendulum.
|
||||||
|
|
||||||
|
- clearoutside schema change
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- Use info logging to report missing optimization parameters
|
||||||
|
|
||||||
|
In parameter preparation for automatic optimization an error was logged for missing paramters.
|
||||||
|
Log is now down using the info level.
|
||||||
|
|
||||||
|
- make EOSdash use the EOS data directory for file import/ export
|
||||||
|
|
||||||
|
EOSdash use the EOS data directory for file import/ export by default.
|
||||||
|
This allows to use the configuration import/ export function also
|
||||||
|
within docker images.
|
||||||
|
|
||||||
|
- improve EOSdash config tab display
|
||||||
|
|
||||||
|
Improve display of JSON code and add more forms for config value update.
|
||||||
|
|
||||||
|
- make docker image file system layout similar to home assistant
|
||||||
|
|
||||||
|
Only use /data directory for persistent data. This is handled as a
|
||||||
|
docker volume. The /data volume is mapped to ~/.local/share/net.akkudoktor.eos
|
||||||
|
if using docker compose.
|
||||||
|
|
||||||
|
- add home assistant add-on development environment
|
||||||
|
|
||||||
|
Add VSCode devcontainer and task definition for home assistant add-on
|
||||||
|
development.
|
||||||
|
|
||||||
|
- improve documentation
|
||||||
|
|
||||||
## 0.2.0 (2025-11-09)
|
## 0.2.0 (2025-11-09)
|
||||||
|
|
||||||
The most important new feature is **automatic optimization**.
|
The most important new feature is **automatic optimization**.
|
||||||
|
|||||||
28
DOCS.md
Normal file
28
DOCS.md
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
<!--
|
||||||
|
DOCS.md is used by Home Assistant for the Add-on Store documentation.
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Akkudoktor-EOS – Home Assistant Add-on Documentation
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
**Akkudoktor-EOS** is a Home Assistant add-on that optimizes household energy flows such as battery storage, photovoltaic (PV) generation, grid usage, and controllable loads.
|
||||||
|
|
||||||
|
The add-on is designed to be *practical and user-focused*: once configured, it runs automatically in the background and integrates seamlessly with Home Assistant. Advanced optimization logic and simulations are handled internally — no programming required.
|
||||||
|
|
||||||
|
EOS is especially suited for users who:
|
||||||
|
|
||||||
|
* Have a home battery and/or PV system
|
||||||
|
* Want to use forecasts to make smarter energy decisions
|
||||||
|
* Prefer a transparent, configurable, and locally running solution
|
||||||
|
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
* 🔋 Battery and storage optimization
|
||||||
|
* ☀️ PV forecast integration
|
||||||
|
* ⚡ Load and power profile optimization
|
||||||
|
* 🧠 Forecast-based optimization
|
||||||
|
* 🧩 Modular simulation and controller architecture
|
||||||
|
* 🏠 Native Home Assistant add-on integration
|
||||||
|
* 📊 Designed for extensibility with custom energy devices
|
||||||
86
Dockerfile
86
Dockerfile
@@ -1,17 +1,29 @@
|
|||||||
# syntax=docker/dockerfile:1.7
|
# syntax=docker/dockerfile:1.7
|
||||||
# Dockerfile
|
# Dockerfile
|
||||||
|
|
||||||
# Set base image first
|
# Support both Home Assistant builds and standalone builds
|
||||||
|
# Only Debian based images are supported (no Alpine)
|
||||||
|
ARG BUILD_FROM
|
||||||
ARG PYTHON_VERSION=3.13.9
|
ARG PYTHON_VERSION=3.13.9
|
||||||
FROM python:${PYTHON_VERSION}-slim
|
|
||||||
|
|
||||||
LABEL source="https://github.com/Akkudoktor-EOS/EOS"
|
# If BUILD_FROM is set (Home Assistant), use it; otherwise use python-slim
|
||||||
|
FROM ${BUILD_FROM:-python:${PYTHON_VERSION}-slim}
|
||||||
|
|
||||||
|
LABEL \
|
||||||
|
io.hass.version="VERSION" \
|
||||||
|
io.hass.type="addon" \
|
||||||
|
io.hass.arch="aarch64|amd64" \
|
||||||
|
source="https://github.com/Akkudoktor-EOS/EOS"
|
||||||
|
|
||||||
ENV MPLCONFIGDIR="/tmp/mplconfigdir"
|
|
||||||
ENV EOS_DIR="/opt/eos"
|
ENV EOS_DIR="/opt/eos"
|
||||||
ENV EOS_CACHE_DIR="${EOS_DIR}/cache"
|
# Create persistent data directory similar to home assistant add-on
|
||||||
ENV EOS_OUTPUT_DIR="${EOS_DIR}/output"
|
# - EOS_DATA_DIR: Persistent data directory
|
||||||
ENV EOS_CONFIG_DIR="${EOS_DIR}/config"
|
# - MPLCONFIGDIR: user customizations to Mathplotlib
|
||||||
|
ENV EOS_DATA_DIR="/data"
|
||||||
|
ENV EOS_CACHE_DIR="${EOS_DATA_DIR}/cache"
|
||||||
|
ENV EOS_OUTPUT_DIR="${EOS_DATA_DIR}/output"
|
||||||
|
ENV EOS_CONFIG_DIR="${EOS_DATA_DIR}/config"
|
||||||
|
ENV MPLCONFIGDIR="${EOS_DATA_DIR}/mplconfigdir"
|
||||||
|
|
||||||
# Overwrite when starting the container in a production environment
|
# Overwrite when starting the container in a production environment
|
||||||
ENV EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
ENV EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
||||||
@@ -23,45 +35,65 @@ ENV MKL_NUM_THREADS=1
|
|||||||
ENV PIP_PROGRESS_BAR=off
|
ENV PIP_PROGRESS_BAR=off
|
||||||
ENV PIP_NO_COLOR=1
|
ENV PIP_NO_COLOR=1
|
||||||
|
|
||||||
|
# Generic environment
|
||||||
|
ENV LANG=C.UTF-8
|
||||||
|
ENV VENV_PATH=/opt/venv
|
||||||
|
# - Use .venv for python commands
|
||||||
|
ENV PATH="$VENV_PATH/bin:$PATH"
|
||||||
|
|
||||||
WORKDIR ${EOS_DIR}
|
WORKDIR ${EOS_DIR}
|
||||||
|
|
||||||
RUN adduser --system --group --no-create-home eos \
|
# Create eos user and data directories with eos user permissions
|
||||||
&& mkdir -p "${MPLCONFIGDIR}" \
|
RUN apt-get update && apt-get install -y --no-install-recommends adduser \
|
||||||
&& chown eos "${MPLCONFIGDIR}" \
|
&& adduser --system --group --no-create-home eos \
|
||||||
&& mkdir -p "${EOS_CACHE_DIR}" \
|
&& mkdir -p "${EOS_DATA_DIR}" \
|
||||||
&& chown eos "${EOS_CACHE_DIR}" \
|
&& chown -R eos:eos "${EOS_DATA_DIR}" \
|
||||||
&& mkdir -p "${EOS_OUTPUT_DIR}" \
|
&& mkdir -p "${EOS_CACHE_DIR}" "${EOS_OUTPUT_DIR}" "${EOS_CONFIG_DIR}" "${MPLCONFIGDIR}" \
|
||||||
&& chown eos "${EOS_OUTPUT_DIR}" \
|
&& chown -R eos:eos "${EOS_CACHE_DIR}" "${EOS_OUTPUT_DIR}" "${EOS_CONFIG_DIR}" "${MPLCONFIGDIR}"
|
||||||
&& mkdir -p "${EOS_CONFIG_DIR}" \
|
|
||||||
&& chown eos "${EOS_CONFIG_DIR}"
|
|
||||||
|
|
||||||
# Install requirements
|
# Install build dependencies (Debian)
|
||||||
|
# - System deps
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
python3-venv \
|
||||||
|
gcc g++ gfortran \
|
||||||
|
libopenblas-dev liblapack-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# - Create venv
|
||||||
|
RUN python3 -m venv ${VENV_PATH}
|
||||||
|
|
||||||
|
# - Upgrade pip inside venv
|
||||||
|
RUN pip install --upgrade pip setuptools wheel
|
||||||
|
|
||||||
|
# - Install deps
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
pip install --no-cache-dir -r requirements.txt
|
|
||||||
|
|
||||||
# Copy source
|
# Install EOS/ EOSdash
|
||||||
|
# - Copy source
|
||||||
COPY src/ ./src
|
COPY src/ ./src
|
||||||
COPY pyproject.toml .
|
COPY pyproject.toml .
|
||||||
|
|
||||||
# Create version information
|
# - Create version information
|
||||||
COPY scripts/get_version.py ./scripts/get_version.py
|
COPY scripts/get_version.py ./scripts/get_version.py
|
||||||
RUN python scripts/get_version.py > ./version.txt
|
RUN python scripts/get_version.py > ./version.txt
|
||||||
RUN rm ./scripts/get_version.py
|
RUN rm ./scripts/get_version.py
|
||||||
|
|
||||||
RUN echo "Building Akkudoktor-EOS with Python $PYTHON_VERSION"
|
RUN echo "Building Akkudoktor-EOS with Python $PYTHON_VERSION"
|
||||||
|
|
||||||
# Install akkudoktoreos package in editable form (-e)
|
# - Install akkudoktoreos package in editable form (-e)
|
||||||
# pyproject-toml will read the version from version.txt
|
# - pyproject-toml will read the version from version.txt
|
||||||
RUN pip install --no-cache-dir -e .
|
RUN pip install --no-cache-dir -e .
|
||||||
|
|
||||||
USER eos
|
|
||||||
ENTRYPOINT []
|
ENTRYPOINT []
|
||||||
|
|
||||||
EXPOSE 8503
|
|
||||||
EXPOSE 8504
|
EXPOSE 8504
|
||||||
|
EXPOSE 8503
|
||||||
|
|
||||||
# Ensure EOS and EOSdash bind to 0.0.0.0
|
# Ensure EOS and EOSdash bind to 0.0.0.0
|
||||||
CMD ["python", "-m", "akkudoktoreos.server.eos", "--host", "0.0.0.0"]
|
# EOS is started with root provileges. EOS will drop root proviledges and switch to user eos.
|
||||||
|
CMD ["python", "-m", "akkudoktoreos.server.eos", "--host", "0.0.0.0", "--run_as_user", "eos"]
|
||||||
|
|
||||||
VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"]
|
# Persistent data
|
||||||
|
# (Not recognized by home assistant add-on management, but there we have /data anyway)
|
||||||
|
VOLUME ["${EOS_DATA_DIR}"]
|
||||||
|
|||||||
60
Makefile
60
Makefile
@@ -10,30 +10,30 @@ all: help
|
|||||||
# Target to display help information
|
# Target to display help information
|
||||||
help:
|
help:
|
||||||
@echo "Available targets:"
|
@echo "Available targets:"
|
||||||
@echo " venv - Set up a Python 3 virtual environment."
|
@echo " venv - Set up a Python 3 virtual environment."
|
||||||
@echo " pip - Install dependencies from requirements.txt."
|
@echo " pip - Install dependencies from requirements.txt."
|
||||||
@echo " pip-dev - Install dependencies from requirements-dev.txt."
|
@echo " pip-dev - Install dependencies from requirements-dev.txt."
|
||||||
@echo " format - Format source code."
|
@echo " format - Format source code."
|
||||||
@echo " gitlint - Lint last commit message."
|
@echo " gitlint - Lint last commit message."
|
||||||
@echo " mypy - Run mypy."
|
@echo " mypy - Run mypy."
|
||||||
@echo " install - Install EOS in editable form (development mode) into virtual environment."
|
@echo " install - Install EOS in editable form (development mode) into virtual environment."
|
||||||
@echo " docker-run - Run entire setup on docker"
|
@echo " docker-run - Run entire setup on docker"
|
||||||
@echo " docker-build - Rebuild docker image"
|
@echo " docker-build - Rebuild docker image"
|
||||||
@echo " docs - Generate HTML documentation (in build/docs/html/)."
|
@echo " docs - Generate HTML documentation (in build/docs/html/)."
|
||||||
@echo " read-docs - Read HTML documentation in your browser."
|
@echo " read-docs - Read HTML documentation in your browser."
|
||||||
@echo " gen-docs - Generate openapi.json and docs/_generated/*."
|
@echo " gen-docs - Generate openapi.json and docs/_generated/*."
|
||||||
@echo " clean-docs - Remove generated documentation."
|
@echo " clean-docs - Remove generated documentation."
|
||||||
@echo " run - Run EOS production server in virtual environment."
|
@echo " run - Run EOS production server in virtual environment."
|
||||||
@echo " run-dev - Run EOS development server in virtual environment (automatically reloads)."
|
@echo " run-dev - Run EOS development server in virtual environment (automatically reloads)."
|
||||||
@echo " run-dash - Run EOSdash production server in virtual environment."
|
@echo " run-dash - Run EOSdash production server in virtual environment."
|
||||||
@echo " run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)."
|
@echo " run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)."
|
||||||
@echo " test - Run tests."
|
@echo " test - Run tests."
|
||||||
@echo " test-full - Run all tests (e.g. to finalize a commit)."
|
@echo " test-finalize - Run all tests (e.g. to finalize a commit)."
|
||||||
@echo " test-system - Run tests with system tests enabled."
|
@echo " test-system - Run tests with system tests enabled."
|
||||||
@echo " test-ci - Run tests as CI does. No user config file allowed."
|
@echo " test-ci - Run tests as CI does. No user config file allowed."
|
||||||
@echo " test-profile - Run single test optimization with profiling."
|
@echo " test-profile - Run single test optimization with profiling."
|
||||||
@echo " dist - Create distribution (in dist/)."
|
@echo " dist - Create distribution (in dist/)."
|
||||||
@echo " clean - Remove generated documentation, distribution and virtual environment."
|
@echo " clean - Remove generated documentation, distribution and virtual environment."
|
||||||
@echo " prepare-version - Prepare a version defined in setup.py."
|
@echo " prepare-version - Prepare a version defined in setup.py."
|
||||||
|
|
||||||
# Target to set up a Python 3 virtual environment
|
# Target to set up a Python 3 virtual environment
|
||||||
@@ -79,13 +79,13 @@ gen-docs: pip-dev version-txt
|
|||||||
|
|
||||||
# Target to build HTML documentation
|
# Target to build HTML documentation
|
||||||
docs: pip-dev
|
docs: pip-dev
|
||||||
.venv/bin/pytest --full-run tests/test_docsphinx.py
|
.venv/bin/pytest --finalize tests/test_docsphinx.py
|
||||||
@echo "Documentation build to build/docs/html/."
|
@echo "Documentation build to build/docs/html/."
|
||||||
|
|
||||||
# Target to read the HTML documentation
|
# Target to read the HTML documentation
|
||||||
read-docs:
|
read-docs:
|
||||||
@echo "Read the documentation in your browser"
|
@echo "Read the documentation in your browser"
|
||||||
.venv/bin/pytest --full-run tests/test_docsphinx.py
|
.venv/bin/pytest --finalize tests/test_docsphinx.py
|
||||||
.venv/bin/python -m webbrowser build/docs/html/index.html
|
.venv/bin/python -m webbrowser build/docs/html/index.html
|
||||||
|
|
||||||
# Clean Python bytecode
|
# Clean Python bytecode
|
||||||
@@ -108,7 +108,7 @@ clean: clean-docs
|
|||||||
|
|
||||||
run:
|
run:
|
||||||
@echo "Starting EOS production server, please wait..."
|
@echo "Starting EOS production server, please wait..."
|
||||||
.venv/bin/python -m akkudoktoreos.server.eos
|
.venv/bin/python -m akkudoktoreos.server.eos --startup_eosdash true
|
||||||
|
|
||||||
run-dev:
|
run-dev:
|
||||||
@echo "Starting EOS development server, please wait..."
|
@echo "Starting EOS development server, please wait..."
|
||||||
@@ -142,7 +142,7 @@ test-system:
|
|||||||
.venv/bin/pytest --system-test -vs --cov src --cov-report term-missing
|
.venv/bin/pytest --system-test -vs --cov src --cov-report term-missing
|
||||||
|
|
||||||
# Target to run all tests.
|
# Target to run all tests.
|
||||||
test-full:
|
test-finalize:
|
||||||
@echo "Running all tests..."
|
@echo "Running all tests..."
|
||||||
.venv/bin/pytest --finalize
|
.venv/bin/pytest --finalize
|
||||||
|
|
||||||
@@ -165,10 +165,14 @@ mypy:
|
|||||||
|
|
||||||
# Run entire setup on docker
|
# Run entire setup on docker
|
||||||
docker-run:
|
docker-run:
|
||||||
|
@echo "Build and run EOS docker container locally."
|
||||||
|
@echo "Persistent data (and config) in ${HOME}/.local/share/net.akkudoktor.eos"
|
||||||
@docker pull python:3.13.9-slim
|
@docker pull python:3.13.9-slim
|
||||||
@docker compose up --remove-orphans
|
@docker compose up --remove-orphans
|
||||||
|
|
||||||
docker-build:
|
docker-build:
|
||||||
|
@echo "Build EOS docker container locally."
|
||||||
|
@echo "Persistent data (and config) in ${HOME}/.local/share/net.akkudoktor.eos"
|
||||||
@docker pull python:3.13.9-slim
|
@docker pull python:3.13.9-slim
|
||||||
@docker compose build
|
@docker compose build
|
||||||
|
|
||||||
|
|||||||
45
README.md
45
README.md
@@ -73,6 +73,51 @@ docker run -d \
|
|||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
|
### Home Assistant add-on
|
||||||
|
|
||||||
|
![Supports aarch64 Architecture][aarch64-shield]
|
||||||
|
![Supports amd64 Architecture][amd64-shield]
|
||||||
|
|
||||||
|
[aarch64-shield]: https://img.shields.io/badge/aarch64-yes-green.svg
|
||||||
|
[amd64-shield]: https://img.shields.io/badge/amd64-yes-green.svg
|
||||||
|
|
||||||
|
To install the **Akkudoktor-EOS** add-on in Home Assistant:
|
||||||
|
|
||||||
|
[](https://my.home-assistant.io/redirect/supervisor_add_addon_repository/?repository_url=https%3A%2F%2Fgithub.com%2FAkkudoktor-EOS%2FEOS)
|
||||||
|
|
||||||
|
1. **Add the repository URL**:
|
||||||
|
|
||||||
|
In Home Assistant, go to:
|
||||||
|
|
||||||
|
```
|
||||||
|
Settings → Add-ons → Add-on Store → ⋮ (top-right menu) → Repositories
|
||||||
|
```
|
||||||
|
|
||||||
|
and enter the URL of this Git repository:
|
||||||
|
|
||||||
|
```
|
||||||
|
https://github.com/Akkudoktor-EOS/EOS
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Install the add-on**:
|
||||||
|
|
||||||
|
After adding the repository, the add-on will appear in the Add-on Store. Click **Install**.
|
||||||
|
|
||||||
|
3. **Start the add-on**:
|
||||||
|
|
||||||
|
Once installed, click **Start** in the add-on panel.
|
||||||
|
|
||||||
|
4. **Access the dashboard**:
|
||||||
|
|
||||||
|
Click **Open Web UI** in the add-on panel.
|
||||||
|
|
||||||
|
5. **Configure EOS** (optional):
|
||||||
|
In the dashboard, go to:
|
||||||
|
|
||||||
|
```
|
||||||
|
Config
|
||||||
|
```
|
||||||
|
|
||||||
### Docker (Recommended)
|
### Docker (Recommended)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
14
build.yaml
Normal file
14
build.yaml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# Home Assistant - Add-on Build Configuration
|
||||||
|
# -------------------------------------------
|
||||||
|
# https://developers.home-assistant.io/docs/add-ons/configuration#add-on-extended-build
|
||||||
|
#
|
||||||
|
# The Akkudoktor-EOS add-on repo is special because there is only one add-on and it is in
|
||||||
|
# the root directory (no add-on folder as usual).
|
||||||
|
|
||||||
|
image: "addon-eos"
|
||||||
|
build_from:
|
||||||
|
# Debian based images only
|
||||||
|
amd64: "ghcr.io/home-assistant/amd64-base-debian:trixie"
|
||||||
|
aarch64: "ghcr.io/home-assistant/aarch64-base-debian:trixie"
|
||||||
|
args:
|
||||||
|
PYTHON_VERSION: "3.13"
|
||||||
57
config.yaml
Normal file
57
config.yaml
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# Home Assistant - Add-on Configuration
|
||||||
|
# -------------------------------------
|
||||||
|
# https://developers.home-assistant.io/docs/add-ons/configuration#add-on-configuration
|
||||||
|
#
|
||||||
|
# The Akkudoktor-EOS add-on repo is special because there is only one add-on and it is in
|
||||||
|
# the root directory (no add-on folder as usual).
|
||||||
|
|
||||||
|
name: "Akkudoktor-EOS"
|
||||||
|
version: "0.2.0.dev70048701"
|
||||||
|
slug: "eos"
|
||||||
|
description: "Akkudoktor-EOS add-on"
|
||||||
|
url: "https://github.com/Akkudoktor-EOS/EOS"
|
||||||
|
|
||||||
|
arch:
|
||||||
|
- aarch64
|
||||||
|
- amd64
|
||||||
|
|
||||||
|
startup: "application"
|
||||||
|
init: false
|
||||||
|
boot: "auto"
|
||||||
|
|
||||||
|
# Map home assistant persistent folders
|
||||||
|
# /data is automatically mapped - do not list here
|
||||||
|
map:
|
||||||
|
- share:rw
|
||||||
|
- config:rw
|
||||||
|
|
||||||
|
# API access
|
||||||
|
homeassistant: true
|
||||||
|
homeassistant_api: true
|
||||||
|
|
||||||
|
# Ports exposed by the add-on
|
||||||
|
ports:
|
||||||
|
8503/tcp: 8503
|
||||||
|
# 8504/tcp: 8504
|
||||||
|
|
||||||
|
ports_description:
|
||||||
|
8503/tcp: "EOS REST server"
|
||||||
|
# 8504/tcp: "EOSdash dashboard server"
|
||||||
|
|
||||||
|
# EOSdash interface (if not ingress)
|
||||||
|
# webui: "http://[HOST]:[PORT:8504]"
|
||||||
|
|
||||||
|
# EOSdash by ingress
|
||||||
|
ingress: true
|
||||||
|
ingress_port: 8504
|
||||||
|
ingress_stream: true
|
||||||
|
panel_icon: "mdi:web"
|
||||||
|
|
||||||
|
# EOS uses several directories under /data - config, cache, output
|
||||||
|
backup_exclude:
|
||||||
|
- /data/cache
|
||||||
|
- /data/output
|
||||||
|
|
||||||
|
# We do not use options
|
||||||
|
options: {}
|
||||||
|
schema: {}
|
||||||
37
docker-compose.ps1
Normal file
37
docker-compose.ps1
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
# docker-compose.ps1
|
||||||
|
# EOS Docker Compose launcher for Windows
|
||||||
|
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
|
||||||
|
function Is-WSL2 {
|
||||||
|
try {
|
||||||
|
docker info --format '{{.OperatingSystem}}' 2>$null | Select-String -Pattern "WSL2"
|
||||||
|
} catch {
|
||||||
|
return $false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Is-WSL2) {
|
||||||
|
Write-Host "Detected Docker running on WSL2"
|
||||||
|
|
||||||
|
# Linux path inside WSL
|
||||||
|
$User = $env:USERNAME.ToLower()
|
||||||
|
$DockerComposeDataDir = "/home/$User/.local/share/net.akkudoktor.eos"
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Host "Detected native Windows Docker"
|
||||||
|
|
||||||
|
$HomeDir = [Environment]::GetFolderPath("UserProfile")
|
||||||
|
$DockerComposeDataDir = Join-Path $HomeDir "AppData\Local\net.akkudoktor.eos"
|
||||||
|
$DockerComposeDataDir = $DockerComposeDataDir.Replace("\", "/")
|
||||||
|
|
||||||
|
if (-not (Test-Path $DockerComposeDataDir)) {
|
||||||
|
New-Item -ItemType Directory -Path $DockerComposeDataDir -Force | Out-Null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$env:DOCKER_COMPOSE_DATA_DIR = $DockerComposeDataDir
|
||||||
|
|
||||||
|
Write-Host "EOS data dir: '$env:DOCKER_COMPOSE_DATA_DIR'"
|
||||||
|
|
||||||
|
docker compose -f docker-compose.yml up -d
|
||||||
@@ -1,11 +1,10 @@
|
|||||||
---
|
---
|
||||||
networks:
|
networks:
|
||||||
default:
|
default:
|
||||||
external: true
|
|
||||||
name: "eos"
|
name: "eos"
|
||||||
services:
|
services:
|
||||||
eos:
|
eos:
|
||||||
image: "akkudoktor/eos:${EOS_VERSION}"
|
image: "akkudoktor/eos:${VERSION}"
|
||||||
container_name: "akkudoktoreos"
|
container_name: "akkudoktoreos"
|
||||||
read_only: true
|
read_only: true
|
||||||
build:
|
build:
|
||||||
@@ -21,12 +20,12 @@ services:
|
|||||||
- MKL_NUM_THREADS=1
|
- MKL_NUM_THREADS=1
|
||||||
- PIP_PROGRESS_BAR=off
|
- PIP_PROGRESS_BAR=off
|
||||||
- PIP_NO_COLOR=1
|
- PIP_NO_COLOR=1
|
||||||
- EOS_CONFIG_DIR=config
|
|
||||||
- EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
- EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
||||||
- EOS_SERVER__HOST=0.0.0.0
|
- EOS_SERVER__HOST=0.0.0.0
|
||||||
- EOS_SERVER__PORT=8503
|
- EOS_SERVER__PORT=8503
|
||||||
- EOS_SERVER__EOSDASH_HOST=0.0.0.0
|
- EOS_SERVER__EOSDASH_HOST=0.0.0.0
|
||||||
- EOS_SERVER__EOSDASH_PORT=8504
|
- EOS_SERVER__EOSDASH_PORT=8504
|
||||||
|
- DOCKER_COMPOSE_DATA_DIR=${HOME}/.local/share/net.akkudoktor.eos
|
||||||
ulimits:
|
ulimits:
|
||||||
nproc: 65535
|
nproc: 65535
|
||||||
nofile: 65535
|
nofile: 65535
|
||||||
@@ -38,9 +37,6 @@ services:
|
|||||||
- "${EOS_SERVER__PORT}:8503"
|
- "${EOS_SERVER__PORT}:8503"
|
||||||
- "${EOS_SERVER__EOSDASH_PORT}:8504"
|
- "${EOS_SERVER__EOSDASH_PORT}:8504"
|
||||||
|
|
||||||
# Volume mount configuration (optional)
|
# Volume mount configuration
|
||||||
# Example volume mounts (uncomment to use):
|
volumes:
|
||||||
# volumes:
|
- ${DOCKER_COMPOSE_DATA_DIR}:/data:rw
|
||||||
# - ./config:/opt/eos/config # Mount local config directory
|
|
||||||
# - ./cache:/opt/eos/cache # Mount local cache directory
|
|
||||||
# - ./output:/opt/eos/output # Mount local output directory
|
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
:caption: Configuration Table
|
:caption: Configuration Table
|
||||||
|
|
||||||
|
../_generated/configadapter.md
|
||||||
../_generated/configcache.md
|
../_generated/configcache.md
|
||||||
../_generated/configdevices.md
|
../_generated/configdevices.md
|
||||||
../_generated/configelecprice.md
|
../_generated/configelecprice.md
|
||||||
|
|||||||
223
docs/_generated/configadapter.md
Normal file
223
docs/_generated/configadapter.md
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
## Adapter Configuration
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
:::{table} adapter
|
||||||
|
:widths: 10 20 10 5 5 30
|
||||||
|
:align: left
|
||||||
|
|
||||||
|
| Name | Environment Variable | Type | Read-Only | Default | Description |
|
||||||
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
|
| homeassistant | `EOS_ADAPTER__HOMEASSISTANT` | `HomeAssistantAdapterCommonSettings` | `rw` | `required` | Home Assistant adapter settings. |
|
||||||
|
| nodered | `EOS_ADAPTER__NODERED` | `NodeREDAdapterCommonSettings` | `rw` | `required` | NodeRED adapter settings. |
|
||||||
|
| provider | `EOS_ADAPTER__PROVIDER` | `Optional[list[str]]` | `rw` | `None` | List of adapter provider id(s) of provider(s) to be used. |
|
||||||
|
| providers | | `list[str]` | `ro` | `N/A` | Available electricity price provider ids. |
|
||||||
|
:::
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Input**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"adapter": {
|
||||||
|
"provider": [
|
||||||
|
"HomeAssistant"
|
||||||
|
],
|
||||||
|
"homeassistant": {
|
||||||
|
"config_entity_ids": null,
|
||||||
|
"load_emr_entity_ids": null,
|
||||||
|
"pv_production_emr_entity_ids": null,
|
||||||
|
"device_measurement_entity_ids": null,
|
||||||
|
"device_instruction_entity_ids": null,
|
||||||
|
"solution_entity_ids": null,
|
||||||
|
"homeassistant_entity_ids": [],
|
||||||
|
"eos_solution_entity_ids": [],
|
||||||
|
"eos_device_instruction_entity_ids": []
|
||||||
|
},
|
||||||
|
"nodered": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 1880
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"adapter": {
|
||||||
|
"provider": [
|
||||||
|
"HomeAssistant"
|
||||||
|
],
|
||||||
|
"homeassistant": {
|
||||||
|
"config_entity_ids": null,
|
||||||
|
"load_emr_entity_ids": null,
|
||||||
|
"pv_production_emr_entity_ids": null,
|
||||||
|
"device_measurement_entity_ids": null,
|
||||||
|
"device_instruction_entity_ids": null,
|
||||||
|
"solution_entity_ids": null,
|
||||||
|
"homeassistant_entity_ids": [],
|
||||||
|
"eos_solution_entity_ids": [],
|
||||||
|
"eos_device_instruction_entity_ids": []
|
||||||
|
},
|
||||||
|
"nodered": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 1880
|
||||||
|
},
|
||||||
|
"providers": [
|
||||||
|
"HomeAssistant",
|
||||||
|
"NodeRED"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
### Common settings for the NodeRED adapter
|
||||||
|
|
||||||
|
The Node-RED adapter sends to HTTP IN nodes.
|
||||||
|
|
||||||
|
This is the example flow:
|
||||||
|
|
||||||
|
[HTTP In \\<URL\\>] -> [Function (parse payload)] -> [Debug] -> [HTTP Response]
|
||||||
|
|
||||||
|
There are two URLs that are used:
|
||||||
|
|
||||||
|
- GET /eos/data_aquisition
|
||||||
|
The GET is issued before the optimization.
|
||||||
|
- POST /eos/control_dispatch
|
||||||
|
The POST is issued after the optimization.
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
:::{table} adapter::nodered
|
||||||
|
:widths: 10 10 5 5 30
|
||||||
|
:align: left
|
||||||
|
|
||||||
|
| Name | Type | Read-Only | Default | Description |
|
||||||
|
| ---- | ---- | --------- | ------- | ----------- |
|
||||||
|
| host | `Optional[str]` | `rw` | `127.0.0.1` | Node-RED server IP address. Defaults to 127.0.0.1. |
|
||||||
|
| port | `Optional[int]` | `rw` | `1880` | Node-RED server IP port number. Defaults to 1880. |
|
||||||
|
:::
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Input/Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"adapter": {
|
||||||
|
"nodered": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 1880
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
### Common settings for the home assistant adapter
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
:::{table} adapter::homeassistant
|
||||||
|
:widths: 10 10 5 5 30
|
||||||
|
:align: left
|
||||||
|
|
||||||
|
| Name | Type | Read-Only | Default | Description |
|
||||||
|
| ---- | ---- | --------- | ------- | ----------- |
|
||||||
|
| config_entity_ids | `Optional[dict[str, str]]` | `rw` | `None` | Mapping of EOS config keys to Home Assistant entity IDs.
|
||||||
|
The config key has to be given by a ‘/’-separated path
|
||||||
|
e.g. devices/batteries/0/capacity_wh |
|
||||||
|
| device_instruction_entity_ids | `Optional[list[str]]` | `rw` | `None` | Entity IDs for device (resource) instructions to be updated by EOS.
|
||||||
|
The device ids (resource ids) have to be prepended by 'sensor.eos_' to build the entity_id.
|
||||||
|
E.g. The instruction for device id 'battery1' becomes the entity_id 'sensor.eos_battery1'. |
|
||||||
|
| device_measurement_entity_ids | `Optional[dict[str, str]]` | `rw` | `None` | Mapping of EOS measurement keys used by device (resource) simulations to Home Assistant entity IDs. |
|
||||||
|
| eos_device_instruction_entity_ids | `list[str]` | `ro` | `N/A` | Entity IDs for energy management instructions available at EOS. |
|
||||||
|
| eos_solution_entity_ids | `list[str]` | `ro` | `N/A` | Entity IDs for optimization solution available at EOS. |
|
||||||
|
| homeassistant_entity_ids | `list[str]` | `ro` | `N/A` | Entity IDs available at Home Assistant. |
|
||||||
|
| load_emr_entity_ids | `Optional[list[str]]` | `rw` | `None` | Entity ID(s) of load energy meter reading [kWh] |
|
||||||
|
| pv_production_emr_entity_ids | `Optional[list[str]]` | `rw` | `None` | Entity ID(s) of PV production energy meter reading [kWh] |
|
||||||
|
| solution_entity_ids | `Optional[list[str]]` | `rw` | `None` | Entity IDs for optimization solution keys to be updated by EOS.
|
||||||
|
The solution keys have to be prepended by 'sensor.eos_' to build the entity_id.
|
||||||
|
E.g. solution key 'battery1_idle_op_mode' becomes the entity_id 'sensor.eos_battery1_idle_op_mode'. |
|
||||||
|
:::
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Input**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"adapter": {
|
||||||
|
"homeassistant": {
|
||||||
|
"config_entity_ids": {
|
||||||
|
"devices/batteries/0/capacity_wh": "sensor.battery1_capacity"
|
||||||
|
},
|
||||||
|
"load_emr_entity_ids": [
|
||||||
|
"sensor.load_energy_total_kwh"
|
||||||
|
],
|
||||||
|
"pv_production_emr_entity_ids": [
|
||||||
|
"sensor.pv_energy_total_kwh"
|
||||||
|
],
|
||||||
|
"device_measurement_entity_ids": {
|
||||||
|
"ev11_soc_factor": "sensor.ev11_soc_factor",
|
||||||
|
"battery1_soc_factor": "sensor.battery1_soc_factor"
|
||||||
|
},
|
||||||
|
"device_instruction_entity_ids": [
|
||||||
|
"sensor.eos_battery1"
|
||||||
|
],
|
||||||
|
"solution_entity_ids": [
|
||||||
|
"sensor.eos_battery1_idle_mode_mode"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"adapter": {
|
||||||
|
"homeassistant": {
|
||||||
|
"config_entity_ids": {
|
||||||
|
"devices/batteries/0/capacity_wh": "sensor.battery1_capacity"
|
||||||
|
},
|
||||||
|
"load_emr_entity_ids": [
|
||||||
|
"sensor.load_energy_total_kwh"
|
||||||
|
],
|
||||||
|
"pv_production_emr_entity_ids": [
|
||||||
|
"sensor.pv_energy_total_kwh"
|
||||||
|
],
|
||||||
|
"device_measurement_entity_ids": {
|
||||||
|
"ev11_soc_factor": "sensor.ev11_soc_factor",
|
||||||
|
"battery1_soc_factor": "sensor.battery1_soc_factor"
|
||||||
|
},
|
||||||
|
"device_instruction_entity_ids": [
|
||||||
|
"sensor.eos_battery1"
|
||||||
|
],
|
||||||
|
"solution_entity_ids": [
|
||||||
|
"sensor.eos_battery1_idle_mode_mode"
|
||||||
|
],
|
||||||
|
"homeassistant_entity_ids": [],
|
||||||
|
"eos_solution_entity_ids": [],
|
||||||
|
"eos_device_instruction_entity_ids": []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
@@ -15,7 +15,7 @@
|
|||||||
| max_electric_vehicles | `EOS_DEVICES__MAX_ELECTRIC_VEHICLES` | `Optional[int]` | `rw` | `None` | Maximum number of electric vehicles that can be set |
|
| max_electric_vehicles | `EOS_DEVICES__MAX_ELECTRIC_VEHICLES` | `Optional[int]` | `rw` | `None` | Maximum number of electric vehicles that can be set |
|
||||||
| max_home_appliances | `EOS_DEVICES__MAX_HOME_APPLIANCES` | `Optional[int]` | `rw` | `None` | Maximum number of home_appliances that can be set |
|
| max_home_appliances | `EOS_DEVICES__MAX_HOME_APPLIANCES` | `Optional[int]` | `rw` | `None` | Maximum number of home_appliances that can be set |
|
||||||
| max_inverters | `EOS_DEVICES__MAX_INVERTERS` | `Optional[int]` | `rw` | `None` | Maximum number of inverters that can be set |
|
| max_inverters | `EOS_DEVICES__MAX_INVERTERS` | `Optional[int]` | `rw` | `None` | Maximum number of inverters that can be set |
|
||||||
| measurement_keys | | `Optional[list[str]]` | `ro` | `N/A` | None |
|
| measurement_keys | | `Optional[list[str]]` | `ro` | `N/A` | Return the measurement keys for the resource/ device stati that are measurements. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
@@ -36,7 +36,19 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.0,
|
"levelized_cost_of_storage_kwh": 0.0,
|
||||||
"max_charge_power_w": 5000,
|
"max_charge_power_w": 5000,
|
||||||
"min_charge_power_w": 50,
|
"min_charge_power_w": 50,
|
||||||
"charge_rates": "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 0,
|
"min_soc_percentage": 0,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
@@ -63,7 +75,19 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.0,
|
"levelized_cost_of_storage_kwh": 0.0,
|
||||||
"max_charge_power_w": 5000,
|
"max_charge_power_w": 5000,
|
||||||
"min_charge_power_w": 50,
|
"min_charge_power_w": 50,
|
||||||
"charge_rates": "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 0,
|
"min_soc_percentage": 0,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
@@ -107,7 +131,19 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.0,
|
"levelized_cost_of_storage_kwh": 0.0,
|
||||||
"max_charge_power_w": 5000,
|
"max_charge_power_w": 5000,
|
||||||
"min_charge_power_w": 50,
|
"min_charge_power_w": 50,
|
||||||
"charge_rates": "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 0,
|
"min_soc_percentage": 0,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
@@ -134,7 +170,19 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.0,
|
"levelized_cost_of_storage_kwh": 0.0,
|
||||||
"max_charge_power_w": 5000,
|
"max_charge_power_w": 5000,
|
||||||
"min_charge_power_w": 50,
|
"min_charge_power_w": 50,
|
||||||
"charge_rates": "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 0,
|
"min_soc_percentage": 0,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
@@ -185,7 +233,7 @@
|
|||||||
| battery_id | `Optional[str]` | `rw` | `None` | ID of battery controlled by this inverter. |
|
| battery_id | `Optional[str]` | `rw` | `None` | ID of battery controlled by this inverter. |
|
||||||
| device_id | `str` | `rw` | `<unknown>` | ID of device |
|
| device_id | `str` | `rw` | `<unknown>` | ID of device |
|
||||||
| max_power_w | `Optional[float]` | `rw` | `None` | Maximum power [W]. |
|
| max_power_w | `Optional[float]` | `rw` | `None` | Maximum power [W]. |
|
||||||
| measurement_keys | `Optional[list[str]]` | `ro` | `N/A` | None |
|
| measurement_keys | `Optional[list[str]]` | `ro` | `N/A` | Measurement keys for the inverter stati that are measurements. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
@@ -242,7 +290,7 @@
|
|||||||
| consumption_wh | `int` | `rw` | `required` | Energy consumption [Wh]. |
|
| consumption_wh | `int` | `rw` | `required` | Energy consumption [Wh]. |
|
||||||
| device_id | `str` | `rw` | `<unknown>` | ID of device |
|
| device_id | `str` | `rw` | `<unknown>` | ID of device |
|
||||||
| duration_h | `int` | `rw` | `required` | Usage duration in hours [0 ... 24]. |
|
| duration_h | `int` | `rw` | `required` | Usage duration in hours [0 ... 24]. |
|
||||||
| measurement_keys | `Optional[list[str]]` | `ro` | `N/A` | None |
|
| measurement_keys | `Optional[list[str]]` | `ro` | `N/A` | Measurement keys for the home appliance stati that are measurements. |
|
||||||
| time_windows | `Optional[akkudoktoreos.utils.datetimeutil.TimeWindowSequence]` | `rw` | `None` | Sequence of allowed time windows. Defaults to optimization general time window. |
|
| time_windows | `Optional[akkudoktoreos.utils.datetimeutil.TimeWindowSequence]` | `rw` | `None` | Sequence of allowed time windows. Defaults to optimization general time window. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
@@ -320,19 +368,19 @@
|
|||||||
| Name | Type | Read-Only | Default | Description |
|
| Name | Type | Read-Only | Default | Description |
|
||||||
| ---- | ---- | --------- | ------- | ----------- |
|
| ---- | ---- | --------- | ------- | ----------- |
|
||||||
| capacity_wh | `int` | `rw` | `8000` | Capacity [Wh]. |
|
| capacity_wh | `int` | `rw` | `8000` | Capacity [Wh]. |
|
||||||
| charge_rates | `Optional[numpydantic.vendor.npbase_meta_classes.NDArray]` | `rw` | `[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]` | Charge rates as factor of maximum charging power [0.00 ... 1.00]. None triggers fallback to default charge-rates. |
|
| charge_rates | `Optional[list[float]]` | `rw` | `[0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]` | Charge rates as factor of maximum charging power [0.00 ... 1.00]. None triggers fallback to default charge-rates. |
|
||||||
| charging_efficiency | `float` | `rw` | `0.88` | Charging efficiency [0.01 ... 1.00]. |
|
| charging_efficiency | `float` | `rw` | `0.88` | Charging efficiency [0.01 ... 1.00]. |
|
||||||
| device_id | `str` | `rw` | `<unknown>` | ID of device |
|
| device_id | `str` | `rw` | `<unknown>` | ID of device |
|
||||||
| discharging_efficiency | `float` | `rw` | `0.88` | Discharge efficiency [0.01 ... 1.00]. |
|
| discharging_efficiency | `float` | `rw` | `0.88` | Discharge efficiency [0.01 ... 1.00]. |
|
||||||
| levelized_cost_of_storage_kwh | `float` | `rw` | `0.0` | Levelized cost of storage (LCOS), the average lifetime cost of delivering one kWh [€/kWh]. |
|
| levelized_cost_of_storage_kwh | `float` | `rw` | `0.0` | Levelized cost of storage (LCOS), the average lifetime cost of delivering one kWh [€/kWh]. |
|
||||||
| max_charge_power_w | `Optional[float]` | `rw` | `5000` | Maximum charging power [W]. |
|
| max_charge_power_w | `Optional[float]` | `rw` | `5000` | Maximum charging power [W]. |
|
||||||
| max_soc_percentage | `int` | `rw` | `100` | Maximum state of charge (SOC) as percentage of capacity [%]. |
|
| max_soc_percentage | `int` | `rw` | `100` | Maximum state of charge (SOC) as percentage of capacity [%]. |
|
||||||
| measurement_key_power_3_phase_sym_w | `str` | `ro` | `N/A` | None |
|
| measurement_key_power_3_phase_sym_w | `str` | `ro` | `N/A` | Measurement key for the symmetric 3 phase power the battery is charged or discharged with [W]. |
|
||||||
| measurement_key_power_l1_w | `str` | `ro` | `N/A` | None |
|
| measurement_key_power_l1_w | `str` | `ro` | `N/A` | Measurement key for the L1 power the battery is charged or discharged with [W]. |
|
||||||
| measurement_key_power_l2_w | `str` | `ro` | `N/A` | None |
|
| measurement_key_power_l2_w | `str` | `ro` | `N/A` | Measurement key for the L2 power the battery is charged or discharged with [W]. |
|
||||||
| measurement_key_power_l3_w | `str` | `ro` | `N/A` | None |
|
| measurement_key_power_l3_w | `str` | `ro` | `N/A` | Measurement key for the L3 power the battery is charged or discharged with [W]. |
|
||||||
| measurement_key_soc_factor | `str` | `ro` | `N/A` | None |
|
| measurement_key_soc_factor | `str` | `ro` | `N/A` | Measurement key for the battery state of charge (SoC) as factor of total capacity [0.0 ... 1.0]. |
|
||||||
| measurement_keys | `Optional[list[str]]` | `ro` | `N/A` | None |
|
| measurement_keys | `Optional[list[str]]` | `ro` | `N/A` | Measurement keys for the battery stati that are measurements. |
|
||||||
| min_charge_power_w | `Optional[float]` | `rw` | `50` | Minimum charging power [W]. |
|
| min_charge_power_w | `Optional[float]` | `rw` | `50` | Minimum charging power [W]. |
|
||||||
| min_soc_percentage | `int` | `rw` | `0` | Minimum state of charge (SOC) as percentage of capacity [%]. This is the target SoC for charging |
|
| min_soc_percentage | `int` | `rw` | `0` | Minimum state of charge (SOC) as percentage of capacity [%]. This is the target SoC for charging |
|
||||||
:::
|
:::
|
||||||
@@ -355,7 +403,13 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.12,
|
"levelized_cost_of_storage_kwh": 0.12,
|
||||||
"max_charge_power_w": 5000.0,
|
"max_charge_power_w": 5000.0,
|
||||||
"min_charge_power_w": 50.0,
|
"min_charge_power_w": 50.0,
|
||||||
"charge_rates": "[0. 0.25 0.5 0.75 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.25,
|
||||||
|
0.5,
|
||||||
|
0.75,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 10,
|
"min_soc_percentage": 10,
|
||||||
"max_soc_percentage": 100
|
"max_soc_percentage": 100
|
||||||
}
|
}
|
||||||
@@ -382,7 +436,13 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.12,
|
"levelized_cost_of_storage_kwh": 0.12,
|
||||||
"max_charge_power_w": 5000.0,
|
"max_charge_power_w": 5000.0,
|
||||||
"min_charge_power_w": 50.0,
|
"min_charge_power_w": 50.0,
|
||||||
"charge_rates": "[0. 0.25 0.5 0.75 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.25,
|
||||||
|
0.5,
|
||||||
|
0.75,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 10,
|
"min_soc_percentage": 10,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
|
|||||||
@@ -11,12 +11,13 @@
|
|||||||
| elecpriceimport | `EOS_ELECPRICE__ELECPRICEIMPORT` | `ElecPriceImportCommonSettings` | `rw` | `required` | Import provider settings. |
|
| elecpriceimport | `EOS_ELECPRICE__ELECPRICEIMPORT` | `ElecPriceImportCommonSettings` | `rw` | `required` | Import provider settings. |
|
||||||
| energycharts | `EOS_ELECPRICE__ENERGYCHARTS` | `ElecPriceEnergyChartsCommonSettings` | `rw` | `required` | Energy Charts provider settings. |
|
| energycharts | `EOS_ELECPRICE__ENERGYCHARTS` | `ElecPriceEnergyChartsCommonSettings` | `rw` | `required` | Energy Charts provider settings. |
|
||||||
| provider | `EOS_ELECPRICE__PROVIDER` | `Optional[str]` | `rw` | `None` | Electricity price provider id of provider to be used. |
|
| provider | `EOS_ELECPRICE__PROVIDER` | `Optional[str]` | `rw` | `None` | Electricity price provider id of provider to be used. |
|
||||||
|
| providers | | `list[str]` | `ro` | `N/A` | Available electricity price provider ids. |
|
||||||
| vat_rate | `EOS_ELECPRICE__VAT_RATE` | `Optional[float]` | `rw` | `1.19` | VAT rate factor applied to electricity price when charges are used. |
|
| vat_rate | `EOS_ELECPRICE__VAT_RATE` | `Optional[float]` | `rw` | `1.19` | VAT rate factor applied to electricity price when charges are used. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
<!-- pyml disable no-emphasis-as-heading -->
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
**Example Input/Output**
|
**Example Input**
|
||||||
<!-- pyml enable no-emphasis-as-heading -->
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
@@ -38,6 +39,34 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"elecprice": {
|
||||||
|
"provider": "ElecPriceAkkudoktor",
|
||||||
|
"charges_kwh": 0.21,
|
||||||
|
"vat_rate": 1.19,
|
||||||
|
"elecpriceimport": {
|
||||||
|
"import_file_path": null,
|
||||||
|
"import_json": null
|
||||||
|
},
|
||||||
|
"energycharts": {
|
||||||
|
"bidding_zone": "DE-LU"
|
||||||
|
},
|
||||||
|
"providers": [
|
||||||
|
"ElecPriceAkkudoktor",
|
||||||
|
"ElecPriceEnergyCharts",
|
||||||
|
"ElecPriceImport"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### Common settings for Energy Charts electricity price provider
|
### Common settings for Energy Charts electricity price provider
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
|
|||||||
@@ -3,6 +3,26 @@
|
|||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
|
"adapter": {
|
||||||
|
"provider": [
|
||||||
|
"HomeAssistant"
|
||||||
|
],
|
||||||
|
"homeassistant": {
|
||||||
|
"config_entity_ids": null,
|
||||||
|
"load_emr_entity_ids": null,
|
||||||
|
"pv_production_emr_entity_ids": null,
|
||||||
|
"device_measurement_entity_ids": null,
|
||||||
|
"device_instruction_entity_ids": null,
|
||||||
|
"solution_entity_ids": null,
|
||||||
|
"homeassistant_entity_ids": [],
|
||||||
|
"eos_solution_entity_ids": [],
|
||||||
|
"eos_device_instruction_entity_ids": []
|
||||||
|
},
|
||||||
|
"nodered": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 1880
|
||||||
|
}
|
||||||
|
},
|
||||||
"cache": {
|
"cache": {
|
||||||
"subpath": "cache",
|
"subpath": "cache",
|
||||||
"cleanup_interval": 300.0
|
"cleanup_interval": 300.0
|
||||||
@@ -17,7 +37,19 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.0,
|
"levelized_cost_of_storage_kwh": 0.0,
|
||||||
"max_charge_power_w": 5000,
|
"max_charge_power_w": 5000,
|
||||||
"min_charge_power_w": 50,
|
"min_charge_power_w": 50,
|
||||||
"charge_rates": "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 0,
|
"min_soc_percentage": 0,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
@@ -44,7 +76,19 @@
|
|||||||
"levelized_cost_of_storage_kwh": 0.0,
|
"levelized_cost_of_storage_kwh": 0.0,
|
||||||
"max_charge_power_w": 5000,
|
"max_charge_power_w": 5000,
|
||||||
"min_charge_power_w": 50,
|
"min_charge_power_w": 50,
|
||||||
"charge_rates": "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]",
|
"charge_rates": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"min_soc_percentage": 0,
|
"min_soc_percentage": 0,
|
||||||
"max_soc_percentage": 100,
|
"max_soc_percentage": 100,
|
||||||
"measurement_key_soc_factor": "battery1-soc-factor",
|
"measurement_key_soc_factor": "battery1-soc-factor",
|
||||||
@@ -92,7 +136,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"general": {
|
"general": {
|
||||||
"version": "0.2.0+dev.4dbc2d",
|
"version": "0.2.0.dev70048701",
|
||||||
"data_folder_path": null,
|
"data_folder_path": null,
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
|
|||||||
@@ -9,11 +9,12 @@
|
|||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| provider | `EOS_FEEDINTARIFF__PROVIDER` | `Optional[str]` | `rw` | `None` | Feed in tariff provider id of provider to be used. |
|
| provider | `EOS_FEEDINTARIFF__PROVIDER` | `Optional[str]` | `rw` | `None` | Feed in tariff provider id of provider to be used. |
|
||||||
| provider_settings | `EOS_FEEDINTARIFF__PROVIDER_SETTINGS` | `FeedInTariffCommonProviderSettings` | `rw` | `required` | Provider settings |
|
| provider_settings | `EOS_FEEDINTARIFF__PROVIDER_SETTINGS` | `FeedInTariffCommonProviderSettings` | `rw` | `required` | Provider settings |
|
||||||
|
| providers | | `list[str]` | `ro` | `N/A` | Available feed in tariff provider ids. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
<!-- pyml disable no-emphasis-as-heading -->
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
**Example Input/Output**
|
**Example Input**
|
||||||
<!-- pyml enable no-emphasis-as-heading -->
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
@@ -30,6 +31,28 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"feedintariff": {
|
||||||
|
"provider": "FeedInTariffFixed",
|
||||||
|
"provider_settings": {
|
||||||
|
"FeedInTariffFixed": null,
|
||||||
|
"FeedInTariffImport": null
|
||||||
|
},
|
||||||
|
"providers": [
|
||||||
|
"FeedInTariffFixed",
|
||||||
|
"FeedInTariffImport"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### Common settings for feed in tariff data import from file or JSON string
|
### Common settings for feed in tariff data import from file or JSON string
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
|
|||||||
@@ -1,17 +1,4 @@
|
|||||||
## Settings for common configuration
|
## General settings
|
||||||
|
|
||||||
General configuration to set directories of cache and output files and system location (latitude
|
|
||||||
and longitude).
|
|
||||||
Validators ensure each parameter is within a specified range. A computed property, `timezone`,
|
|
||||||
determines the time zone based on latitude and longitude.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
|
|
||||||
longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
|
|
||||||
|
|
||||||
Properties:
|
|
||||||
timezone (Optional[str]): Computed time zone string based on the specified latitude
|
|
||||||
and longitude.
|
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
:::{table} general
|
:::{table} general
|
||||||
@@ -20,15 +7,16 @@ Properties:
|
|||||||
|
|
||||||
| Name | Environment Variable | Type | Read-Only | Default | Description |
|
| Name | Environment Variable | Type | Read-Only | Default | Description |
|
||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| config_file_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | None |
|
| config_file_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Path to EOS configuration file. |
|
||||||
| config_folder_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | None |
|
| config_folder_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Path to EOS configuration directory. |
|
||||||
| data_folder_path | `EOS_GENERAL__DATA_FOLDER_PATH` | `Optional[pathlib.Path]` | `rw` | `None` | Path to EOS data directory. |
|
| data_folder_path | `EOS_GENERAL__DATA_FOLDER_PATH` | `Optional[pathlib.Path]` | `rw` | `None` | Path to EOS data directory. |
|
||||||
| data_output_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | None |
|
| data_output_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Computed data_output_path based on data_folder_path. |
|
||||||
| data_output_subpath | `EOS_GENERAL__DATA_OUTPUT_SUBPATH` | `Optional[pathlib.Path]` | `rw` | `output` | Sub-path for the EOS output data directory. |
|
| data_output_subpath | `EOS_GENERAL__DATA_OUTPUT_SUBPATH` | `Optional[pathlib.Path]` | `rw` | `output` | Sub-path for the EOS output data directory. |
|
||||||
| latitude | `EOS_GENERAL__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°) |
|
| home_assistant_addon | | `bool` | `ro` | `N/A` | EOS is running as home assistant add-on. |
|
||||||
| longitude | `EOS_GENERAL__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees, within -180 to 180 (°) |
|
| latitude | `EOS_GENERAL__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees between -90 and 90. North is positive (ISO 19115) (°) |
|
||||||
| timezone | | `Optional[str]` | `ro` | `N/A` | None |
|
| longitude | `EOS_GENERAL__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees within -180 to 180 (°) |
|
||||||
| version | `EOS_GENERAL__VERSION` | `str` | `rw` | `0.2.0+dev.4dbc2d` | Configuration file version. Used to check compatibility. |
|
| timezone | | `Optional[str]` | `ro` | `N/A` | Computed timezone based on latitude and longitude. |
|
||||||
|
| version | `EOS_GENERAL__VERSION` | `str` | `rw` | `0.2.0.dev70048701` | Configuration file version. Used to check compatibility. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
@@ -40,7 +28,7 @@ Properties:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"general": {
|
"general": {
|
||||||
"version": "0.2.0+dev.4dbc2d",
|
"version": "0.2.0.dev70048701",
|
||||||
"data_folder_path": null,
|
"data_folder_path": null,
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
@@ -58,7 +46,7 @@ Properties:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"general": {
|
"general": {
|
||||||
"version": "0.2.0+dev.4dbc2d",
|
"version": "0.2.0.dev70048701",
|
||||||
"data_folder_path": null,
|
"data_folder_path": null,
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
@@ -66,7 +54,8 @@ Properties:
|
|||||||
"timezone": "Europe/Berlin",
|
"timezone": "Europe/Berlin",
|
||||||
"data_output_path": null,
|
"data_output_path": null,
|
||||||
"config_folder_path": "/home/user/.config/net.akkudoktoreos.net",
|
"config_folder_path": "/home/user/.config/net.akkudoktoreos.net",
|
||||||
"config_file_path": "/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
|
"config_file_path": "/home/user/.config/net.akkudoktoreos.net/EOS.config.json",
|
||||||
|
"home_assistant_addon": false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -9,11 +9,12 @@
|
|||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| provider | `EOS_LOAD__PROVIDER` | `Optional[str]` | `rw` | `None` | Load provider id of provider to be used. |
|
| provider | `EOS_LOAD__PROVIDER` | `Optional[str]` | `rw` | `None` | Load provider id of provider to be used. |
|
||||||
| provider_settings | `EOS_LOAD__PROVIDER_SETTINGS` | `LoadCommonProviderSettings` | `rw` | `required` | Provider settings |
|
| provider_settings | `EOS_LOAD__PROVIDER_SETTINGS` | `LoadCommonProviderSettings` | `rw` | `required` | Provider settings |
|
||||||
|
| providers | | `list[str]` | `ro` | `N/A` | Available load provider ids. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
<!-- pyml disable no-emphasis-as-heading -->
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
**Example Input/Output**
|
**Example Input**
|
||||||
<!-- pyml enable no-emphasis-as-heading -->
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
@@ -31,6 +32,31 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"load": {
|
||||||
|
"provider": "LoadAkkudoktor",
|
||||||
|
"provider_settings": {
|
||||||
|
"LoadAkkudoktor": null,
|
||||||
|
"LoadVrm": null,
|
||||||
|
"LoadImport": null
|
||||||
|
},
|
||||||
|
"providers": [
|
||||||
|
"LoadAkkudoktor",
|
||||||
|
"LoadAkkudoktorAdjusted",
|
||||||
|
"LoadVrm",
|
||||||
|
"LoadImport"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### Common settings for load data import from file or JSON string
|
### Common settings for load data import from file or JSON string
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
@@ -64,7 +90,7 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### Common settings for VRM API
|
### Common settings for load forecast VRM API
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
:::{table} load::provider_settings::LoadVrm
|
:::{table} load::provider_settings::LoadVrm
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| console_level | `EOS_LOGGING__CONSOLE_LEVEL` | `Optional[str]` | `rw` | `None` | Logging level when logging to console. |
|
| console_level | `EOS_LOGGING__CONSOLE_LEVEL` | `Optional[str]` | `rw` | `None` | Logging level when logging to console. |
|
||||||
| file_level | `EOS_LOGGING__FILE_LEVEL` | `Optional[str]` | `rw` | `None` | Logging level when logging to file. |
|
| file_level | `EOS_LOGGING__FILE_LEVEL` | `Optional[str]` | `rw` | `None` | Logging level when logging to file. |
|
||||||
| file_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | None |
|
| file_path | | `Optional[pathlib.Path]` | `ro` | `N/A` | Computed log file path based on data output path. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| grid_export_emr_keys | `EOS_MEASUREMENT__GRID_EXPORT_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are energy meter readings of energy export to grid [kWh]. |
|
| grid_export_emr_keys | `EOS_MEASUREMENT__GRID_EXPORT_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are energy meter readings of energy export to grid [kWh]. |
|
||||||
| grid_import_emr_keys | `EOS_MEASUREMENT__GRID_IMPORT_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are energy meter readings of energy import from grid [kWh]. |
|
| grid_import_emr_keys | `EOS_MEASUREMENT__GRID_IMPORT_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are energy meter readings of energy import from grid [kWh]. |
|
||||||
| keys | | `list[str]` | `ro` | `N/A` | None |
|
| keys | | `list[str]` | `ro` | `N/A` | The keys of the measurements that can be stored. |
|
||||||
| load_emr_keys | `EOS_MEASUREMENT__LOAD_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are energy meter readings of a load [kWh]. |
|
| load_emr_keys | `EOS_MEASUREMENT__LOAD_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are energy meter readings of a load [kWh]. |
|
||||||
| pv_production_emr_keys | `EOS_MEASUREMENT__PV_PRODUCTION_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are PV production energy meter readings [kWh]. |
|
| pv_production_emr_keys | `EOS_MEASUREMENT__PV_PRODUCTION_EMR_KEYS` | `Optional[list[str]]` | `rw` | `None` | The keys of the measurements that are PV production energy meter readings [kWh]. |
|
||||||
:::
|
:::
|
||||||
|
|||||||
@@ -11,11 +11,12 @@
|
|||||||
| genetic | `EOS_OPTIMIZATION__GENETIC` | `Optional[akkudoktoreos.optimization.optimization.GeneticCommonSettings]` | `rw` | `None` | Genetic optimization algorithm configuration. |
|
| genetic | `EOS_OPTIMIZATION__GENETIC` | `Optional[akkudoktoreos.optimization.optimization.GeneticCommonSettings]` | `rw` | `None` | Genetic optimization algorithm configuration. |
|
||||||
| horizon_hours | `EOS_OPTIMIZATION__HORIZON_HOURS` | `Optional[int]` | `rw` | `24` | The general time window within which the energy optimization goal shall be achieved [h]. Defaults to 24 hours. |
|
| horizon_hours | `EOS_OPTIMIZATION__HORIZON_HOURS` | `Optional[int]` | `rw` | `24` | The general time window within which the energy optimization goal shall be achieved [h]. Defaults to 24 hours. |
|
||||||
| interval | `EOS_OPTIMIZATION__INTERVAL` | `Optional[int]` | `rw` | `3600` | The optimization interval [sec]. |
|
| interval | `EOS_OPTIMIZATION__INTERVAL` | `Optional[int]` | `rw` | `3600` | The optimization interval [sec]. |
|
||||||
|
| keys | | `list[str]` | `ro` | `N/A` | The keys of the solution. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
<!-- pyml disable no-emphasis-as-heading -->
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
**Example Input/Output**
|
**Example Input**
|
||||||
<!-- pyml enable no-emphasis-as-heading -->
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
@@ -38,6 +39,31 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"optimization": {
|
||||||
|
"horizon_hours": 24,
|
||||||
|
"interval": 3600,
|
||||||
|
"algorithm": "GENETIC",
|
||||||
|
"genetic": {
|
||||||
|
"individuals": 400,
|
||||||
|
"generations": 400,
|
||||||
|
"seed": null,
|
||||||
|
"penalties": {
|
||||||
|
"ev_soc_miss": 10
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"keys": []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### General Genetic Optimization Algorithm Configuration
|
### General Genetic Optimization Algorithm Configuration
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
|
|||||||
@@ -1,19 +1,5 @@
|
|||||||
## General Prediction Configuration
|
## General Prediction Configuration
|
||||||
|
|
||||||
This class provides configuration for prediction settings, allowing users to specify
|
|
||||||
parameters such as the forecast duration (in hours).
|
|
||||||
Validators ensure each parameter is within a specified range.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
hours (Optional[int]): Number of hours into the future for predictions.
|
|
||||||
Must be non-negative.
|
|
||||||
historic_hours (Optional[int]): Number of hours into the past for historical data.
|
|
||||||
Must be non-negative.
|
|
||||||
|
|
||||||
Validators:
|
|
||||||
validate_hours (int): Ensures `hours` is a non-negative integer.
|
|
||||||
validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.
|
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
:::{table} prediction
|
:::{table} prediction
|
||||||
:widths: 10 20 10 5 5 30
|
:widths: 10 20 10 5 5 30
|
||||||
|
|||||||
@@ -9,13 +9,14 @@
|
|||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| max_planes | `EOS_PVFORECAST__MAX_PLANES` | `Optional[int]` | `rw` | `0` | Maximum number of planes that can be set |
|
| max_planes | `EOS_PVFORECAST__MAX_PLANES` | `Optional[int]` | `rw` | `0` | Maximum number of planes that can be set |
|
||||||
| planes | `EOS_PVFORECAST__PLANES` | `Optional[list[akkudoktoreos.prediction.pvforecast.PVForecastPlaneSetting]]` | `rw` | `None` | Plane configuration. |
|
| planes | `EOS_PVFORECAST__PLANES` | `Optional[list[akkudoktoreos.prediction.pvforecast.PVForecastPlaneSetting]]` | `rw` | `None` | Plane configuration. |
|
||||||
| planes_azimuth | | `List[float]` | `ro` | `N/A` | None |
|
| planes_azimuth | | `List[float]` | `ro` | `N/A` | Compute a list of the azimuths per active planes. |
|
||||||
| planes_inverter_paco | | `Any` | `ro` | `N/A` | None |
|
| planes_inverter_paco | | `Any` | `ro` | `N/A` | Compute a list of the maximum power rating of the inverter per active planes. |
|
||||||
| planes_peakpower | | `List[float]` | `ro` | `N/A` | None |
|
| planes_peakpower | | `List[float]` | `ro` | `N/A` | Compute a list of the peak power per active planes. |
|
||||||
| planes_tilt | | `List[float]` | `ro` | `N/A` | None |
|
| planes_tilt | | `List[float]` | `ro` | `N/A` | Compute a list of the tilts per active planes. |
|
||||||
| planes_userhorizon | | `Any` | `ro` | `N/A` | None |
|
| planes_userhorizon | | `Any` | `ro` | `N/A` | Compute a list of the user horizon per active planes. |
|
||||||
| provider | `EOS_PVFORECAST__PROVIDER` | `Optional[str]` | `rw` | `None` | PVForecast provider id of provider to be used. |
|
| provider | `EOS_PVFORECAST__PROVIDER` | `Optional[str]` | `rw` | `None` | PVForecast provider id of provider to be used. |
|
||||||
| provider_settings | `EOS_PVFORECAST__PROVIDER_SETTINGS` | `PVForecastCommonProviderSettings` | `rw` | `required` | Provider settings |
|
| provider_settings | `EOS_PVFORECAST__PROVIDER_SETTINGS` | `PVForecastCommonProviderSettings` | `rw` | `required` | Provider settings |
|
||||||
|
| providers | | `list[str]` | `ro` | `N/A` | Available PVForecast provider ids. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
@@ -144,6 +145,11 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"max_planes": 1,
|
"max_planes": 1,
|
||||||
|
"providers": [
|
||||||
|
"PVForecastAkkudoktor",
|
||||||
|
"PVForecastVrm",
|
||||||
|
"PVForecastImport"
|
||||||
|
],
|
||||||
"planes_peakpower": [
|
"planes_peakpower": [
|
||||||
5.0,
|
5.0,
|
||||||
3.5
|
3.5
|
||||||
@@ -177,7 +183,7 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### Common settings for VRM API
|
### Common settings for PV forecast VRM API
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
:::{table} pvforecast::provider_settings::PVForecastVrm
|
:::{table} pvforecast::provider_settings::PVForecastVrm
|
||||||
|
|||||||
@@ -9,11 +9,12 @@
|
|||||||
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
| ---- | -------------------- | ---- | --------- | ------- | ----------- |
|
||||||
| provider | `EOS_WEATHER__PROVIDER` | `Optional[str]` | `rw` | `None` | Weather provider id of provider to be used. |
|
| provider | `EOS_WEATHER__PROVIDER` | `Optional[str]` | `rw` | `None` | Weather provider id of provider to be used. |
|
||||||
| provider_settings | `EOS_WEATHER__PROVIDER_SETTINGS` | `WeatherCommonProviderSettings` | `rw` | `required` | Provider settings |
|
| provider_settings | `EOS_WEATHER__PROVIDER_SETTINGS` | `WeatherCommonProviderSettings` | `rw` | `required` | Provider settings |
|
||||||
|
| providers | | `list[str]` | `ro` | `N/A` | Available weather provider ids. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
<!-- pyml disable no-emphasis-as-heading -->
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
**Example Input/Output**
|
**Example Input**
|
||||||
<!-- pyml enable no-emphasis-as-heading -->
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
@@ -29,6 +30,28 @@
|
|||||||
```
|
```
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
|
<!-- pyml disable no-emphasis-as-heading -->
|
||||||
|
**Example Output**
|
||||||
|
<!-- pyml enable no-emphasis-as-heading -->
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"weather": {
|
||||||
|
"provider": "WeatherImport",
|
||||||
|
"provider_settings": {
|
||||||
|
"WeatherImport": null
|
||||||
|
},
|
||||||
|
"providers": [
|
||||||
|
"BrightSky",
|
||||||
|
"ClearOutside",
|
||||||
|
"WeatherImport"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
### Common settings for weather data import from file or JSON string
|
### Common settings for weather data import from file or JSON string
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Akkudoktor-EOS
|
# Akkudoktor-EOS
|
||||||
|
|
||||||
**Version**: `v0.2.0+dev.4dbc2d`
|
**Version**: `v0.2.0.dev70048701`
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
**Description**: This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.
|
**Description**: This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.
|
||||||
|
|||||||
BIN
docs/_static/azimuth.gif
vendored
Normal file
BIN
docs/_static/azimuth.gif
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 28 KiB |
BIN
docs/_static/horizon_eyefish_en.png
vendored
Normal file
BIN
docs/_static/horizon_eyefish_en.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 406 KiB |
BIN
docs/_static/slope.gif
vendored
Normal file
BIN
docs/_static/slope.gif
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 24 KiB |
23
docs/akkudoktoreos/adapter.md
Normal file
23
docs/akkudoktoreos/adapter.md
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
|
(adapter-page)=
|
||||||
|
|
||||||
|
# Adapter
|
||||||
|
|
||||||
|
Adapters provide simplified integrations for home energy management systems. Besides
|
||||||
|
the standard REST interface of EOS, the adapters extend EOS by specific integration
|
||||||
|
interfaces for Home Assistant and NodeRED.
|
||||||
|
|
||||||
|
:::{admonition} Warning
|
||||||
|
:class: warning
|
||||||
|
Adapter execution is part of the energy management run. The adapters are only working
|
||||||
|
properly if cyclic energy management runs are configured.
|
||||||
|
:::
|
||||||
|
|
||||||
|
```{toctree}
|
||||||
|
:maxdepth: 2
|
||||||
|
:caption: Adapters
|
||||||
|
|
||||||
|
adapter/adapterhomeassistant.md
|
||||||
|
adapter/adapternodered.md
|
||||||
|
|
||||||
|
```
|
||||||
126
docs/akkudoktoreos/adapter/adapterhomeassistant.md
Normal file
126
docs/akkudoktoreos/adapter/adapterhomeassistant.md
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
|
(adapter-homeassistant-page)=
|
||||||
|
|
||||||
|
# Home Assistant Adapter
|
||||||
|
|
||||||
|
The Home Assistant adapter provides a bidirectional interface between
|
||||||
|
**Home Assistant (HA)** and the **Akkudoktor-EOS (EOS)** energy optimisation system.
|
||||||
|
|
||||||
|
It allows EOS to:
|
||||||
|
|
||||||
|
- **Read** entity states and attributes from Home Assistant
|
||||||
|
- **Write** optimisation results and control instructions back to Home Assistant
|
||||||
|
|
||||||
|
This enables EOS to integrate seamlessly with Home Assistant–managed devices,
|
||||||
|
sensors, and energy meters, while keeping EOS device simulations and optimisation
|
||||||
|
logic decoupled from HA-specific implementations.
|
||||||
|
|
||||||
|
## Configuration entity IDs
|
||||||
|
|
||||||
|
EOS can synchronise parts of its configuration from Home Assistant entity states.
|
||||||
|
This is particularly useful for **device (resource) parameters** that are already
|
||||||
|
provided by Home Assistant integrations, such as:
|
||||||
|
|
||||||
|
- Battery capacity
|
||||||
|
- Maximum charge or discharge power
|
||||||
|
- Nominal device ratings
|
||||||
|
|
||||||
|
These configuration values are typically consumed by EOS **device simulations**
|
||||||
|
during optimisation.
|
||||||
|
|
||||||
|
### Entity state conversion rules
|
||||||
|
|
||||||
|
When reading configuration values from entity states, the adapter applies the
|
||||||
|
following heuristics to convert the HA state into a suitable EOS value:
|
||||||
|
|
||||||
|
- **Boolean `True`**: `["y", "yes", "on", "true", "home", "open"]`
|
||||||
|
- **Boolean `False`**: `["n", "no", "off", "false", "closed"]`
|
||||||
|
- **`None`**: `["unavailable", "none"]`
|
||||||
|
- **`float`**: if the value can be converted to a floating-point number
|
||||||
|
- **`str`**: if none of the above apply
|
||||||
|
|
||||||
|
## Device instruction entity IDs
|
||||||
|
|
||||||
|
After each energy optimisation run, EOS produces **device instructions** for the
|
||||||
|
controlled resources. These instructions are written back to Home Assistant via
|
||||||
|
dedicated entities.
|
||||||
|
|
||||||
|
- The **entity state** represents the selected **operation mode** of the device.
|
||||||
|
- **Entity attributes** provide additional parameters for the operation mode, such as:
|
||||||
|
|
||||||
|
- `operation_mode_factor`
|
||||||
|
- Power or rate limits
|
||||||
|
- Mode-specific control parameters
|
||||||
|
|
||||||
|
Home Assistant automations or device integrations can then react to these entity
|
||||||
|
updates to perform the actual control actions.
|
||||||
|
|
||||||
|
## Device measurement entity IDs
|
||||||
|
|
||||||
|
Before starting an energy optimisation run, EOS retrieves **measurement values**
|
||||||
|
from Home Assistant that describe the *current state* of devices.
|
||||||
|
|
||||||
|
Typical examples include:
|
||||||
|
|
||||||
|
- Battery state of charge (SoC)
|
||||||
|
- Current power or energy levels
|
||||||
|
- Device availability or readiness indicators
|
||||||
|
|
||||||
|
These measurements are used as input for EOS **device simulations** and strongly
|
||||||
|
influence optimisation results.
|
||||||
|
|
||||||
|
## Load EMR entity IDs
|
||||||
|
|
||||||
|
Load **Energy Meter Readings (EMR)** are used to adapt and refine the **load
|
||||||
|
prediction**.
|
||||||
|
|
||||||
|
EOS retrieves these readings from Home Assistant **before** each energy management
|
||||||
|
run to align forecasts with actual consumption.
|
||||||
|
|
||||||
|
## PV production EMR entity IDs
|
||||||
|
|
||||||
|
PV production **Energy Meter Readings (EMR)** are used to adapt and refine the
|
||||||
|
**photovoltaic generation forecast**.
|
||||||
|
|
||||||
|
EOS retrieves these readings from Home Assistant **before** each optimisation run
|
||||||
|
to improve forecast accuracy based on real production data.
|
||||||
|
|
||||||
|
## Solution entity IDs
|
||||||
|
|
||||||
|
Each energy management run produces an **optimisation solution**.
|
||||||
|
|
||||||
|
In addition to device-level instructions, EOS can publish solution-level details to
|
||||||
|
dedicated Home Assistant entities. These entities are useful for:
|
||||||
|
|
||||||
|
- Debugging and validation
|
||||||
|
- Visualisation and dashboards
|
||||||
|
- Gaining deeper insight into optimisation decisions
|
||||||
|
|
||||||
|
EOS updates these entities **after** each energy management run.
|
||||||
|
|
||||||
|
## Entity state and value conversion
|
||||||
|
|
||||||
|
To adapt, scale, or transform Home Assistant entity values to match EOS
|
||||||
|
expectations, it is recommended to use
|
||||||
|
[template sensors](https://www.home-assistant.io/integrations/template/#sensor).
|
||||||
|
|
||||||
|
This allows value conversion to remain fully within Home Assistant, keeping the EOS
|
||||||
|
configuration clean and consistent.
|
||||||
|
|
||||||
|
### Example: Battery SoC conversion
|
||||||
|
|
||||||
|
Convert a battery state of charge from percentage `[0..100]` to a normalised factor
|
||||||
|
`[0.0..1.0]`:
|
||||||
|
|
||||||
|
<!-- pyml disable line-length -->
|
||||||
|
```yaml
|
||||||
|
template:
|
||||||
|
- sensor:
|
||||||
|
- name: "Battery1 SoC Factor"
|
||||||
|
unique_id: "battery1_soc_factor"
|
||||||
|
state: >
|
||||||
|
{% set bat_charge_soc = states('sensor.battery1_soc_percent') | float(100) -%}
|
||||||
|
{{ bat_charge_soc / 100.0 }}
|
||||||
|
state_class: measurement
|
||||||
|
```
|
||||||
|
<!-- pyml enable line-length -->
|
||||||
4
docs/akkudoktoreos/adapter/adapternodered.md
Normal file
4
docs/akkudoktoreos/adapter/adapternodered.md
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
|
(adapter-nodered-page)=
|
||||||
|
|
||||||
|
# NodeRED Adapter
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
% SPDX-License-Identifier: Apache-2.0
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
(configuration-page)=
|
(configuration-page)=
|
||||||
|
|
||||||
# Configuration Guideline
|
# Configuration Guide
|
||||||
|
|
||||||
The configuration controls all aspects of EOS: optimization, prediction, measurement, and energy
|
The configuration controls all aspects of EOS: optimization, prediction, measurement, and energy
|
||||||
management.
|
management.
|
||||||
|
|||||||
@@ -36,7 +36,8 @@ Through an iterative process of selection, crossover, and mutation, the algorith
|
|||||||
more effective solutions. The final result is an optimized control strategy that balances multiple
|
more effective solutions. The final result is an optimized control strategy that balances multiple
|
||||||
system goals within the constraints of the input data and configuration.
|
system goals within the constraints of the input data and configuration.
|
||||||
|
|
||||||
:::{note}
|
:::{admonition} Note
|
||||||
|
:class: note
|
||||||
You don’t need to understand the internal workings of the genetic algorithm to benefit from
|
You don’t need to understand the internal workings of the genetic algorithm to benefit from
|
||||||
automatic optimization. EOS handles everything behind the scenes based on your configuration.
|
automatic optimization. EOS handles everything behind the scenes based on your configuration.
|
||||||
However, advanced users can fine-tune the optimization behavior using additional settings like
|
However, advanced users can fine-tune the optimization behavior using additional settings like
|
||||||
|
|||||||
@@ -51,13 +51,16 @@ A dictionary with the following structure:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"start_datetime": "2024-01-01 00:00:00",
|
"start_datetime": "2024-01-01 00:00:00",
|
||||||
"interval": "1 Hour",
|
"interval": "1 hour",
|
||||||
"<prediction key>": [value, value, ...],
|
"<prediction key>": [value, value, ...],
|
||||||
"<prediction key>": [value, value, ...],
|
"<prediction key>": [value, value, ...],
|
||||||
...
|
...
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If `start_datetime` is not provided EOS defaults to the `start_datetime` of the current energy
|
||||||
|
management run. If `interval` is not provided EOS defaults to one hour.
|
||||||
|
|
||||||
#### 2. DateTimeDataFrame
|
#### 2. DateTimeDataFrame
|
||||||
|
|
||||||
A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) dataframe with a
|
A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) dataframe with a
|
||||||
@@ -402,10 +405,11 @@ represent equal angular distance around the horizon. For instance, if you have 3
|
|||||||
point is due north, the next is 10 degrees east of north, and so on, until the last point, 10
|
point is due north, the next is 10 degrees east of north, and so on, until the last point, 10
|
||||||
degrees west of north.
|
degrees west of north.
|
||||||
|
|
||||||
---
|

|
||||||
|
|
||||||
Most of the configuration options are in line with the
|
Most of the configuration options are in line with the
|
||||||
[PVLib](https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html) definition for PVGIS data.
|
[PVLib](https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html) definition
|
||||||
|
for PVGIS data.
|
||||||
|
|
||||||
Detailed definitions from **PVLib** for PVGIS data.
|
Detailed definitions from **PVLib** for PVGIS data.
|
||||||
|
|
||||||
@@ -413,12 +417,14 @@ Detailed definitions from **PVLib** for PVGIS data.
|
|||||||
|
|
||||||
Tilt angle from horizontal plane.
|
Tilt angle from horizontal plane.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
- `surface_azimuth`
|
- `surface_azimuth`
|
||||||
|
|
||||||
Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180,
|
Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180,
|
||||||
west=270). This is offset 180 degrees from the convention used by PVGIS.
|
west=270). This is offset 180 degrees from the convention used by PVGIS.
|
||||||
|
|
||||||
---
|

|
||||||
|
|
||||||
### PVForecastAkkudoktor Provider
|
### PVForecastAkkudoktor Provider
|
||||||
|
|
||||||
|
|||||||
@@ -253,6 +253,6 @@ the home appliance can be operated in two operation modes:
|
|||||||
|-----------------------|-------------------------------------------------------------------------|
|
|-----------------------|-------------------------------------------------------------------------|
|
||||||
| **RUN** | The home appliance is started and runs until the end of its power |
|
| **RUN** | The home appliance is started and runs until the end of its power |
|
||||||
| | sequence. |
|
| | sequence. |
|
||||||
| **IDLE** | The home appliance does not run. |
|
| **OFF** | The home appliance does not run. |
|
||||||
|
|
||||||
The **operation mode factor** (0.0–1.0) is ignored.
|
The **operation mode factor** (0.0–1.0) is ignored.
|
||||||
|
|||||||
@@ -328,8 +328,11 @@ For detailed Docker instructions, refer to [Installation Guideline](install-page
|
|||||||
|
|
||||||
#### Step 4.1 - Create a development branch
|
#### Step 4.1 - Create a development branch
|
||||||
|
|
||||||
|
Create a local development branch and make it known on your GitHub repo.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git checkout -b <MY_DEVELOPMENT_BRANCH>
|
git checkout -b <MY_DEVELOPMENT_BRANCH>
|
||||||
|
git push --set-upstream origin <MY_DEVELOPMENT_BRANCH>
|
||||||
```
|
```
|
||||||
|
|
||||||
Replace `<MY_DEVELOPMENT_BRANCH>` with the development branch name. The branch name shall be of the
|
Replace `<MY_DEVELOPMENT_BRANCH>` with the development branch name. The branch name shall be of the
|
||||||
@@ -591,6 +594,10 @@ Ensure your changes do not break existing functionality:
|
|||||||
|
|
||||||
Keep your code consistent with existing style and conventions.
|
Keep your code consistent with existing style and conventions.
|
||||||
|
|
||||||
|
#### Keep Python Docstrings RST Compatible
|
||||||
|
|
||||||
|
The docstrings will be parsed by Sphinx in automatic documentation generation.
|
||||||
|
|
||||||
### Use Issues for Discussion
|
### Use Issues for Discussion
|
||||||
|
|
||||||
Before making major changes, open an issue or discuss with maintainers.
|
Before making major changes, open an issue or discuss with maintainers.
|
||||||
@@ -598,3 +605,100 @@ Before making major changes, open an issue or discuss with maintainers.
|
|||||||
### Document Changes
|
### Document Changes
|
||||||
|
|
||||||
Update docstrings, comments, and any relevant documentation.
|
Update docstrings, comments, and any relevant documentation.
|
||||||
|
|
||||||
|
### Start or Reopen the Home Assistant Dev Container in VS Code
|
||||||
|
|
||||||
|
### 1. Open Visual Studio Code
|
||||||
|
|
||||||
|
Start Visual Studio Code.
|
||||||
|
|
||||||
|
### 2. Open the Command Palette
|
||||||
|
|
||||||
|
Open the Command Palette:
|
||||||
|
|
||||||
|
- **Windows / Linux:** `Ctrl + Shift + P`
|
||||||
|
- **macOS:** `Cmd + Shift + P`
|
||||||
|
|
||||||
|
### 3. Reopen the Workspace in the Dev Container
|
||||||
|
|
||||||
|
In the Command Palette, select:
|
||||||
|
|
||||||
|
```text
|
||||||
|
Dev Containers: Reopen in Container
|
||||||
|
```
|
||||||
|
|
||||||
|
VS Code will:
|
||||||
|
|
||||||
|
- Build the dev container (if required)
|
||||||
|
- Start the container
|
||||||
|
- Reopen the workspace inside the container
|
||||||
|
|
||||||
|
### 4. Start Home Assistant
|
||||||
|
|
||||||
|
Open the Command Palette again and select:
|
||||||
|
|
||||||
|
```text
|
||||||
|
Dev Terminal: Run Task... → Start Home Assistant
|
||||||
|
```
|
||||||
|
|
||||||
|
:::{admonition} Note
|
||||||
|
:class: note
|
||||||
|
Startup may take several minutes while the Home Assistant Supervisor initializes.
|
||||||
|
:::
|
||||||
|
|
||||||
|
If startup fails, you may retry after first rebuilding the container:
|
||||||
|
|
||||||
|
```text
|
||||||
|
Dev Containers: Rebuild Container without Cache
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Open Home Assistant
|
||||||
|
|
||||||
|
Once startup is complete, open your browser and navigate to:
|
||||||
|
|
||||||
|
```text
|
||||||
|
http://localhost:7123/
|
||||||
|
```
|
||||||
|
|
||||||
|
If this is your first start, complete the standard Home Assistant onboarding process.
|
||||||
|
|
||||||
|
### 6. Install the Local Akkudoktor-EOS Add-on
|
||||||
|
|
||||||
|
#### 6.1 Open the Add-on Store
|
||||||
|
|
||||||
|
In Home Assistant, navigate to:
|
||||||
|
|
||||||
|
```text
|
||||||
|
Settings → Add-ons → Add-on Store
|
||||||
|
```
|
||||||
|
|
||||||
|
Open the top-right menu (⋮), then select:
|
||||||
|
|
||||||
|
```text
|
||||||
|
Repositories → Local add-ons
|
||||||
|
```
|
||||||
|
|
||||||
|
Choose **Akkudoktor-EOS**.
|
||||||
|
|
||||||
|
#### 6.2 Install the Add-on
|
||||||
|
|
||||||
|
The Akkudoktor-EOS add-on is automatically available.
|
||||||
|
Click **Install** to begin installation.
|
||||||
|
|
||||||
|
#### 6.3 Start the Add-on
|
||||||
|
|
||||||
|
After installation completes, click **Start** in the add-on panel.
|
||||||
|
|
||||||
|
#### 6.4 Open the EOS Web Interface
|
||||||
|
|
||||||
|
In the add-on panel, click **Open Web UI** to access the EOS dashboard.
|
||||||
|
|
||||||
|
#### 6.5 Configure EOS (Optional)
|
||||||
|
|
||||||
|
In the EOS dashboard, navigate to:
|
||||||
|
|
||||||
|
```text
|
||||||
|
Config
|
||||||
|
```
|
||||||
|
|
||||||
|
to adjust configuration settings as needed.
|
||||||
|
|||||||
@@ -3,12 +3,13 @@
|
|||||||
|
|
||||||
# Installation Guide
|
# Installation Guide
|
||||||
|
|
||||||
This guide provides different methods to install AkkudoktorEOS:
|
This guide provides different methods to install Akkudoktor-EOS:
|
||||||
|
|
||||||
- Installation from Source (GitHub) (M1)
|
- Installation from Source (GitHub) (M1)
|
||||||
- Installation from Release Package (GitHub) (M2)
|
- Installation from Release Package (GitHub) (M2)
|
||||||
- Installation with Docker (DockerHub) (M3)
|
- Installation with Docker (DockerHub) (M3)
|
||||||
- Installation with Docker (docker-compose) (M4)
|
- Installation with Docker (docker-compose) (M4)
|
||||||
|
- Installation in Home Assistant (M5)
|
||||||
|
|
||||||
Choose the method that best suits your needs.
|
Choose the method that best suits your needs.
|
||||||
|
|
||||||
@@ -22,20 +23,34 @@ release see the [Revert Guideline](revert-page).
|
|||||||
|
|
||||||
Before installing, ensure you have the following:
|
Before installing, ensure you have the following:
|
||||||
|
|
||||||
### For Source / Release Installation
|
### For Source / Release Installation (M1/M2)
|
||||||
|
|
||||||
- Python 3.10 or higher
|
- Python 3.10 or higher
|
||||||
- pip
|
- pip
|
||||||
- Git (only for source)
|
- Git (only for source)
|
||||||
- Tar/Zip (for release package)
|
- Tar/Zip (for release package)
|
||||||
|
|
||||||
### For Docker Installation
|
### For Docker Installation (M3/M4)
|
||||||
|
|
||||||
- Docker Engine 20.10 or higher
|
- Docker Engine 20.10 or higher
|
||||||
- Docker Compose (optional, recommended)
|
- Docker Compose (optional, recommended)
|
||||||
|
|
||||||
|
:::{admonition} Tip
|
||||||
|
:class: note
|
||||||
See [Install Docker Engine](https://docs.docker.com/engine/install/) on how to install docker on
|
See [Install Docker Engine](https://docs.docker.com/engine/install/) on how to install docker on
|
||||||
your Linux distro.
|
your Linux distro.
|
||||||
|
:::
|
||||||
|
|
||||||
|
### For Installation in Home Assistant (M5)
|
||||||
|
|
||||||
|
- [Home Assistant Operating System](https://www.home-assistant.io/installation/)
|
||||||
|
|
||||||
|
:::{admonition} Warning
|
||||||
|
:class: warning
|
||||||
|
Akkudoktor-EOS is a [Home Assistant add-on](https://www.home-assistant.io/addons/).
|
||||||
|
[Home Assistant Container](https://www.home-assistant.io/installation/) installations don’t
|
||||||
|
have access to add-ons.
|
||||||
|
:::
|
||||||
|
|
||||||
## Installation from Source (GitHub) (M1)
|
## Installation from Source (GitHub) (M1)
|
||||||
|
|
||||||
@@ -214,7 +229,12 @@ should be available at [http://localhost:8504](http://localhost:8504).
|
|||||||
|
|
||||||
### 4) Configure EOS (M3)
|
### 4) Configure EOS (M3)
|
||||||
|
|
||||||
Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS.
|
Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS. In the dashboard,
|
||||||
|
go to:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
Config
|
||||||
|
```
|
||||||
|
|
||||||
## Installation with Docker (docker-compose) (M4)
|
## Installation with Docker (docker-compose) (M4)
|
||||||
|
|
||||||
@@ -251,37 +271,85 @@ docker logs akkudoktoreos
|
|||||||
EOS should now be accessible at [http://localhost:8503/docs](http://localhost:8503/docs) and EOSdash
|
EOS should now be accessible at [http://localhost:8503/docs](http://localhost:8503/docs) and EOSdash
|
||||||
should be available at [http://localhost:8504](http://localhost:8504).
|
should be available at [http://localhost:8504](http://localhost:8504).
|
||||||
|
|
||||||
### 4) Configure EOS
|
The configuration file is in `${HOME}/.local/share/net.akkudoktor.eos/config/EOS.config.json`.
|
||||||
|
|
||||||
Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS.
|
### 4) Configure EOS (M4)
|
||||||
|
|
||||||
|
Use EOSdash at [http://localhost:8504](http://localhost:8504) to configure EOS. In the dashboard,
|
||||||
|
go to:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
Config
|
||||||
|
```
|
||||||
|
|
||||||
|
You may edit the configuration file directly at
|
||||||
|
`${HOME}/.local/share/net.akkudoktor.eos/config/EOS.config.json`.
|
||||||
|
|
||||||
|
## Installation in Home Assistant (M5)
|
||||||
|
|
||||||
|
[](https://my.home-assistant.io/redirect/supervisor_add_addon_repository/?repository_url=https%3A%2F%2Fgithub.com%2FAkkudoktor-EOS%2FEOS)
|
||||||
|
|
||||||
|
### 1) Add the repository URL (M5)
|
||||||
|
|
||||||
|
In Home Assistant, go to:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
Settings → Add-ons → Add-on Store → ⋮ (top-right menu) → Repositories
|
||||||
|
```
|
||||||
|
|
||||||
|
and enter the URL of this Git repository:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
https://github.com/Akkudoktor-EOS/EOS
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2) Install the add-on (M5)
|
||||||
|
|
||||||
|
After adding the repository, the add-on will appear in the Add-on Store. Click `Install`.
|
||||||
|
|
||||||
|
### 3) Start the add-on (M5)
|
||||||
|
|
||||||
|
Once installed, click `Start` in the add-on panel.
|
||||||
|
|
||||||
|
### 4) Access the dashboard (M5)
|
||||||
|
|
||||||
|
Click `Open Web UI` in the add-on panel.
|
||||||
|
|
||||||
|
### 5) Configure EOS (M5)
|
||||||
|
|
||||||
|
In the dashboard, go to:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
Config
|
||||||
|
```
|
||||||
|
|
||||||
## Helpful Docker Commands
|
## Helpful Docker Commands
|
||||||
|
|
||||||
**View logs:**
|
### View logs
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker logs -f akkudoktoreos
|
docker logs -f akkudoktoreos
|
||||||
```
|
```
|
||||||
|
|
||||||
**Stop the container:**
|
### Stop the container
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker stop akkudoktoreos
|
docker stop akkudoktoreos
|
||||||
```
|
```
|
||||||
|
|
||||||
**Start the container:**
|
### Start the container
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker start akkudoktoreos
|
docker start akkudoktoreos
|
||||||
```
|
```
|
||||||
|
|
||||||
**Remove the container:**
|
### Remove the container
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker rm -f akkudoktoreos
|
docker rm -f akkudoktoreos
|
||||||
```
|
```
|
||||||
|
|
||||||
**Update to latest version:**
|
### Update to latest version
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker pull akkudoktor/eos:latest
|
docker pull akkudoktor/eos:latest
|
||||||
@@ -289,3 +357,29 @@ docker stop akkudoktoreos
|
|||||||
docker rm akkudoktoreos
|
docker rm akkudoktoreos
|
||||||
# Then run the container again with the run command
|
# Then run the container again with the run command
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Solve docker DNS not working
|
||||||
|
|
||||||
|
Switch Docker to use the real resolv.conf, not the stub.
|
||||||
|
|
||||||
|
1️⃣ Replace /etc/resolv.conf symlink
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf
|
||||||
|
```
|
||||||
|
|
||||||
|
This file contains the actual upstream DNS servers (e.g. your Fritz!Box).
|
||||||
|
|
||||||
|
2️⃣ Restart Docker
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo systemctl restart docker
|
||||||
|
```
|
||||||
|
|
||||||
|
3️⃣ Verify
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker run --rm busybox nslookup registry-1.docker.io
|
||||||
|
```
|
||||||
|
|
||||||
|
You should now see a valid IP address.
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ and how to set a **development version** after the release.
|
|||||||
|
|
||||||
| Step | Actor | Action |
|
| Step | Actor | Action |
|
||||||
|------|-------------|--------|
|
|------|-------------|--------|
|
||||||
| 1 | Contributor | Prepare a release branch **in your fork** using Commitizen |
|
| 1 | Contributor | Prepare a release branch **in your fork** |
|
||||||
| 2 | Contributor | Open a **Pull Request to upstream** (`Akkudoktor-EOS/EOS`) |
|
| 2 | Contributor | Open a **Pull Request to upstream** (`Akkudoktor-EOS/EOS`) |
|
||||||
| 3 | Maintainer | Review and **merge the release PR** |
|
| 3 | Maintainer | Review and **merge the release PR** |
|
||||||
| 4 | CI | Create the **GitHub Release and tag** |
|
| 4 | CI | Create the **GitHub Release and tag** |
|
||||||
@@ -48,7 +48,7 @@ __version__ = 0.3.0
|
|||||||
|
|
||||||
Prepare version by updating versioned files, e.g.:
|
Prepare version by updating versioned files, e.g.:
|
||||||
|
|
||||||
- haaddon/config.yaml
|
- config.yaml
|
||||||
|
|
||||||
and the generated documentation:
|
and the generated documentation:
|
||||||
|
|
||||||
@@ -132,7 +132,7 @@ See `.github/workflwows/bump-version.yml`for details.
|
|||||||
|
|
||||||
### 5️⃣ CI: Prepare the Development Version Marker
|
### 5️⃣ CI: Prepare the Development Version Marker
|
||||||
|
|
||||||
The development version marker will automatically be set by the GitHub CI action.
|
The development version marker `.dev` will automatically be set by the GitHub CI action.
|
||||||
|
|
||||||
See `.github/workflows/bump-version.yml` for details.
|
See `.github/workflows/bump-version.yml` for details.
|
||||||
|
|
||||||
|
|||||||
109
docs/develop/update.md
Normal file
109
docs/develop/update.md
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
|
(update-page)=
|
||||||
|
|
||||||
|
# Update Guide
|
||||||
|
|
||||||
|
This guide explains how to update Akkudoktor-EOS to a newer version.
|
||||||
|
|
||||||
|
- Updating from Source (M1)
|
||||||
|
- Updating from Release Package (M2)
|
||||||
|
- Updating Docker Installation (M3)
|
||||||
|
- Updating Docker Compose Installation (M4)
|
||||||
|
- Updating Home Assistant Add-on Installation (M5)
|
||||||
|
|
||||||
|
Choose the section based on how you originally [installed EOS](install-page).
|
||||||
|
|
||||||
|
:::{admonition} Tip
|
||||||
|
:class: note
|
||||||
|
If you need to revert instead, see the [Revert Guideline](revert-page).
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Updating from Source (M1)
|
||||||
|
|
||||||
|
```{eval-rst}
|
||||||
|
.. tabs::
|
||||||
|
|
||||||
|
.. tab:: Windows
|
||||||
|
|
||||||
|
.. code-block:: powershell
|
||||||
|
|
||||||
|
git pull origin main
|
||||||
|
.venv\Scripts\pip install -r requirements.txt --upgrade
|
||||||
|
|
||||||
|
.. tab:: Linux
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
git pull origin main
|
||||||
|
.venv/bin/pip install -r requirements.txt --upgrade
|
||||||
|
```
|
||||||
|
|
||||||
|
Restart EOS normally.
|
||||||
|
|
||||||
|
## Updating from Release Package (M2)
|
||||||
|
|
||||||
|
1. Download new release
|
||||||
|
2. Extract to a new directory
|
||||||
|
3. Recreate virtual environment & reinstall dependencies
|
||||||
|
4. Optionally remove previous directory
|
||||||
|
|
||||||
|
Follow steps from [Installation from Release Package (GitHub) (M2)](install-page).
|
||||||
|
|
||||||
|
## Updating Docker Installation (M3)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker pull akkudoktor/eos:latest
|
||||||
|
docker stop akkudoktoreos
|
||||||
|
docker rm akkudoktoreos
|
||||||
|
```
|
||||||
|
|
||||||
|
Then start the container again using your normal `docker run` command.
|
||||||
|
|
||||||
|
## Updating Docker Compose Installation (M4)
|
||||||
|
|
||||||
|
1. Stop & remove existing container
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker stop akkudoktoreos
|
||||||
|
docker rm akkudoktoreos
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Update source (if using source checkout) — see M1 or M2
|
||||||
|
3. Rebuild & start
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose up --build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Verify Docker Update (M3/M4)
|
||||||
|
|
||||||
|
Check logs:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker logs akkudoktoreos
|
||||||
|
```
|
||||||
|
|
||||||
|
Then visit:
|
||||||
|
|
||||||
|
- API: [http://localhost:8503/docs](http://localhost:8503/docs)
|
||||||
|
- UI: [http://localhost:8504](http://localhost:8504)
|
||||||
|
|
||||||
|
## Updating Home Assistant Add-on Installation (M5)
|
||||||
|
|
||||||
|
1. Open 'Home Assistant' and navigate to 'Settings → Add-ons'.
|
||||||
|
2. Select the 'Akkudoktor-EOS' add-on from your installed add-ons.
|
||||||
|
3. If an update is available, click 'Update'.
|
||||||
|
4. Wait for the update process to finish, then restart the add-on if prompted.
|
||||||
|
|
||||||
|
If you installed Akkudoktor-EOS from a custom repository and no update appears, open the Add-on
|
||||||
|
Store, click the '⋮' menu in the top right, and choose 'Reload' to refresh the repository.
|
||||||
|
|
||||||
|
## Backup Recommendation
|
||||||
|
|
||||||
|
Before updating, back up your config:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
EOS.config.json
|
||||||
|
```
|
||||||
|
|
||||||
|
EOS also maintains internal configuration backups.
|
||||||
@@ -28,6 +28,7 @@ develop/getting_started.md
|
|||||||
|
|
||||||
develop/CONTRIBUTING.md
|
develop/CONTRIBUTING.md
|
||||||
develop/install.md
|
develop/install.md
|
||||||
|
akkudoktoreos/configuration.md
|
||||||
develop/update.md
|
develop/update.md
|
||||||
develop/revert.md
|
develop/revert.md
|
||||||
|
|
||||||
|
|||||||
728
openapi.json
728
openapi.json
@@ -3,7 +3,7 @@
|
|||||||
"info": {
|
"info": {
|
||||||
"title": "Akkudoktor-EOS",
|
"title": "Akkudoktor-EOS",
|
||||||
"description": "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.",
|
"description": "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.",
|
||||||
"version": "v0.2.0+dev.4dbc2d"
|
"version": "v0.2.0.dev70048701"
|
||||||
},
|
},
|
||||||
"paths": {
|
"paths": {
|
||||||
"/v1/admin/cache/clear": {
|
"/v1/admin/cache/clear": {
|
||||||
@@ -176,6 +176,9 @@
|
|||||||
},
|
},
|
||||||
"/v1/health": {
|
"/v1/health": {
|
||||||
"get": {
|
"get": {
|
||||||
|
"tags": [
|
||||||
|
"health"
|
||||||
|
],
|
||||||
"summary": "Fastapi Health Get",
|
"summary": "Fastapi Health Get",
|
||||||
"description": "Health check endpoint to verify that the EOS server is alive.",
|
"description": "Health check endpoint to verify that the EOS server is alive.",
|
||||||
"operationId": "fastapi_health_get_v1_health_get",
|
"operationId": "fastapi_health_get_v1_health_get",
|
||||||
@@ -2020,6 +2023,96 @@
|
|||||||
},
|
},
|
||||||
"components": {
|
"components": {
|
||||||
"schemas": {
|
"schemas": {
|
||||||
|
"AdapterCommonSettings-Input": {
|
||||||
|
"properties": {
|
||||||
|
"provider": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Provider",
|
||||||
|
"description": "List of adapter provider id(s) of provider(s) to be used.",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"HomeAssistant"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"HomeAssistant",
|
||||||
|
"NodeRED"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"homeassistant": {
|
||||||
|
"$ref": "#/components/schemas/HomeAssistantAdapterCommonSettings-Input",
|
||||||
|
"description": "Home Assistant adapter settings."
|
||||||
|
},
|
||||||
|
"nodered": {
|
||||||
|
"$ref": "#/components/schemas/NodeREDAdapterCommonSettings",
|
||||||
|
"description": "NodeRED adapter settings."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"title": "AdapterCommonSettings",
|
||||||
|
"description": "Adapter Configuration."
|
||||||
|
},
|
||||||
|
"AdapterCommonSettings-Output": {
|
||||||
|
"properties": {
|
||||||
|
"provider": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Provider",
|
||||||
|
"description": "List of adapter provider id(s) of provider(s) to be used.",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"HomeAssistant"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"HomeAssistant",
|
||||||
|
"NodeRED"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"homeassistant": {
|
||||||
|
"$ref": "#/components/schemas/HomeAssistantAdapterCommonSettings-Output",
|
||||||
|
"description": "Home Assistant adapter settings."
|
||||||
|
},
|
||||||
|
"nodered": {
|
||||||
|
"$ref": "#/components/schemas/NodeREDAdapterCommonSettings",
|
||||||
|
"description": "NodeRED adapter settings."
|
||||||
|
},
|
||||||
|
"providers": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Providers",
|
||||||
|
"description": "Available electricity price provider ids.",
|
||||||
|
"readOnly": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"providers"
|
||||||
|
],
|
||||||
|
"title": "AdapterCommonSettings",
|
||||||
|
"description": "Adapter Configuration."
|
||||||
|
},
|
||||||
"BatteriesCommonSettings-Input": {
|
"BatteriesCommonSettings-Input": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"device_id": {
|
"device_id": {
|
||||||
@@ -2123,6 +2216,19 @@
|
|||||||
],
|
],
|
||||||
"title": "Charge Rates",
|
"title": "Charge Rates",
|
||||||
"description": "Charge rates as factor of maximum charging power [0.00 ... 1.00]. None triggers fallback to default charge-rates.",
|
"description": "Charge rates as factor of maximum charging power [0.00 ... 1.00]. None triggers fallback to default charge-rates.",
|
||||||
|
"default": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"examples": [
|
"examples": [
|
||||||
[
|
[
|
||||||
0.0,
|
0.0,
|
||||||
@@ -2264,6 +2370,19 @@
|
|||||||
],
|
],
|
||||||
"title": "Charge Rates",
|
"title": "Charge Rates",
|
||||||
"description": "Charge rates as factor of maximum charging power [0.00 ... 1.00]. None triggers fallback to default charge-rates.",
|
"description": "Charge rates as factor of maximum charging power [0.00 ... 1.00]. None triggers fallback to default charge-rates.",
|
||||||
|
"default": [
|
||||||
|
0.0,
|
||||||
|
0.1,
|
||||||
|
0.2,
|
||||||
|
0.3,
|
||||||
|
0.4,
|
||||||
|
0.5,
|
||||||
|
0.6,
|
||||||
|
0.7,
|
||||||
|
0.8,
|
||||||
|
0.9,
|
||||||
|
1.0
|
||||||
|
],
|
||||||
"examples": [
|
"examples": [
|
||||||
[
|
[
|
||||||
0.0,
|
0.0,
|
||||||
@@ -2340,7 +2459,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Measurement Keys",
|
"title": "Measurement Keys",
|
||||||
"description": "Measurement keys for the battery stati that are measurements.\n\nBattery SoC, power.",
|
"description": "Measurement keys for the battery stati that are measurements.",
|
||||||
"readOnly": true
|
"readOnly": true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -2406,13 +2525,14 @@
|
|||||||
"general": {
|
"general": {
|
||||||
"$ref": "#/components/schemas/GeneralSettings-Output",
|
"$ref": "#/components/schemas/GeneralSettings-Output",
|
||||||
"default": {
|
"default": {
|
||||||
"version": "0.2.0+dev.4dbc2d",
|
"version": "0.2.0.dev70048701",
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
"longitude": 13.405,
|
"longitude": 13.405,
|
||||||
"timezone": "Europe/Berlin",
|
"timezone": "Europe/Berlin",
|
||||||
"config_folder_path": "/home/user/.config/net.akkudoktoreos.net",
|
"config_folder_path": "/home/user/.config/net.akkudoktoreos.net",
|
||||||
"config_file_path": "/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
|
"config_file_path": "/home/user/.config/net.akkudoktoreos.net/EOS.config.json",
|
||||||
|
"home_assistant_addon": false
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"cache": {
|
"cache": {
|
||||||
@@ -2447,7 +2567,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"optimization": {
|
"optimization": {
|
||||||
"$ref": "#/components/schemas/OptimizationCommonSettings",
|
"$ref": "#/components/schemas/OptimizationCommonSettings-Output",
|
||||||
"default": {
|
"default": {
|
||||||
"horizon_hours": 24,
|
"horizon_hours": 24,
|
||||||
"interval": 3600,
|
"interval": 3600,
|
||||||
@@ -2455,7 +2575,8 @@
|
|||||||
"genetic": {
|
"genetic": {
|
||||||
"generations": 400,
|
"generations": 400,
|
||||||
"individuals": 300
|
"individuals": 300
|
||||||
}
|
},
|
||||||
|
"keys": []
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"prediction": {
|
"prediction": {
|
||||||
@@ -2472,19 +2593,34 @@
|
|||||||
"elecpriceimport": {},
|
"elecpriceimport": {},
|
||||||
"energycharts": {
|
"energycharts": {
|
||||||
"bidding_zone": "DE-LU"
|
"bidding_zone": "DE-LU"
|
||||||
}
|
},
|
||||||
|
"providers": [
|
||||||
|
"ElecPriceAkkudoktor",
|
||||||
|
"ElecPriceEnergyCharts",
|
||||||
|
"ElecPriceImport"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"feedintariff": {
|
"feedintariff": {
|
||||||
"$ref": "#/components/schemas/FeedInTariffCommonSettings-Output",
|
"$ref": "#/components/schemas/FeedInTariffCommonSettings-Output",
|
||||||
"default": {
|
"default": {
|
||||||
"provider_settings": {}
|
"provider_settings": {},
|
||||||
|
"providers": [
|
||||||
|
"FeedInTariffFixed",
|
||||||
|
"FeedInTariffImport"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"load": {
|
"load": {
|
||||||
"$ref": "#/components/schemas/LoadCommonSettings-Output",
|
"$ref": "#/components/schemas/LoadCommonSettings-Output",
|
||||||
"default": {
|
"default": {
|
||||||
"provider_settings": {}
|
"provider_settings": {},
|
||||||
|
"providers": [
|
||||||
|
"LoadAkkudoktor",
|
||||||
|
"LoadAkkudoktorAdjusted",
|
||||||
|
"LoadVrm",
|
||||||
|
"LoadImport"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"pvforecast": {
|
"pvforecast": {
|
||||||
@@ -2492,6 +2628,11 @@
|
|||||||
"default": {
|
"default": {
|
||||||
"provider_settings": {},
|
"provider_settings": {},
|
||||||
"max_planes": 0,
|
"max_planes": 0,
|
||||||
|
"providers": [
|
||||||
|
"PVForecastAkkudoktor",
|
||||||
|
"PVForecastVrm",
|
||||||
|
"PVForecastImport"
|
||||||
|
],
|
||||||
"planes_peakpower": [],
|
"planes_peakpower": [],
|
||||||
"planes_azimuth": [],
|
"planes_azimuth": [],
|
||||||
"planes_tilt": [],
|
"planes_tilt": [],
|
||||||
@@ -2502,7 +2643,12 @@
|
|||||||
"weather": {
|
"weather": {
|
||||||
"$ref": "#/components/schemas/WeatherCommonSettings-Output",
|
"$ref": "#/components/schemas/WeatherCommonSettings-Output",
|
||||||
"default": {
|
"default": {
|
||||||
"provider_settings": {}
|
"provider_settings": {},
|
||||||
|
"providers": [
|
||||||
|
"BrightSky",
|
||||||
|
"ClearOutside",
|
||||||
|
"WeatherImport"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"server": {
|
"server": {
|
||||||
@@ -2517,6 +2663,24 @@
|
|||||||
"utils": {
|
"utils": {
|
||||||
"$ref": "#/components/schemas/UtilsCommonSettings",
|
"$ref": "#/components/schemas/UtilsCommonSettings",
|
||||||
"default": {}
|
"default": {}
|
||||||
|
},
|
||||||
|
"adapter": {
|
||||||
|
"$ref": "#/components/schemas/AdapterCommonSettings-Output",
|
||||||
|
"default": {
|
||||||
|
"homeassistant": {
|
||||||
|
"eos_device_instruction_entity_ids": [],
|
||||||
|
"eos_solution_entity_ids": [],
|
||||||
|
"homeassistant_entity_ids": []
|
||||||
|
},
|
||||||
|
"nodered": {
|
||||||
|
"host": "127.0.0.1",
|
||||||
|
"port": 1880
|
||||||
|
},
|
||||||
|
"providers": [
|
||||||
|
"HomeAssistant",
|
||||||
|
"NodeRED"
|
||||||
|
]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"additionalProperties": false,
|
"additionalProperties": false,
|
||||||
@@ -3098,9 +3262,21 @@
|
|||||||
"energycharts": {
|
"energycharts": {
|
||||||
"$ref": "#/components/schemas/ElecPriceEnergyChartsCommonSettings",
|
"$ref": "#/components/schemas/ElecPriceEnergyChartsCommonSettings",
|
||||||
"description": "Energy Charts provider settings."
|
"description": "Energy Charts provider settings."
|
||||||
|
},
|
||||||
|
"providers": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Providers",
|
||||||
|
"description": "Available electricity price provider ids.",
|
||||||
|
"readOnly": true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"providers"
|
||||||
|
],
|
||||||
"title": "ElecPriceCommonSettings",
|
"title": "ElecPriceCommonSettings",
|
||||||
"description": "Electricity Price Prediction Configuration."
|
"description": "Electricity Price Prediction Configuration."
|
||||||
},
|
},
|
||||||
@@ -3976,9 +4152,21 @@
|
|||||||
"examples": [
|
"examples": [
|
||||||
{}
|
{}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
"providers": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Providers",
|
||||||
|
"description": "Available feed in tariff provider ids.",
|
||||||
|
"readOnly": true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"providers"
|
||||||
|
],
|
||||||
"title": "FeedInTariffCommonSettings",
|
"title": "FeedInTariffCommonSettings",
|
||||||
"description": "Feed In Tariff Prediction Configuration."
|
"description": "Feed In Tariff Prediction Configuration."
|
||||||
},
|
},
|
||||||
@@ -4084,7 +4272,7 @@
|
|||||||
"type": "string",
|
"type": "string",
|
||||||
"title": "Version",
|
"title": "Version",
|
||||||
"description": "Configuration file version. Used to check compatibility.",
|
"description": "Configuration file version. Used to check compatibility.",
|
||||||
"default": "0.2.0+dev.4dbc2d"
|
"default": "0.2.0.dev70048701"
|
||||||
},
|
},
|
||||||
"data_folder_path": {
|
"data_folder_path": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
@@ -4129,7 +4317,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Latitude",
|
"title": "Latitude",
|
||||||
"description": "Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (\u00b0)",
|
"description": "Latitude in decimal degrees between -90 and 90. North is positive (ISO 19115) (\u00b0)",
|
||||||
"default": 52.52
|
"default": 52.52
|
||||||
},
|
},
|
||||||
"longitude": {
|
"longitude": {
|
||||||
@@ -4144,13 +4332,13 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Longitude",
|
"title": "Longitude",
|
||||||
"description": "Longitude in decimal degrees, within -180 to 180 (\u00b0)",
|
"description": "Longitude in decimal degrees within -180 to 180 (\u00b0)",
|
||||||
"default": 13.405
|
"default": 13.405
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"title": "GeneralSettings",
|
"title": "GeneralSettings",
|
||||||
"description": "Settings for common configuration.\n\nGeneral configuration to set directories of cache and output files and system location (latitude\nand longitude).\nValidators ensure each parameter is within a specified range. A computed property, `timezone`,\ndetermines the time zone based on latitude and longitude.\n\nAttributes:\n latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.\n longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.\n\nProperties:\n timezone (Optional[str]): Computed time zone string based on the specified latitude\n and longitude."
|
"description": "General settings."
|
||||||
},
|
},
|
||||||
"GeneralSettings-Output": {
|
"GeneralSettings-Output": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@@ -4158,7 +4346,7 @@
|
|||||||
"type": "string",
|
"type": "string",
|
||||||
"title": "Version",
|
"title": "Version",
|
||||||
"description": "Configuration file version. Used to check compatibility.",
|
"description": "Configuration file version. Used to check compatibility.",
|
||||||
"default": "0.2.0+dev.4dbc2d"
|
"default": "0.2.0.dev70048701"
|
||||||
},
|
},
|
||||||
"data_folder_path": {
|
"data_folder_path": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
@@ -4203,7 +4391,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Latitude",
|
"title": "Latitude",
|
||||||
"description": "Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (\u00b0)",
|
"description": "Latitude in decimal degrees between -90 and 90. North is positive (ISO 19115) (\u00b0)",
|
||||||
"default": 52.52
|
"default": 52.52
|
||||||
},
|
},
|
||||||
"longitude": {
|
"longitude": {
|
||||||
@@ -4218,7 +4406,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Longitude",
|
"title": "Longitude",
|
||||||
"description": "Longitude in decimal degrees, within -180 to 180 (\u00b0)",
|
"description": "Longitude in decimal degrees within -180 to 180 (\u00b0)",
|
||||||
"default": 13.405
|
"default": 13.405
|
||||||
},
|
},
|
||||||
"timezone": {
|
"timezone": {
|
||||||
@@ -4231,7 +4419,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Timezone",
|
"title": "Timezone",
|
||||||
"description": "Compute timezone based on latitude and longitude.",
|
"description": "Computed timezone based on latitude and longitude.",
|
||||||
"readOnly": true
|
"readOnly": true
|
||||||
},
|
},
|
||||||
"data_output_path": {
|
"data_output_path": {
|
||||||
@@ -4245,7 +4433,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"title": "Data Output Path",
|
"title": "Data Output Path",
|
||||||
"description": "Compute data_output_path based on data_folder_path.",
|
"description": "Computed data_output_path based on data_folder_path.",
|
||||||
"readOnly": true
|
"readOnly": true
|
||||||
},
|
},
|
||||||
"config_folder_path": {
|
"config_folder_path": {
|
||||||
@@ -4275,6 +4463,12 @@
|
|||||||
"title": "Config File Path",
|
"title": "Config File Path",
|
||||||
"description": "Path to EOS configuration file.",
|
"description": "Path to EOS configuration file.",
|
||||||
"readOnly": true
|
"readOnly": true
|
||||||
|
},
|
||||||
|
"home_assistant_addon": {
|
||||||
|
"type": "boolean",
|
||||||
|
"title": "Home Assistant Addon",
|
||||||
|
"description": "EOS is running as home assistant add-on.",
|
||||||
|
"readOnly": true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@@ -4282,10 +4476,11 @@
|
|||||||
"timezone",
|
"timezone",
|
||||||
"data_output_path",
|
"data_output_path",
|
||||||
"config_folder_path",
|
"config_folder_path",
|
||||||
"config_file_path"
|
"config_file_path",
|
||||||
|
"home_assistant_addon"
|
||||||
],
|
],
|
||||||
"title": "GeneralSettings",
|
"title": "GeneralSettings",
|
||||||
"description": "Settings for common configuration.\n\nGeneral configuration to set directories of cache and output files and system location (latitude\nand longitude).\nValidators ensure each parameter is within a specified range. A computed property, `timezone`,\ndetermines the time zone based on latitude and longitude.\n\nAttributes:\n latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.\n longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.\n\nProperties:\n timezone (Optional[str]): Computed time zone string based on the specified latitude\n and longitude."
|
"description": "General settings."
|
||||||
},
|
},
|
||||||
"GeneticCommonSettings": {
|
"GeneticCommonSettings": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@@ -5010,6 +5205,310 @@
|
|||||||
"title": "HomeApplianceParameters",
|
"title": "HomeApplianceParameters",
|
||||||
"description": "Home Appliance Device Simulation Configuration."
|
"description": "Home Appliance Device Simulation Configuration."
|
||||||
},
|
},
|
||||||
|
"HomeAssistantAdapterCommonSettings-Input": {
|
||||||
|
"properties": {
|
||||||
|
"config_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "object"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Config Entity Ids",
|
||||||
|
"description": "Mapping of EOS config keys to Home Assistant entity IDs.\nThe config key has to be given by a \u2018/\u2019-separated path\ne.g. devices/batteries/0/capacity_wh",
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"devices/batteries/0/capacity_wh": "sensor.battery1_capacity"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"load_emr_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Load Emr Entity Ids",
|
||||||
|
"description": "Entity ID(s) of load energy meter reading [kWh]",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.load_energy_total_kwh"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"sensor.load_emr1_kwh",
|
||||||
|
"sensor.load_emr2_kwh"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pv_production_emr_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Pv Production Emr Entity Ids",
|
||||||
|
"description": "Entity ID(s) of PV production energy meter reading [kWh]",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.pv_energy_total_kwh"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"sensor.pv_emr1_kwh",
|
||||||
|
"sensor.pv_emr2_kwh"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"device_measurement_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "object"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Device Measurement Entity Ids",
|
||||||
|
"description": "Mapping of EOS measurement keys used by device (resource) simulations to Home Assistant entity IDs.",
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"battery1_soc_factor": "sensor.battery1_soc_factor",
|
||||||
|
"ev11_soc_factor": "sensor.ev11_soc_factor"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"device_instruction_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Device Instruction Entity Ids",
|
||||||
|
"description": "Entity IDs for device (resource) instructions to be updated by EOS.\nThe device ids (resource ids) have to be prepended by 'sensor.eos_' to build the entity_id.\nE.g. The instruction for device id 'battery1' becomes the entity_id 'sensor.eos_battery1'.",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.eos_battery1"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"solution_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Solution Entity Ids",
|
||||||
|
"description": "Entity IDs for optimization solution keys to be updated by EOS.\nThe solution keys have to be prepended by 'sensor.eos_' to build the entity_id.\nE.g. solution key 'battery1_idle_op_mode' becomes the entity_id 'sensor.eos_battery1_idle_op_mode'.",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.eos_battery1_idle_mode_mode"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"title": "HomeAssistantAdapterCommonSettings",
|
||||||
|
"description": "Common settings for the home assistant adapter."
|
||||||
|
},
|
||||||
|
"HomeAssistantAdapterCommonSettings-Output": {
|
||||||
|
"properties": {
|
||||||
|
"config_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "object"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Config Entity Ids",
|
||||||
|
"description": "Mapping of EOS config keys to Home Assistant entity IDs.\nThe config key has to be given by a \u2018/\u2019-separated path\ne.g. devices/batteries/0/capacity_wh",
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"devices/batteries/0/capacity_wh": "sensor.battery1_capacity"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"load_emr_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Load Emr Entity Ids",
|
||||||
|
"description": "Entity ID(s) of load energy meter reading [kWh]",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.load_energy_total_kwh"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"sensor.load_emr1_kwh",
|
||||||
|
"sensor.load_emr2_kwh"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pv_production_emr_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Pv Production Emr Entity Ids",
|
||||||
|
"description": "Entity ID(s) of PV production energy meter reading [kWh]",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.pv_energy_total_kwh"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"sensor.pv_emr1_kwh",
|
||||||
|
"sensor.pv_emr2_kwh"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"device_measurement_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "object"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Device Measurement Entity Ids",
|
||||||
|
"description": "Mapping of EOS measurement keys used by device (resource) simulations to Home Assistant entity IDs.",
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"battery1_soc_factor": "sensor.battery1_soc_factor",
|
||||||
|
"ev11_soc_factor": "sensor.ev11_soc_factor"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"device_instruction_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Device Instruction Entity Ids",
|
||||||
|
"description": "Entity IDs for device (resource) instructions to be updated by EOS.\nThe device ids (resource ids) have to be prepended by 'sensor.eos_' to build the entity_id.\nE.g. The instruction for device id 'battery1' becomes the entity_id 'sensor.eos_battery1'.",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.eos_battery1"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"solution_entity_ids": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Solution Entity Ids",
|
||||||
|
"description": "Entity IDs for optimization solution keys to be updated by EOS.\nThe solution keys have to be prepended by 'sensor.eos_' to build the entity_id.\nE.g. solution key 'battery1_idle_op_mode' becomes the entity_id 'sensor.eos_battery1_idle_op_mode'.",
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
"sensor.eos_battery1_idle_mode_mode"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"homeassistant_entity_ids": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Homeassistant Entity Ids",
|
||||||
|
"description": "Entity IDs available at Home Assistant.",
|
||||||
|
"readOnly": true
|
||||||
|
},
|
||||||
|
"eos_solution_entity_ids": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Eos Solution Entity Ids",
|
||||||
|
"description": "Entity IDs for optimization solution available at EOS.",
|
||||||
|
"readOnly": true
|
||||||
|
},
|
||||||
|
"eos_device_instruction_entity_ids": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Eos Device Instruction Entity Ids",
|
||||||
|
"description": "Entity IDs for energy management instructions available at EOS.",
|
||||||
|
"readOnly": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"homeassistant_entity_ids",
|
||||||
|
"eos_solution_entity_ids",
|
||||||
|
"eos_device_instruction_entity_ids"
|
||||||
|
],
|
||||||
|
"title": "HomeAssistantAdapterCommonSettings",
|
||||||
|
"description": "Common settings for the home assistant adapter."
|
||||||
|
},
|
||||||
"InverterCommonSettings-Input": {
|
"InverterCommonSettings-Input": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"device_id": {
|
"device_id": {
|
||||||
@@ -5314,9 +5813,21 @@
|
|||||||
"examples": [
|
"examples": [
|
||||||
{}
|
{}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
"providers": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Providers",
|
||||||
|
"description": "Available load provider ids.",
|
||||||
|
"readOnly": true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"providers"
|
||||||
|
],
|
||||||
"title": "LoadCommonSettings",
|
"title": "LoadCommonSettings",
|
||||||
"description": "Load Prediction Configuration."
|
"description": "Load Prediction Configuration."
|
||||||
},
|
},
|
||||||
@@ -5385,7 +5896,7 @@
|
|||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"title": "LoadVrmCommonSettings",
|
"title": "LoadVrmCommonSettings",
|
||||||
"description": "Common settings for VRM API."
|
"description": "Common settings for load forecast VRM API."
|
||||||
},
|
},
|
||||||
"LoggingCommonSettings-Input": {
|
"LoggingCommonSettings-Input": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@@ -5684,6 +6195,46 @@
|
|||||||
"title": "MeasurementCommonSettings",
|
"title": "MeasurementCommonSettings",
|
||||||
"description": "Measurement Configuration."
|
"description": "Measurement Configuration."
|
||||||
},
|
},
|
||||||
|
"NodeREDAdapterCommonSettings": {
|
||||||
|
"properties": {
|
||||||
|
"host": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Host",
|
||||||
|
"description": "Node-RED server IP address. Defaults to 127.0.0.1.",
|
||||||
|
"default": "127.0.0.1",
|
||||||
|
"examples": [
|
||||||
|
"127.0.0.1",
|
||||||
|
"localhost"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"port": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Port",
|
||||||
|
"description": "Node-RED server IP port number. Defaults to 1880.",
|
||||||
|
"default": 1880,
|
||||||
|
"examples": [
|
||||||
|
1880
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"title": "NodeREDAdapterCommonSettings",
|
||||||
|
"description": "Common settings for the NodeRED adapter.\n\nThe Node-RED adapter sends to HTTP IN nodes.\n\nThis is the example flow:\n\n[HTTP In \\\\<URL\\\\>] -> [Function (parse payload)] -> [Debug] -> [HTTP Response]\n\nThere are two URLs that are used:\n\n- GET /eos/data_aquisition\n The GET is issued before the optimization.\n- POST /eos/control_dispatch\n The POST is issued after the optimization."
|
||||||
|
},
|
||||||
"OMBCInstruction": {
|
"OMBCInstruction": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"id": {
|
"id": {
|
||||||
@@ -5799,7 +6350,7 @@
|
|||||||
"title": "OMBCStatus",
|
"title": "OMBCStatus",
|
||||||
"description": "Reports the current operational status of an Operation Mode Based Control system.\n\nThis model provides real-time status information about an OMBC-controlled device,\nincluding which operation mode is currently active, how it is configured,\nand information about recent mode transitions. It enables monitoring of the\ndevice's operational state and tracking mode transition history."
|
"description": "Reports the current operational status of an Operation Mode Based Control system.\n\nThis model provides real-time status information about an OMBC-controlled device,\nincluding which operation mode is currently active, how it is configured,\nand information about recent mode transitions. It enables monitoring of the\ndevice's operational state and tracking mode transition history."
|
||||||
},
|
},
|
||||||
"OptimizationCommonSettings": {
|
"OptimizationCommonSettings-Input": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"horizon_hours": {
|
"horizon_hours": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
@@ -5877,6 +6428,96 @@
|
|||||||
"title": "OptimizationCommonSettings",
|
"title": "OptimizationCommonSettings",
|
||||||
"description": "General Optimization Configuration."
|
"description": "General Optimization Configuration."
|
||||||
},
|
},
|
||||||
|
"OptimizationCommonSettings-Output": {
|
||||||
|
"properties": {
|
||||||
|
"horizon_hours": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "integer",
|
||||||
|
"minimum": 0.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Horizon Hours",
|
||||||
|
"description": "The general time window within which the energy optimization goal shall be achieved [h]. Defaults to 24 hours.",
|
||||||
|
"default": 24,
|
||||||
|
"examples": [
|
||||||
|
24
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"interval": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "integer",
|
||||||
|
"maximum": 3600.0,
|
||||||
|
"minimum": 900.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Interval",
|
||||||
|
"description": "The optimization interval [sec].",
|
||||||
|
"default": 3600,
|
||||||
|
"examples": [
|
||||||
|
3600,
|
||||||
|
900
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"algorithm": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"title": "Algorithm",
|
||||||
|
"description": "The optimization algorithm.",
|
||||||
|
"default": "GENETIC",
|
||||||
|
"examples": [
|
||||||
|
"GENETIC"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"genetic": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"$ref": "#/components/schemas/GeneticCommonSettings"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Genetic optimization algorithm configuration.",
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"individuals": 400,
|
||||||
|
"penalties": {
|
||||||
|
"ev_soc_miss": 10
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"keys": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Keys",
|
||||||
|
"description": "The keys of the solution.",
|
||||||
|
"readOnly": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"keys"
|
||||||
|
],
|
||||||
|
"title": "OptimizationCommonSettings",
|
||||||
|
"description": "General Optimization Configuration."
|
||||||
|
},
|
||||||
"OptimizationSolution": {
|
"OptimizationSolution": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"id": {
|
"id": {
|
||||||
@@ -6641,6 +7282,15 @@
|
|||||||
2
|
2
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"providers": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Providers",
|
||||||
|
"description": "Available PVForecast provider ids.",
|
||||||
|
"readOnly": true
|
||||||
|
},
|
||||||
"planes_peakpower": {
|
"planes_peakpower": {
|
||||||
"items": {
|
"items": {
|
||||||
"type": "number"
|
"type": "number"
|
||||||
@@ -6681,6 +7331,7 @@
|
|||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
|
"providers",
|
||||||
"planes_peakpower",
|
"planes_peakpower",
|
||||||
"planes_azimuth",
|
"planes_azimuth",
|
||||||
"planes_tilt",
|
"planes_tilt",
|
||||||
@@ -7027,7 +7678,7 @@
|
|||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"title": "PVForecastVrmCommonSettings",
|
"title": "PVForecastVrmCommonSettings",
|
||||||
"description": "Common settings for VRM API."
|
"description": "Common settings for PV forecast VRM API."
|
||||||
},
|
},
|
||||||
"PowerMeasurement-Input": {
|
"PowerMeasurement-Input": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@@ -7144,7 +7795,7 @@
|
|||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"title": "PredictionCommonSettings",
|
"title": "PredictionCommonSettings",
|
||||||
"description": "General Prediction Configuration.\n\nThis class provides configuration for prediction settings, allowing users to specify\nparameters such as the forecast duration (in hours).\nValidators ensure each parameter is within a specified range.\n\nAttributes:\n hours (Optional[int]): Number of hours into the future for predictions.\n Must be non-negative.\n historic_hours (Optional[int]): Number of hours into the past for historical data.\n Must be non-negative.\n\nValidators:\n validate_hours (int): Ensures `hours` is a non-negative integer.\n validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer."
|
"description": "General Prediction Configuration."
|
||||||
},
|
},
|
||||||
"PydanticDateTimeData": {
|
"PydanticDateTimeData": {
|
||||||
"additionalProperties": {
|
"additionalProperties": {
|
||||||
@@ -7175,7 +7826,7 @@
|
|||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"title": "PydanticDateTimeData",
|
"title": "PydanticDateTimeData",
|
||||||
"description": "Pydantic model for time series data with consistent value lengths.\n\nThis model validates a dictionary where:\n- Keys are strings representing data series names\n- Values are lists of numeric or string values\n- Special keys 'start_datetime' and 'interval' can contain string values\nfor time series indexing\n- All value lists must have the same length\n\nExample:\n .. code-block:: python\n\n {\n \"start_datetime\": \"2024-01-01 00:00:00\", # optional\n \"interval\": \"1 Hour\", # optional\n \"loadforecast_power_w\": [20.5, 21.0, 22.1],\n \"load_min\": [18.5, 19.0, 20.1]\n }"
|
"description": "Pydantic model for time series data with consistent value lengths.\n\nThis model validates a dictionary where:\n- Keys are strings representing data series names\n- Values are lists of numeric or string values\n- Special keys 'start_datetime' and 'interval' can contain string values\nfor time series indexing\n- All value lists must have the same length\n\nExample:\n .. code-block:: python\n\n {\n \"start_datetime\": \"2024-01-01 00:00:00\", # optional\n \"interval\": \"1 hour\", # optional\n \"loadforecast_power_w\": [20.5, 21.0, 22.1],\n \"load_min\": [18.5, 19.0, 20.1]\n }"
|
||||||
},
|
},
|
||||||
"PydanticDateTimeDataFrame": {
|
"PydanticDateTimeDataFrame": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@@ -7421,7 +8072,7 @@
|
|||||||
"optimization": {
|
"optimization": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
{
|
{
|
||||||
"$ref": "#/components/schemas/OptimizationCommonSettings"
|
"$ref": "#/components/schemas/OptimizationCommonSettings-Input"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "null"
|
"type": "null"
|
||||||
@@ -7516,6 +8167,17 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"description": "Utilities Settings"
|
"description": "Utilities Settings"
|
||||||
|
},
|
||||||
|
"adapter": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"$ref": "#/components/schemas/AdapterCommonSettings-Input"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Adapter Settings"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"additionalProperties": false,
|
"additionalProperties": false,
|
||||||
@@ -7931,9 +8593,21 @@
|
|||||||
"examples": [
|
"examples": [
|
||||||
{}
|
{}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
"providers": {
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
"title": "Providers",
|
||||||
|
"description": "Available weather provider ids.",
|
||||||
|
"readOnly": true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"providers"
|
||||||
|
],
|
||||||
"title": "WeatherCommonSettings",
|
"title": "WeatherCommonSettings",
|
||||||
"description": "Weather Forecast Configuration."
|
"description": "Weather Forecast Configuration."
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -87,6 +87,9 @@ convention = "google"
|
|||||||
minversion = "8.3.3"
|
minversion = "8.3.3"
|
||||||
pythonpath = [ "src", ]
|
pythonpath = [ "src", ]
|
||||||
testpaths = [ "tests", ]
|
testpaths = [ "tests", ]
|
||||||
|
markers = [
|
||||||
|
"docker: marks tests that require a local Docker engine"
|
||||||
|
]
|
||||||
|
|
||||||
[tool.mypy]
|
[tool.mypy]
|
||||||
files = ["src", "tests"]
|
files = ["src", "tests"]
|
||||||
|
|||||||
9
repository.yaml
Normal file
9
repository.yaml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# Home Assistant - Add-on Repository Configuration
|
||||||
|
# ------------------------------------------------
|
||||||
|
# https://developers.home-assistant.io/docs/add-ons/repository#repository-configuration
|
||||||
|
#
|
||||||
|
# The Akkudoktor-EOS add-on repo is special because there is only one add-on and it is in
|
||||||
|
# the root directory (no add-on folder as usual).
|
||||||
|
name: Akkudoktor-EOS
|
||||||
|
url: http://github.com/Akkudoktor-EOS/EOS
|
||||||
|
maintainer: Akkudoktor-EOS Team
|
||||||
@@ -25,9 +25,11 @@ sphinx_rtd_theme==3.0.2
|
|||||||
sphinx-tabs==3.4.7
|
sphinx-tabs==3.4.7
|
||||||
GitPython==3.1.45
|
GitPython==3.1.45
|
||||||
myst-parser==4.0.1
|
myst-parser==4.0.1
|
||||||
|
docutils==0.21.2
|
||||||
|
|
||||||
# Pytest
|
# Pytest
|
||||||
pytest==9.0.2
|
pytest==9.0.2
|
||||||
|
pytest-asyncio==1.3.0
|
||||||
pytest-cov==7.0.0
|
pytest-cov==7.0.0
|
||||||
coverage==7.13.0
|
|
||||||
pytest-xprocess==1.0.2
|
pytest-xprocess==1.0.2
|
||||||
|
coverage==7.13.0
|
||||||
|
|||||||
@@ -1,17 +1,16 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
"""
|
"""Update VERSION_BASE in version.py after a release tag.
|
||||||
Update VERSION_BASE in version.py after a release tag.
|
|
||||||
|
|
||||||
Behavior:
|
Behavior:
|
||||||
- Read VERSION_BASE from version.py
|
- Read VERSION_BASE from version.py
|
||||||
- Strip ANY existing "+dev" suffix
|
- Strip ANY existing ".dev" suffix
|
||||||
- Append exactly one "+dev"
|
- Append exactly one ".dev"
|
||||||
- Write back the updated file
|
- Write back the updated file
|
||||||
|
|
||||||
This ensures:
|
This ensures:
|
||||||
0.2.0 --> 0.2.0+dev
|
0.2.0 --> 0.2.0.dev
|
||||||
0.2.0+dev --> 0.2.0+dev
|
0.2.0.dev --> 0.2.0.dev
|
||||||
0.2.0+dev+dev -> 0.2.0+dev
|
0.2.0.dev.dev -> 0.2.0.dev
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
@@ -33,11 +32,11 @@ def bump_dev_version_file(file: Path) -> str:
|
|||||||
|
|
||||||
base_version = m.group(1)
|
base_version = m.group(1)
|
||||||
|
|
||||||
# Remove trailing +dev if present → ensure idempotency
|
# Remove trailing .dev if present → ensure idempotency
|
||||||
cleaned = re.sub(r'(\+dev)+$', '', base_version)
|
cleaned = re.sub(r'(\.dev)+$', '', base_version)
|
||||||
|
|
||||||
# Append +dev
|
# Append +dev
|
||||||
new_version = f"{cleaned}+dev"
|
new_version = f"{cleaned}.dev"
|
||||||
|
|
||||||
# Replace inside file content
|
# Replace inside file content
|
||||||
new_text = re.sub(
|
new_text = re.sub(
|
||||||
|
|||||||
@@ -15,43 +15,49 @@ from typing import List
|
|||||||
VERSION_PATTERNS = [
|
VERSION_PATTERNS = [
|
||||||
# Python: __version__ = "1.2.3"
|
# Python: __version__ = "1.2.3"
|
||||||
re.compile(
|
re.compile(
|
||||||
r'(?<![A-Za-z0-9])(__version__\s*=\s*")'
|
r'(?<![A-Za-z0-9_])(__version__\s*=\s*")'
|
||||||
r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
|
r'(?P<ver>\d+\.\d+\.\d+(?:[\.\+\-][0-9A-Za-z]+)?)'
|
||||||
r'(")'
|
r'(")'
|
||||||
),
|
),
|
||||||
|
|
||||||
# Python: version = "1.2.3"
|
# Python: version = "1.2.3"
|
||||||
re.compile(
|
re.compile(
|
||||||
r'(?<![A-Za-z0-9])(version\s*=\s*")'
|
r'(?<![A-Za-z0-9_])(version\s*=\s*")'
|
||||||
r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
|
r'(?P<ver>\d+\.\d+\.\d+(?:[\.\+\-][0-9A-Za-z]+)?)'
|
||||||
r'(")'
|
r'(")'
|
||||||
),
|
),
|
||||||
|
|
||||||
# JSON: "version": "1.2.3"
|
# JSON: "version": "1.2.3"
|
||||||
re.compile(
|
re.compile(
|
||||||
r'(?<![A-Za-z0-9])("version"\s*:\s*")'
|
r'(?<![A-Za-z0-9_])("version"\s*:\s*")'
|
||||||
r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
|
r'(?P<ver>\d+\.\d+\.\d+(?:[\.\+\-][0-9A-Za-z]+)?)'
|
||||||
r'(")'
|
r'(")'
|
||||||
),
|
),
|
||||||
|
|
||||||
# Makefile-style: VERSION ?= 1.2.3
|
# Makefile-style: VERSION ?= 1.2.3
|
||||||
re.compile(
|
re.compile(
|
||||||
r'(?<![A-Za-z0-9])(VERSION\s*\?=\s*)'
|
r'(?<![A-Za-z0-9_])(VERSION\s*\?=\s*)'
|
||||||
r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
|
r'(?P<ver>\d+\.\d+\.\d+(?:[\.\+\-][0-9A-Za-z]+)?)'
|
||||||
|
),
|
||||||
|
|
||||||
|
# Environment-style: VERSION = 1.2.3
|
||||||
|
re.compile(
|
||||||
|
r'(?<![A-Za-z0-9_])(VERSION\s*\=\s*)'
|
||||||
|
r'(?P<ver>\d+\.\d+\.\d+(?:[\.\+\-][0-9A-Za-z]+)?)'
|
||||||
),
|
),
|
||||||
|
|
||||||
# YAML: version: "1.2.3"
|
# YAML: version: "1.2.3"
|
||||||
re.compile(
|
re.compile(
|
||||||
r'(?m)^(version\s*:\s*["\']?)'
|
r'(?m)^(version\s*:\s*["\']?)'
|
||||||
r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
|
r'(?P<ver>\d+\.\d+\.\d+(?:[\.\+\-][0-9A-Za-z]+)?)'
|
||||||
r'(["\']?)\s*$'
|
r'(["\']?)\s*$'
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def update_version_in_file(file_path: Path, new_version: str) -> bool:
|
def update_version_in_file(file_path: Path, new_version: str) -> bool:
|
||||||
"""
|
"""Replace version strings in a file based on VERSION_PATTERNS.
|
||||||
Replace version strings in a file based on VERSION_PATTERNS.
|
|
||||||
Returns True if the file was updated.
|
Returns True if the file was updated.
|
||||||
"""
|
"""
|
||||||
content = file_path.read_text()
|
content = file_path.read_text()
|
||||||
|
|||||||
94
src/akkudoktoreos/adapter/adapter.py
Normal file
94
src/akkudoktoreos/adapter/adapter.py
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
from typing import TYPE_CHECKING, Optional, Union
|
||||||
|
|
||||||
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
|
from akkudoktoreos.adapter.adapterabc import AdapterContainer
|
||||||
|
from akkudoktoreos.adapter.homeassistant import (
|
||||||
|
HomeAssistantAdapter,
|
||||||
|
HomeAssistantAdapterCommonSettings,
|
||||||
|
)
|
||||||
|
from akkudoktoreos.adapter.nodered import NodeREDAdapter, NodeREDAdapterCommonSettings
|
||||||
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
adapter_providers: list[str]
|
||||||
|
|
||||||
|
|
||||||
|
class AdapterCommonSettings(SettingsBaseModel):
|
||||||
|
"""Adapter Configuration."""
|
||||||
|
|
||||||
|
provider: Optional[list[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": ("List of adapter provider id(s) of provider(s) to be used."),
|
||||||
|
"examples": [["HomeAssistant"], ["HomeAssistant", "NodeRED"]],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
homeassistant: HomeAssistantAdapterCommonSettings = Field(
|
||||||
|
default_factory=HomeAssistantAdapterCommonSettings,
|
||||||
|
json_schema_extra={"description": "Home Assistant adapter settings."},
|
||||||
|
)
|
||||||
|
|
||||||
|
nodered: NodeREDAdapterCommonSettings = Field(
|
||||||
|
default_factory=NodeREDAdapterCommonSettings,
|
||||||
|
json_schema_extra={"description": "NodeRED adapter settings."},
|
||||||
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def providers(self) -> list[str]:
|
||||||
|
"""Available electricity price provider ids."""
|
||||||
|
return adapter_providers
|
||||||
|
|
||||||
|
# Validators
|
||||||
|
@field_validator("provider", mode="after")
|
||||||
|
@classmethod
|
||||||
|
def validate_provider(cls, value: Optional[list[str]]) -> Optional[list[str]]:
|
||||||
|
if value is None:
|
||||||
|
return value
|
||||||
|
for provider_id in value:
|
||||||
|
if provider_id not in adapter_providers:
|
||||||
|
raise ValueError(
|
||||||
|
f"Provider '{value}' is not a valid adapter provider: {adapter_providers}."
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class Adapter(AdapterContainer):
|
||||||
|
"""Adapter container to manage multiple adapter providers.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
providers (List[Union[PVForecastAkkudoktor, WeatherBrightSky, WeatherClearOutside]]):
|
||||||
|
List of forecast provider instances, in the order they should be updated.
|
||||||
|
Providers may depend on updates from others.
|
||||||
|
"""
|
||||||
|
|
||||||
|
providers: list[
|
||||||
|
Union[
|
||||||
|
HomeAssistantAdapter,
|
||||||
|
NodeREDAdapter,
|
||||||
|
]
|
||||||
|
] = Field(default_factory=list, json_schema_extra={"description": "List of adapter providers"})
|
||||||
|
|
||||||
|
|
||||||
|
# Initialize adapter providers, all are singletons.
|
||||||
|
homeassistant_adapter = HomeAssistantAdapter()
|
||||||
|
nodered_adapter = NodeREDAdapter()
|
||||||
|
|
||||||
|
|
||||||
|
def get_adapter() -> Adapter:
|
||||||
|
"""Gets the EOS adapter data."""
|
||||||
|
# Initialize Adapter instance with providers in the required order
|
||||||
|
# Care for provider sequence as providers may rely on others to be updated before.
|
||||||
|
adapter = Adapter(
|
||||||
|
providers=[
|
||||||
|
homeassistant_adapter,
|
||||||
|
nodered_adapter,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
return adapter
|
||||||
|
|
||||||
|
|
||||||
|
# Valid adapter providers
|
||||||
|
adapter_providers = [provider.provider_id() for provider in get_adapter().providers]
|
||||||
160
src/akkudoktoreos/adapter/adapterabc.py
Normal file
160
src/akkudoktoreos/adapter/adapterabc.py
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
"""Abstract and base classes for adapters."""
|
||||||
|
|
||||||
|
from abc import abstractmethod
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
from pydantic import (
|
||||||
|
Field,
|
||||||
|
field_validator,
|
||||||
|
)
|
||||||
|
|
||||||
|
from akkudoktoreos.core.coreabc import (
|
||||||
|
ConfigMixin,
|
||||||
|
MeasurementMixin,
|
||||||
|
SingletonMixin,
|
||||||
|
StartMixin,
|
||||||
|
)
|
||||||
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
|
from akkudoktoreos.utils.datetimeutil import (
|
||||||
|
DateTime,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AdapterProvider(SingletonMixin, ConfigMixin, MeasurementMixin, StartMixin, PydanticBaseModel):
|
||||||
|
"""Abstract base class for adapter providers with singleton thread-safety and configurable data parameters.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
Derived classes have to provide their own _update_data method.
|
||||||
|
"""
|
||||||
|
|
||||||
|
update_datetime: Optional[DateTime] = Field(
|
||||||
|
None, json_schema_extra={"description": "Latest update datetime for adapter data"}
|
||||||
|
)
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def provider_id(self) -> str:
|
||||||
|
"""Return the unique identifier for the adapter provider.
|
||||||
|
|
||||||
|
To be implemented by derived classes.
|
||||||
|
"""
|
||||||
|
return "AdapterProvider"
|
||||||
|
|
||||||
|
def enabled(self) -> bool:
|
||||||
|
"""Return True if the provider is enabled according to configuration.
|
||||||
|
|
||||||
|
Can be overwritten by derived classes.
|
||||||
|
"""
|
||||||
|
if self.config.adapter is None:
|
||||||
|
return False
|
||||||
|
if isinstance(self.config.adapter.provider, str):
|
||||||
|
return self.provider_id() == self.config.adapter.provider
|
||||||
|
if isinstance(self.config.adapter.provider, list):
|
||||||
|
return self.provider_id() in self.config.adapter.provider
|
||||||
|
return False
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def _update_data(self) -> None:
|
||||||
|
"""Abstract method for custom adapter data update logic, to be implemented by derived classes.
|
||||||
|
|
||||||
|
Data update may be requested at different stages of energy management. The stage can be
|
||||||
|
detected by self.ems.stage().
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def update_data(
|
||||||
|
self,
|
||||||
|
force_enable: Optional[bool] = False,
|
||||||
|
) -> None:
|
||||||
|
"""Calls the custom update function if enabled or forced.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
force_enable (bool, optional): If True, forces the update even if the provider is disabled.
|
||||||
|
"""
|
||||||
|
# Check after configuration is updated.
|
||||||
|
if not force_enable and not self.enabled():
|
||||||
|
return
|
||||||
|
|
||||||
|
# Call the custom update logic
|
||||||
|
logger.debug(f"Update adapter provider: {self.provider_id()}")
|
||||||
|
self._update_data()
|
||||||
|
|
||||||
|
|
||||||
|
class AdapterContainer(SingletonMixin, ConfigMixin, PydanticBaseModel):
|
||||||
|
"""A container for managing multiple adapter provider instances.
|
||||||
|
|
||||||
|
This class enables to control multiple adapter providers
|
||||||
|
"""
|
||||||
|
|
||||||
|
providers: list[AdapterProvider] = Field(
|
||||||
|
default_factory=list, json_schema_extra={"description": "List of adapter providers"}
|
||||||
|
)
|
||||||
|
|
||||||
|
@field_validator("providers")
|
||||||
|
def check_providers(cls, value: list[AdapterProvider]) -> list[AdapterProvider]:
|
||||||
|
# Check each item in the list
|
||||||
|
for item in value:
|
||||||
|
if not isinstance(item, AdapterProvider):
|
||||||
|
raise TypeError(
|
||||||
|
f"Each item in the adapter providers list must be an AdapterProvider, got {type(item).__name__}"
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def enabled_providers(self) -> list[Any]:
|
||||||
|
"""List of providers that are currently enabled."""
|
||||||
|
enab = []
|
||||||
|
for provider in self.providers:
|
||||||
|
if provider.enabled():
|
||||||
|
enab.append(provider)
|
||||||
|
return enab
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def provider_by_id(self, provider_id: str) -> AdapterProvider:
|
||||||
|
"""Retrieves an adapter provider by its unique identifier.
|
||||||
|
|
||||||
|
This method searches through the list of all available providers and
|
||||||
|
returns the first provider whose `provider_id` matches the given
|
||||||
|
`provider_id`. If no matching provider is found, the method returns `None`.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_id (str): The unique identifier of the desired data provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
DataProvider: The data provider matching the given `provider_id`.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError if provider id is unknown.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
provider = data.provider_by_id("WeatherImport")
|
||||||
|
"""
|
||||||
|
providers = {provider.provider_id(): provider for provider in self.providers}
|
||||||
|
if provider_id not in providers:
|
||||||
|
error_msg = f"Unknown provider id: '{provider_id}' of '{providers.keys()}'."
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
return providers[provider_id]
|
||||||
|
|
||||||
|
def update_data(
|
||||||
|
self,
|
||||||
|
force_enable: Optional[bool] = False,
|
||||||
|
) -> None:
|
||||||
|
"""Calls the custom update function of all adapters if enabled or forced.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
force_enable (bool, optional): If True, forces the update even if the provider is disabled.
|
||||||
|
"""
|
||||||
|
# Call the custom update logic
|
||||||
|
if len(self.providers) > 0:
|
||||||
|
for provider in self.providers:
|
||||||
|
provider.update_data(force_enable=force_enable)
|
||||||
524
src/akkudoktoreos/adapter/homeassistant.py
Normal file
524
src/akkudoktoreos/adapter/homeassistant.py
Normal file
@@ -0,0 +1,524 @@
|
|||||||
|
"""Home Assistant adapter."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
import requests
|
||||||
|
from loguru import logger
|
||||||
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
|
from akkudoktoreos.adapter.adapterabc import AdapterProvider
|
||||||
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
from akkudoktoreos.core.emplan import (
|
||||||
|
DDBCInstruction,
|
||||||
|
FRBCInstruction,
|
||||||
|
)
|
||||||
|
from akkudoktoreos.core.ems import EnergyManagementStage
|
||||||
|
from akkudoktoreos.devices.devices import get_resource_registry
|
||||||
|
from akkudoktoreos.utils.datetimeutil import to_datetime
|
||||||
|
|
||||||
|
# Supervisor API endpoint and token (injected automatically in add-on container)
|
||||||
|
CORE_API = "http://supervisor/core/api"
|
||||||
|
TOKEN = os.environ.get("SUPERVISOR_TOKEN")
|
||||||
|
|
||||||
|
HEADERS = {
|
||||||
|
"Authorization": f"Bearer {TOKEN}",
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
}
|
||||||
|
|
||||||
|
HOMEASSISTANT_ENTITY_ID_PREFIX = "sensor.eos_"
|
||||||
|
|
||||||
|
resources_eos = get_resource_registry()
|
||||||
|
|
||||||
|
|
||||||
|
class HomeAssistantAdapterCommonSettings(SettingsBaseModel):
|
||||||
|
"""Common settings for the home assistant adapter."""
|
||||||
|
|
||||||
|
config_entity_ids: Optional[dict[str, str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": (
|
||||||
|
"Mapping of EOS config keys to Home Assistant entity IDs.\n"
|
||||||
|
"The config key has to be given by a ‘/’-separated path\n"
|
||||||
|
"e.g. devices/batteries/0/capacity_wh"
|
||||||
|
),
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"devices/batteries/0/capacity_wh": "sensor.battery1_capacity",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
load_emr_entity_ids: Optional[list[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": "Entity ID(s) of load energy meter reading [kWh]",
|
||||||
|
"examples": [
|
||||||
|
["sensor.load_energy_total_kwh"],
|
||||||
|
["sensor.load_emr1_kwh", "sensor.load_emr2_kwh"],
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
pv_production_emr_entity_ids: Optional[list[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": "Entity ID(s) of PV production energy meter reading [kWh]",
|
||||||
|
"examples": [
|
||||||
|
["sensor.pv_energy_total_kwh"],
|
||||||
|
["sensor.pv_emr1_kwh", "sensor.pv_emr2_kwh"],
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
device_measurement_entity_ids: Optional[dict[str, str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": "Mapping of EOS measurement keys used by device (resource) simulations to Home Assistant entity IDs.",
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"ev11_soc_factor": "sensor.ev11_soc_factor",
|
||||||
|
"battery1_soc_factor": "sensor.battery1_soc_factor",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
device_instruction_entity_ids: Optional[list[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": (
|
||||||
|
"Entity IDs for device (resource) instructions to be updated by EOS.\n"
|
||||||
|
f"The device ids (resource ids) have to be prepended by '{HOMEASSISTANT_ENTITY_ID_PREFIX}' to build the entity_id.\n"
|
||||||
|
f"E.g. The instruction for device id 'battery1' becomes the entity_id "
|
||||||
|
f"'{HOMEASSISTANT_ENTITY_ID_PREFIX}battery1'."
|
||||||
|
),
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
f"{HOMEASSISTANT_ENTITY_ID_PREFIX}battery1",
|
||||||
|
]
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
solution_entity_ids: Optional[list[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={
|
||||||
|
"description": (
|
||||||
|
"Entity IDs for optimization solution keys to be updated by EOS.\n"
|
||||||
|
f"The solution keys have to be prepended by '{HOMEASSISTANT_ENTITY_ID_PREFIX}' to build the entity_id.\n"
|
||||||
|
f"E.g. solution key 'battery1_idle_op_mode' becomes the entity_id "
|
||||||
|
f"'{HOMEASSISTANT_ENTITY_ID_PREFIX}battery1_idle_op_mode'."
|
||||||
|
),
|
||||||
|
"examples": [
|
||||||
|
[
|
||||||
|
f"{HOMEASSISTANT_ENTITY_ID_PREFIX}battery1_idle_mode_mode",
|
||||||
|
]
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Computed fields
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def homeassistant_entity_ids(self) -> list[str]:
|
||||||
|
"""Entity IDs available at Home Assistant."""
|
||||||
|
try:
|
||||||
|
from akkudoktoreos.adapter.adapter import get_adapter
|
||||||
|
|
||||||
|
adapter_eos = get_adapter()
|
||||||
|
result = adapter_eos.provider_by_id("HomeAssistant").get_homeassistant_entity_ids()
|
||||||
|
except:
|
||||||
|
return []
|
||||||
|
return result
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def eos_solution_entity_ids(self) -> list[str]:
|
||||||
|
"""Entity IDs for optimization solution available at EOS."""
|
||||||
|
try:
|
||||||
|
from akkudoktoreos.adapter.adapter import get_adapter
|
||||||
|
|
||||||
|
adapter_eos = get_adapter()
|
||||||
|
result = adapter_eos.provider_by_id("HomeAssistant").get_eos_solution_entity_ids()
|
||||||
|
except:
|
||||||
|
return []
|
||||||
|
return result
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def eos_device_instruction_entity_ids(self) -> list[str]:
|
||||||
|
"""Entity IDs for energy management instructions available at EOS."""
|
||||||
|
try:
|
||||||
|
from akkudoktoreos.adapter.adapter import get_adapter
|
||||||
|
|
||||||
|
adapter_eos = get_adapter()
|
||||||
|
result = adapter_eos.provider_by_id(
|
||||||
|
"HomeAssistant"
|
||||||
|
).get_eos_device_instruction_entity_ids()
|
||||||
|
except:
|
||||||
|
return []
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Validators
|
||||||
|
@field_validator("solution_entity_ids", mode="after")
|
||||||
|
@classmethod
|
||||||
|
def validate_solution_entity_ids(cls, value: Optional[list[str]]) -> Optional[list[str]]:
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
for entity_id in value:
|
||||||
|
if not entity_id.startswith(HOMEASSISTANT_ENTITY_ID_PREFIX):
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid optimization solution entity id '{entity_id}': prefix '{HOMEASSISTANT_ENTITY_ID_PREFIX}' expected."
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
@field_validator("device_instruction_entity_ids", mode="after")
|
||||||
|
@classmethod
|
||||||
|
def validate_device_instruction_entity_ids(
|
||||||
|
cls, value: Optional[list[str]]
|
||||||
|
) -> Optional[list[str]]:
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
for entity_id in value:
|
||||||
|
if not entity_id.startswith(HOMEASSISTANT_ENTITY_ID_PREFIX):
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid instruction entity id '{entity_id}': prefix '{HOMEASSISTANT_ENTITY_ID_PREFIX}' expected."
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class HomeAssistantAdapter(AdapterProvider):
|
||||||
|
@classmethod
|
||||||
|
def provider_id(cls) -> str:
|
||||||
|
"""Return the unique identifier for the adapter provider."""
|
||||||
|
return "HomeAssistant"
|
||||||
|
|
||||||
|
def get_homeassistant_entity_ids(self) -> list[str]:
|
||||||
|
"""Retrieve the available entity IDs from Home Assistant.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: The available entity IDs, or [].
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> entity_ids = get_homeassistant_entity_ids()
|
||||||
|
>>> print(entity_ids)
|
||||||
|
["sensor.pv_all", "sensor.battery1_soc"]
|
||||||
|
"""
|
||||||
|
if not TOKEN:
|
||||||
|
raise RuntimeError("Missing SUPERVISOR_TOKEN environment variable.")
|
||||||
|
|
||||||
|
entity_ids = []
|
||||||
|
|
||||||
|
url = f"{CORE_API}/states"
|
||||||
|
resp = requests.get(url, headers=HEADERS, timeout=10)
|
||||||
|
if resp.ok:
|
||||||
|
data = resp.json()
|
||||||
|
entity_ids = [
|
||||||
|
entity["entity_id"]
|
||||||
|
for entity in data
|
||||||
|
if not entity["entity_id"].startswith(HOMEASSISTANT_ENTITY_ID_PREFIX)
|
||||||
|
]
|
||||||
|
debug_msg = f"homeassistant_entity_ids: {entity_ids}"
|
||||||
|
logger.debug(debug_msg)
|
||||||
|
else:
|
||||||
|
error_msg = f"Failed to read entity states: {resp.text}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
return sorted(entity_ids)
|
||||||
|
|
||||||
|
def _entity_id_from_solution_key(self, key: str) -> str:
|
||||||
|
return HOMEASSISTANT_ENTITY_ID_PREFIX + key
|
||||||
|
|
||||||
|
def get_eos_solution_entity_ids(self) -> list[str]:
|
||||||
|
"""Retrieve the available entity IDs for the EOS optimization solution.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: The available entity IDs, or [].
|
||||||
|
"""
|
||||||
|
solution_entity_ids = []
|
||||||
|
try:
|
||||||
|
optimization_solution_keys = self.config.optimization.keys
|
||||||
|
for key in sorted(optimization_solution_keys):
|
||||||
|
solution_entity_ids.append(self._entity_id_from_solution_key(key))
|
||||||
|
except:
|
||||||
|
solution_entity_ids = []
|
||||||
|
return solution_entity_ids
|
||||||
|
|
||||||
|
def _entity_id_from_resource_id(self, resource_id: str) -> str:
|
||||||
|
return HOMEASSISTANT_ENTITY_ID_PREFIX + resource_id
|
||||||
|
|
||||||
|
def get_eos_device_instruction_entity_ids(self) -> list[str]:
|
||||||
|
"""Retrieve the available entity IDs for the EOS energy management plan instructions.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: The available entity IDs, or [].
|
||||||
|
"""
|
||||||
|
instruction_entity_ids = []
|
||||||
|
plan = self.ems.plan()
|
||||||
|
if plan:
|
||||||
|
resource_ids = plan.get_resources()
|
||||||
|
for resource_id in resource_ids:
|
||||||
|
instruction_entity_ids.append(self._entity_id_from_resource_id(resource_id))
|
||||||
|
return sorted(instruction_entity_ids)
|
||||||
|
|
||||||
|
def set_entity_state(
|
||||||
|
self, entity_id: str, state_value: str, attributes: dict | None = None
|
||||||
|
) -> None:
|
||||||
|
"""Post or update a Home Assistant entity state.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entity_id (str): The Home Assistant entity ID to update.
|
||||||
|
state_value (str): The new state value for the entity.
|
||||||
|
attributes (dict | None): Optional dictionary of additional attributes.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
requests.RequestException: If the HTTP request to Home Assistant fails.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> set_entity_state("sensor.energy_optimizer_status", "running")
|
||||||
|
"""
|
||||||
|
if not TOKEN:
|
||||||
|
raise RuntimeError("Missing SUPERVISOR_TOKEN environment variable.")
|
||||||
|
|
||||||
|
url = f"{CORE_API}/states/{entity_id}"
|
||||||
|
data = {"state": state_value, "attributes": attributes or {}}
|
||||||
|
resp = requests.post(url, headers=HEADERS, json=data, timeout=10)
|
||||||
|
if resp.status_code not in (200, 201):
|
||||||
|
error_msg = f"Failed to update {entity_id}: {resp.text}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
else:
|
||||||
|
debug_msg = f"Updated {entity_id} = {state_value}"
|
||||||
|
logger.debug(debug_msg)
|
||||||
|
|
||||||
|
def get_entity_state(self, entity_id: str) -> str:
|
||||||
|
"""Retrieve the current state of an entity from Home Assistant.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entity_id (str): The Home Assistant entity ID to query.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The current state of the entity.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> state = get_entity_state("switch.living_room_lamp")
|
||||||
|
>>> print(state)
|
||||||
|
"on"
|
||||||
|
"""
|
||||||
|
if not TOKEN:
|
||||||
|
raise RuntimeError("Missing SUPERVISOR_TOKEN environment variable.")
|
||||||
|
|
||||||
|
url = f"{CORE_API}/states/{entity_id}"
|
||||||
|
resp = requests.get(url, headers=HEADERS, timeout=10)
|
||||||
|
if resp.ok:
|
||||||
|
data = resp.json()
|
||||||
|
debug_msg = f"{entity_id}: {data['state']}"
|
||||||
|
logger.debug(debug_msg)
|
||||||
|
return data["state"]
|
||||||
|
else:
|
||||||
|
error_msg = f"Failed to read {entity_id}: {resp.text}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
def _convert_entity_state(self, state: str) -> Union[bool, float, str, None]:
|
||||||
|
"""Convert a Home Assistant entity state to a Python value.
|
||||||
|
|
||||||
|
This method converts the raw ``state`` string of a Home Assistant entity
|
||||||
|
into an appropriate Python type, following Home Assistant's global
|
||||||
|
state model and commonly used domain semantics.
|
||||||
|
|
||||||
|
Conversion rules:
|
||||||
|
|
||||||
|
**Availability states**
|
||||||
|
- ``"unavailable"``, ``"unknown"``, ``"none"`` → ``None``
|
||||||
|
|
||||||
|
**Binary / boolean states**
|
||||||
|
Used by binary sensors and many device domains:
|
||||||
|
- ``"on"``, ``"true"``, ``"yes"``, ``"open"``, ``"opening"``,
|
||||||
|
``"locked"``, ``"home"``, ``"detected"``, ``"active"`` → ``True``
|
||||||
|
- ``"off"``, ``"false"``, ``"no"``, ``"closed"``, ``"closing"``,
|
||||||
|
``"unlocked"``, ``"not_home"``, ``"clear"``, ``"idle"`` → ``False``
|
||||||
|
|
||||||
|
**Numeric states**
|
||||||
|
- Values that can be parsed as numbers are converted to ``float``.
|
||||||
|
This covers most sensor entities (temperature, power, energy, etc.).
|
||||||
|
|
||||||
|
**Other states**
|
||||||
|
- Any remaining states (e.g. ``"playing"``, ``"paused"``,
|
||||||
|
``"cooling"``, ``"heating"``, ``"standby"``, ``"jammed"``) are
|
||||||
|
returned as their original string value.
|
||||||
|
|
||||||
|
The input state is normalized using ``strip()`` and ``lower()`` before
|
||||||
|
conversion. If numeric conversion fails, the original unmodified
|
||||||
|
state string is returned.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
state: Raw entity state as provided by Home Assistant.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The converted entity state as one of:
|
||||||
|
``None``, ``bool``, ``float``, or ``str``.
|
||||||
|
"""
|
||||||
|
raw_state = state
|
||||||
|
value = state.strip().lower()
|
||||||
|
|
||||||
|
# Availability / unknown states
|
||||||
|
if value in {"unavailable", "unknown", "none"}:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# States that semantically represent True
|
||||||
|
if value in {
|
||||||
|
"on",
|
||||||
|
"true",
|
||||||
|
"yes",
|
||||||
|
"y",
|
||||||
|
"open",
|
||||||
|
"opening",
|
||||||
|
"locked",
|
||||||
|
"home",
|
||||||
|
"detected",
|
||||||
|
"active",
|
||||||
|
}:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# States that semantically represent False
|
||||||
|
if value in {
|
||||||
|
"off",
|
||||||
|
"false",
|
||||||
|
"no",
|
||||||
|
"n",
|
||||||
|
"closed",
|
||||||
|
"closing",
|
||||||
|
"unlocked",
|
||||||
|
"not_home",
|
||||||
|
"clear",
|
||||||
|
"idle",
|
||||||
|
}:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Numeric states (sensors, counters, percentages, etc.)
|
||||||
|
try:
|
||||||
|
return float(value)
|
||||||
|
except ValueError:
|
||||||
|
# Preserve original state for enums and free-text states
|
||||||
|
return raw_state
|
||||||
|
|
||||||
|
def _update_data(self) -> None:
|
||||||
|
stage = self.ems.stage()
|
||||||
|
if stage == EnergyManagementStage.DATA_ACQUISITION:
|
||||||
|
# Sync configuration
|
||||||
|
entity_ids = self.config.adapter.homeassistant.config_entity_ids
|
||||||
|
if entity_ids:
|
||||||
|
for (
|
||||||
|
config_key,
|
||||||
|
entity_id,
|
||||||
|
) in entity_ids.items():
|
||||||
|
try:
|
||||||
|
state = self.get_entity_state(entity_id)
|
||||||
|
logger.debug(f"Entity {entity_id}: {state}")
|
||||||
|
value = self._convert_entity_state(state)
|
||||||
|
if value:
|
||||||
|
self.config.set_nested_value(config_key, value)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{e}")
|
||||||
|
|
||||||
|
# Retrieve measurements necessary for device simulations
|
||||||
|
entity_ids = self.config.adapter.homeassistant.device_measurement_entity_ids
|
||||||
|
if entity_ids:
|
||||||
|
for (
|
||||||
|
measurement_key,
|
||||||
|
entity_id,
|
||||||
|
) in entity_ids.items():
|
||||||
|
if entity_id:
|
||||||
|
try:
|
||||||
|
state = self.get_entity_state(entity_id)
|
||||||
|
logger.debug(f"Entity {entity_id}: {state}")
|
||||||
|
if state:
|
||||||
|
measurement_value = float(state)
|
||||||
|
self.measurement.update_value(
|
||||||
|
self.ems_start_datetime, measurement_key, measurement_value
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{e}")
|
||||||
|
|
||||||
|
# Retrieve measurements for load prediction
|
||||||
|
entity_ids = self.config.adapter.homeassistant.load_emr_entity_ids
|
||||||
|
if entity_ids:
|
||||||
|
measurement_keys = self.config.measurement.load_emr_keys
|
||||||
|
if measurement_keys is None:
|
||||||
|
measurement_keys = []
|
||||||
|
for entity_id in entity_ids:
|
||||||
|
measurement_key = entity_id
|
||||||
|
if measurement_key not in measurement_keys:
|
||||||
|
measurement_keys.append(measurement_key)
|
||||||
|
self.comfig.measurement.load_emr_keys = measurement_keys
|
||||||
|
try:
|
||||||
|
state = self.get_entity_state(entity_id)
|
||||||
|
logger.debug(f"Entity {entity_id}: {state}")
|
||||||
|
if state:
|
||||||
|
measurement_value = float(state)
|
||||||
|
self.measurement.update_value(
|
||||||
|
self.ems_start_datetime, measurement_key, measurement_value
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{e}")
|
||||||
|
|
||||||
|
# Retrieve measurements for PV prediction
|
||||||
|
entity_ids = self.config.adapter.homeassistant.pv_production_emr_entity_ids
|
||||||
|
if entity_ids:
|
||||||
|
measurement_keys = self.config.measurement.pv_production_emr_keys
|
||||||
|
if measurement_keys is None:
|
||||||
|
measurement_keys = []
|
||||||
|
for entity_id in entity_ids:
|
||||||
|
measurement_key = entity_id
|
||||||
|
if measurement_key not in measurement_keys:
|
||||||
|
measurement_keys.append(measurement_key)
|
||||||
|
self.comfig.measurement.pv_production_emr_keys = measurement_keys
|
||||||
|
try:
|
||||||
|
state = self.get_entity_state(entity_id)
|
||||||
|
logger.debug(f"Entity {entity_id}: {state}")
|
||||||
|
if state:
|
||||||
|
measurement_value = float(state)
|
||||||
|
self.measurement.update_value(
|
||||||
|
self.ems_start_datetime, measurement_key, measurement_value
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{e}")
|
||||||
|
|
||||||
|
# We got data - mark the update time
|
||||||
|
self.update_datetime = to_datetime()
|
||||||
|
|
||||||
|
if stage == EnergyManagementStage.CONTROL_DISPATCH:
|
||||||
|
# Currently active optimization solution
|
||||||
|
optimization_solution = self.ems.optimization_solution()
|
||||||
|
entity_ids = self.config.adapter.homeassistant.solution_entity_ids
|
||||||
|
if optimization_solution and entity_ids:
|
||||||
|
df = optimization_solution.solution.to_dataframe()
|
||||||
|
now = pd.Timestamp.now(tz=df.index.tz)
|
||||||
|
row = df.loc[:now].iloc[-1] # Last known value before now
|
||||||
|
for entity_id in entity_ids:
|
||||||
|
solution_key = entity_id[len(HOMEASSISTANT_ENTITY_ID_PREFIX) :]
|
||||||
|
try:
|
||||||
|
self.set_entity_state(entity_id, row[solution_key])
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{e}")
|
||||||
|
# Currently active instructions
|
||||||
|
instructions = self.ems.plan().get_active_instructions()
|
||||||
|
entity_ids = self.config.adapter.homeassistant.device_instruction_entity_ids
|
||||||
|
if instructions and entity_ids:
|
||||||
|
for instruction in instructions:
|
||||||
|
entity_id = self._entity_id_from_resource_id(instruction.resource_id)
|
||||||
|
if entity_id in entity_ids:
|
||||||
|
if isinstance(instruction, (DDBCInstruction, FRBCInstruction)):
|
||||||
|
state = instruction.operation_mode_id.lower()
|
||||||
|
attributes = {
|
||||||
|
"operation_mode_factor": instruction.operation_mode_factor,
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
self.set_entity_state(entity_id, state, attributes)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{e}")
|
||||||
128
src/akkudoktoreos/adapter/nodered.py
Normal file
128
src/akkudoktoreos/adapter/nodered.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
"""Nod-RED adapter."""
|
||||||
|
|
||||||
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from loguru import logger
|
||||||
|
from pydantic import Field, field_validator
|
||||||
|
|
||||||
|
from akkudoktoreos.adapter.adapterabc import AdapterProvider
|
||||||
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
from akkudoktoreos.core.emplan import DDBCInstruction, FRBCInstruction
|
||||||
|
from akkudoktoreos.core.ems import EnergyManagementStage
|
||||||
|
from akkudoktoreos.server.server import get_default_host, validate_ip_or_hostname
|
||||||
|
from akkudoktoreos.utils.datetimeutil import to_datetime
|
||||||
|
|
||||||
|
|
||||||
|
class NodeREDAdapterCommonSettings(SettingsBaseModel):
    r"""Common settings for the Node-RED adapter.

    The Node-RED adapter sends to HTTP IN nodes.

    This is the example flow:

    [HTTP In \\<URL\\>] -> [Function (parse payload)] -> [Debug] -> [HTTP Response]

    There are two URLs that are used:

    - GET /eos/data_acquisition
      The GET is issued before the optimization.
    - POST /eos/control_dispatch
      The POST is issued after the optimization.
    """

    # Node-RED server address; validated below to be an IP address or hostname.
    host: Optional[str] = Field(
        default=get_default_host(),
        json_schema_extra={
            "description": "Node-RED server IP address. Defaults to 127.0.0.1.",
            "examples": ["127.0.0.1", "localhost"],
        },
    )
    # 1880 is the Node-RED default port.
    port: Optional[int] = Field(
        default=1880,
        json_schema_extra={
            "description": "Node-RED server IP port number. Defaults to 1880.",
            "examples": [
                1880,
            ],
        },
    )

    @field_validator("host", mode="before")
    @classmethod
    def validate_server_host(cls, value: Optional[str]) -> Optional[str]:
        """Validate that the host is a usable IP address or hostname."""
        if isinstance(value, str):
            value = validate_ip_or_hostname(value)
        return value

    @field_validator("port")
    @classmethod
    def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
        """Validate that the port is in the registered/user port range."""
        if value is not None and not (1024 <= value <= 49151):
            raise ValueError("Server port number must be between 1024 and 49151.")
        return value
|
class NodeREDAdapter(AdapterProvider):
    """Adapter provider that exchanges energy management data with a Node-RED server.

    On control dispatch the currently active instructions are POSTed to the
    Node-RED `/eos/control_dispatch` endpoint; on data acquisition a GET is
    issued to `/eos/data_acquisition`.
    """

    def provider_id(self) -> str:
        """Return the unique identifier for the adapter provider."""
        return "NodeRED"

    def _update_data(self) -> None:
        """Custom adapter data update logic.

        Data update may be requested at different stages of energy management. The stage can be
        detected by self.ems.stage().

        Raises:
            RuntimeError: If the HTTP request to the Node-RED server fails.
        """
        server = f"http://{self.config.adapter.nodered.host}:{self.config.adapter.nodered.port}"

        data: Optional[dict[str, Union[str, float]]] = None
        stage = self.ems.stage()
        if stage == EnergyManagementStage.CONTROL_DISPATCH:
            data = {}
            # Currently active instructions
            instructions = self.ems.plan().get_active_instructions()
            for instruction in instructions:
                # Instruction ids look like "<resource_id>@<suffix>"; strip the suffix.
                idx = instruction.id.find("@")
                resource_id = instruction.id[:idx] if idx != -1 else instruction.id
                operation_mode_id = "<unknown>"
                operation_mode_factor = 0.0
                if isinstance(instruction, (DDBCInstruction, FRBCInstruction)):
                    operation_mode_id = instruction.operation_mode_id
                    operation_mode_factor = instruction.operation_mode_factor
                data[f"{resource_id}_op_mode"] = operation_mode_id
                data[f"{resource_id}_op_factor"] = operation_mode_factor
        elif stage == EnergyManagementStage.DATA_ACQUISITION:
            data = {}

        if data is None:
            # Stage does not require a Node-RED round trip.
            return

        logger.info(f"NodeRED {str(stage).lower()} at {server}: {data}")

        try:
            error_msg = None
            if stage == EnergyManagementStage.CONTROL_DISPATCH:
                response = requests.post(f"{server}/eos/{str(stage).lower()}", json=data, timeout=5)
            elif stage == EnergyManagementStage.DATA_ACQUISITION:
                response = requests.get(f"{server}/eos/{str(stage).lower()}", json=data, timeout=5)
            response.raise_for_status()
        except requests.exceptions.HTTPError:
            # NOTE: the bound exception was unused; the error detail is taken
            # from the response body instead.
            try:
                # Try to get 'detail' from the JSON response
                detail = response.json().get(
                    "detail", f"No error details for data '{data}' '{response.text}'"
                )
            except ValueError:
                # Response is not JSON
                detail = f"No error details for data '{data}' '{response.text}'"
            error_msg = f"NodeRED `{str(stage).lower()}` fails at `{server}`: {detail}"
        except Exception as e:
            error_msg = f"NodeRED `{str(stage).lower()}` fails at `{server}`: {e}"
        if error_msg:
            logger.error(error_msg)
            raise RuntimeError(error_msg)

        if stage == EnergyManagementStage.DATA_ACQUISITION:
            data = response.json()

        # We got data - mark the update time
        self.update_datetime = to_datetime()
|
||||||
@@ -13,7 +13,7 @@ import json
|
|||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, ClassVar, Optional, Type
|
from typing import Any, ClassVar, Optional, Type, Union
|
||||||
|
|
||||||
import pydantic_settings
|
import pydantic_settings
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
@@ -21,6 +21,7 @@ from platformdirs import user_config_dir, user_data_dir
|
|||||||
from pydantic import Field, computed_field, field_validator
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
# settings
|
# settings
|
||||||
|
from akkudoktoreos.adapter.adapter import AdapterCommonSettings
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.config.configmigrate import migrate_config_data, migrate_config_file
|
from akkudoktoreos.config.configmigrate import migrate_config_data, migrate_config_file
|
||||||
from akkudoktoreos.core.cachesettings import CacheCommonSettings
|
from akkudoktoreos.core.cachesettings import CacheCommonSettings
|
||||||
@@ -65,25 +66,17 @@ def get_absolute_path(
|
|||||||
|
|
||||||
|
|
||||||
class GeneralSettings(SettingsBaseModel):
|
class GeneralSettings(SettingsBaseModel):
|
||||||
"""Settings for common configuration.
|
"""General settings."""
|
||||||
|
|
||||||
General configuration to set directories of cache and output files and system location (latitude
|
|
||||||
and longitude).
|
|
||||||
Validators ensure each parameter is within a specified range. A computed property, `timezone`,
|
|
||||||
determines the time zone based on latitude and longitude.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
|
|
||||||
longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
|
|
||||||
|
|
||||||
Properties:
|
|
||||||
timezone (Optional[str]): Computed time zone string based on the specified latitude
|
|
||||||
and longitude.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_config_folder_path: ClassVar[Optional[Path]] = None
|
_config_folder_path: ClassVar[Optional[Path]] = None
|
||||||
_config_file_path: ClassVar[Optional[Path]] = None
|
_config_file_path: ClassVar[Optional[Path]] = None
|
||||||
|
|
||||||
|
# Detect Home Assistant add-on environment
|
||||||
|
# Home Assistant sets this environment variable automatically
|
||||||
|
_home_assistant_addon: ClassVar[bool] = (
|
||||||
|
"HASSIO_TOKEN" in os.environ or "SUPERVISOR_TOKEN" in os.environ
|
||||||
|
)
|
||||||
|
|
||||||
version: str = Field(
|
version: str = Field(
|
||||||
default=__version__,
|
default=__version__,
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
@@ -109,21 +102,21 @@ class GeneralSettings(SettingsBaseModel):
|
|||||||
ge=-90.0,
|
ge=-90.0,
|
||||||
le=90.0,
|
le=90.0,
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
"description": "Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)"
|
"description": "Latitude in decimal degrees between -90 and 90. North is positive (ISO 19115) (°)"
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
longitude: Optional[float] = Field(
|
longitude: Optional[float] = Field(
|
||||||
default=13.405,
|
default=13.405,
|
||||||
ge=-180.0,
|
ge=-180.0,
|
||||||
le=180.0,
|
le=180.0,
|
||||||
json_schema_extra={"description": "Longitude in decimal degrees, within -180 to 180 (°)"},
|
json_schema_extra={"description": "Longitude in decimal degrees within -180 to 180 (°)"},
|
||||||
)
|
)
|
||||||
|
|
||||||
# Computed fields
|
# Computed fields
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def timezone(self) -> Optional[str]:
|
def timezone(self) -> Optional[str]:
|
||||||
"""Compute timezone based on latitude and longitude."""
|
"""Computed timezone based on latitude and longitude."""
|
||||||
if self.latitude and self.longitude:
|
if self.latitude and self.longitude:
|
||||||
return to_timezone(location=(self.latitude, self.longitude), as_string=True)
|
return to_timezone(location=(self.latitude, self.longitude), as_string=True)
|
||||||
return None
|
return None
|
||||||
@@ -131,7 +124,10 @@ class GeneralSettings(SettingsBaseModel):
|
|||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def data_output_path(self) -> Optional[Path]:
|
def data_output_path(self) -> Optional[Path]:
|
||||||
"""Compute data_output_path based on data_folder_path."""
|
"""Computed data_output_path based on data_folder_path."""
|
||||||
|
if self.home_assistant_addon:
|
||||||
|
# Only /data is persistent for home assistant add-on
|
||||||
|
return Path("/data/output")
|
||||||
return get_absolute_path(self.data_folder_path, self.data_output_subpath)
|
return get_absolute_path(self.data_folder_path, self.data_output_subpath)
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@@ -146,6 +142,12 @@ class GeneralSettings(SettingsBaseModel):
|
|||||||
"""Path to EOS configuration file."""
|
"""Path to EOS configuration file."""
|
||||||
return self._config_file_path
|
return self._config_file_path
|
||||||
|
|
||||||
|
@computed_field  # type: ignore[prop-decorator]
@property
def home_assistant_addon(self) -> bool:
    """EOS is running as home assistant add-on.

    True when a Home Assistant supervisor token (HASSIO_TOKEN or
    SUPERVISOR_TOKEN) was present in the environment at class definition time.
    """
    return self._home_assistant_addon
|
||||||
|
|
||||||
compatible_versions: ClassVar[list[str]] = [__version__]
|
compatible_versions: ClassVar[list[str]] = [__version__]
|
||||||
|
|
||||||
@field_validator("version")
|
@field_validator("version")
|
||||||
@@ -160,6 +162,22 @@ class GeneralSettings(SettingsBaseModel):
|
|||||||
raise ValueError(error)
|
raise ValueError(error)
|
||||||
return v
|
return v
|
||||||
|
|
||||||
|
@field_validator("data_folder_path", mode="after")
|
||||||
|
@classmethod
|
||||||
|
def validate_data_folder_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||||
|
"""Ensure dir is available."""
|
||||||
|
if cls._home_assistant_addon:
|
||||||
|
# Force to home assistant add-on /data directory
|
||||||
|
return Path("/data")
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
if isinstance(value, str):
|
||||||
|
value = Path(value)
|
||||||
|
value.resolve()
|
||||||
|
if not value.is_dir():
|
||||||
|
raise ValueError(f"Data folder path '{value}' is not a directory.")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
class SettingsEOS(pydantic_settings.BaseSettings, PydanticModelNestedValueMixin):
|
class SettingsEOS(pydantic_settings.BaseSettings, PydanticModelNestedValueMixin):
|
||||||
"""Settings for all EOS.
|
"""Settings for all EOS.
|
||||||
@@ -212,6 +230,9 @@ class SettingsEOS(pydantic_settings.BaseSettings, PydanticModelNestedValueMixin)
|
|||||||
utils: Optional[UtilsCommonSettings] = Field(
|
utils: Optional[UtilsCommonSettings] = Field(
|
||||||
default=None, json_schema_extra={"description": "Utilities Settings"}
|
default=None, json_schema_extra={"description": "Utilities Settings"}
|
||||||
)
|
)
|
||||||
|
adapter: Optional[AdapterCommonSettings] = Field(
|
||||||
|
default=None, json_schema_extra={"description": "Adapter Settings"}
|
||||||
|
)
|
||||||
|
|
||||||
model_config = pydantic_settings.SettingsConfigDict(
|
model_config = pydantic_settings.SettingsConfigDict(
|
||||||
env_nested_delimiter="__",
|
env_nested_delimiter="__",
|
||||||
@@ -242,6 +263,7 @@ class SettingsEOSDefaults(SettingsEOS):
|
|||||||
weather: WeatherCommonSettings = WeatherCommonSettings()
|
weather: WeatherCommonSettings = WeatherCommonSettings()
|
||||||
server: ServerCommonSettings = ServerCommonSettings()
|
server: ServerCommonSettings = ServerCommonSettings()
|
||||||
utils: UtilsCommonSettings = UtilsCommonSettings()
|
utils: UtilsCommonSettings = UtilsCommonSettings()
|
||||||
|
adapter: AdapterCommonSettings = AdapterCommonSettings()
|
||||||
|
|
||||||
def __hash__(self) -> int:
|
def __hash__(self) -> int:
|
||||||
# Just for usage in configmigrate, finally overwritten when used by ConfigEOS.
|
# Just for usage in configmigrate, finally overwritten when used by ConfigEOS.
|
||||||
@@ -297,6 +319,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
APP_NAME: ClassVar[str] = "net.akkudoktor.eos" # reverse order
|
APP_NAME: ClassVar[str] = "net.akkudoktor.eos" # reverse order
|
||||||
APP_AUTHOR: ClassVar[str] = "akkudoktor"
|
APP_AUTHOR: ClassVar[str] = "akkudoktor"
|
||||||
EOS_DIR: ClassVar[str] = "EOS_DIR"
|
EOS_DIR: ClassVar[str] = "EOS_DIR"
|
||||||
|
EOS_DATA_DIR: ClassVar[str] = "EOS_DATA_DIR"
|
||||||
EOS_CONFIG_DIR: ClassVar[str] = "EOS_CONFIG_DIR"
|
EOS_CONFIG_DIR: ClassVar[str] = "EOS_CONFIG_DIR"
|
||||||
ENCODING: ClassVar[str] = "UTF-8"
|
ENCODING: ClassVar[str] = "UTF-8"
|
||||||
CONFIG_FILE_NAME: ClassVar[str] = "EOS.config.json"
|
CONFIG_FILE_NAME: ClassVar[str] = "EOS.config.json"
|
||||||
@@ -355,27 +378,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
- It ensures that a fallback to a default configuration file is always possible.
|
- It ensures that a fallback to a default configuration file is always possible.
|
||||||
"""
|
"""
|
||||||
# Ensure we know and have the config folder path and the config file
|
# Ensure we know and have the config folder path and the config file
|
||||||
config_file, exists = cls._get_config_file_path()
|
config_file = cls._setup_config_file()
|
||||||
config_dir = config_file.parent
|
|
||||||
if not exists:
|
|
||||||
config_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
# Create minimum config file
|
|
||||||
config_minimum_content = '{ "general": { "version": "' + __version__ + '" } }'
|
|
||||||
try:
|
|
||||||
config_file.write_text(config_minimum_content, encoding="utf-8")
|
|
||||||
except Exception as exc:
|
|
||||||
# Create minimum config in temporary config directory as last resort
|
|
||||||
error_msg = f"Could not create minimum config file in {config_dir}: {exc}"
|
|
||||||
logger.error(error_msg)
|
|
||||||
temp_dir = Path(tempfile.mkdtemp())
|
|
||||||
info_msg = f"Using temporary config directory {temp_dir}"
|
|
||||||
logger.info(info_msg)
|
|
||||||
config_dir = temp_dir
|
|
||||||
config_file = temp_dir / config_file.name
|
|
||||||
config_file.write_text(config_minimum_content, encoding="utf-8")
|
|
||||||
# Remember config_dir and config file
|
|
||||||
GeneralSettings._config_folder_path = config_dir
|
|
||||||
GeneralSettings._config_file_path = config_file
|
|
||||||
|
|
||||||
# All the settings sources in priority sequence
|
# All the settings sources in priority sequence
|
||||||
setting_sources = [
|
setting_sources = [
|
||||||
@@ -384,7 +387,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
dotenv_settings,
|
dotenv_settings,
|
||||||
]
|
]
|
||||||
|
|
||||||
# Apend file settings to sources
|
# Append file settings to sources
|
||||||
file_settings: Optional[pydantic_settings.JsonConfigSettingsSource] = None
|
file_settings: Optional[pydantic_settings.JsonConfigSettingsSource] = None
|
||||||
try:
|
try:
|
||||||
backup_file = config_file.with_suffix(f".{to_datetime(as_string='YYYYMMDDHHmmss')}")
|
backup_file = config_file.with_suffix(f".{to_datetime(as_string='YYYYMMDDHHmmss')}")
|
||||||
@@ -426,7 +429,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
# (Re-)load settings - call base class init
|
# (Re-)load settings - call base class init
|
||||||
SettingsEOSDefaults.__init__(self, *args, **kwargs)
|
SettingsEOSDefaults.__init__(self, *args, **kwargs)
|
||||||
# Init config file and data folder pathes
|
# Init config file and data folder pathes
|
||||||
self._create_initial_config_file()
|
self._setup_config_file()
|
||||||
self._update_data_folder_path()
|
self._update_data_folder_path()
|
||||||
self._initialized = True
|
self._initialized = True
|
||||||
logger.debug("Config setup:\n{}", self)
|
logger.debug("Config setup:\n{}", self)
|
||||||
@@ -559,17 +562,6 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _create_initial_config_file(self) -> None:
|
|
||||||
if self.general.config_file_path and not self.general.config_file_path.exists():
|
|
||||||
self.general.config_file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
try:
|
|
||||||
with self.general.config_file_path.open("w", encoding="utf-8", newline="\n") as f:
|
|
||||||
f.write(self.model_dump_json(indent=4))
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(
|
|
||||||
f"Could not write configuration file '{self.general.config_file_path}': {e}"
|
|
||||||
)
|
|
||||||
|
|
||||||
def _update_data_folder_path(self) -> None:
|
def _update_data_folder_path(self) -> None:
|
||||||
"""Updates path to the data directory."""
|
"""Updates path to the data directory."""
|
||||||
# From Settings
|
# From Settings
|
||||||
@@ -579,7 +571,16 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
self.general.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
return
|
return
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"Could not setup data dir: {e}")
|
logger.warning(f"Could not setup data dir {data_dir}: {e}")
|
||||||
|
# From EOS_DATA_DIR env
|
||||||
|
if env_dir := os.getenv(self.EOS_DATA_DIR):
|
||||||
|
try:
|
||||||
|
data_dir = Path(env_dir).resolve()
|
||||||
|
data_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.general.data_folder_path = data_dir
|
||||||
|
return
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not setup data dir {data_dir}: {e}")
|
||||||
# From EOS_DIR env
|
# From EOS_DIR env
|
||||||
if env_dir := os.getenv(self.EOS_DIR):
|
if env_dir := os.getenv(self.EOS_DIR):
|
||||||
try:
|
try:
|
||||||
@@ -588,7 +589,7 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
self.general.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
return
|
return
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"Could not setup data dir: {e}")
|
logger.warning(f"Could not setup data dir {data_dir}: {e}")
|
||||||
# From platform specific default path
|
# From platform specific default path
|
||||||
try:
|
try:
|
||||||
data_dir = Path(user_data_dir(self.APP_NAME, self.APP_AUTHOR))
|
data_dir = Path(user_data_dir(self.APP_NAME, self.APP_AUTHOR))
|
||||||
@@ -597,9 +598,10 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
self.general.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
return
|
return
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"Could not setup data dir: {e}")
|
logger.warning(f"Could not setup data dir {data_dir}: {e}")
|
||||||
# Current working directory
|
# Current working directory
|
||||||
data_dir = Path.cwd()
|
data_dir = Path.cwd()
|
||||||
|
logger.warning(f"Using data dir {data_dir}")
|
||||||
self.general.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -611,16 +613,28 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
2. user configuration directory
|
2. user configuration directory
|
||||||
3. current working directory
|
3. current working directory
|
||||||
|
|
||||||
|
If running as Home Assistant add-on returns /data/config/EOS.config.json.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
tuple[Path, bool]: The path to the configuration file and if there is already a config file there
|
tuple[Path, bool]: The path to the configuration file and if there is already a config file there
|
||||||
"""
|
"""
|
||||||
|
if GeneralSettings._home_assistant_addon:
|
||||||
|
# Only /data is persistent for home assistant add-on
|
||||||
|
cfile = Path("/data/config") / cls.CONFIG_FILE_NAME
|
||||||
|
logger.debug(f"Config file forced to: '{cfile}'")
|
||||||
|
return cfile, cfile.exists()
|
||||||
|
|
||||||
config_dirs = []
|
config_dirs = []
|
||||||
env_base_dir = os.getenv(cls.EOS_DIR)
|
env_eos_dir = os.getenv(cls.EOS_DIR)
|
||||||
env_config_dir = os.getenv(cls.EOS_CONFIG_DIR)
|
logger.debug(f"Environment EOS_DIR: '{env_eos_dir}'")
|
||||||
env_dir = get_absolute_path(env_base_dir, env_config_dir)
|
|
||||||
logger.debug(f"Environment config dir: '{env_dir}'")
|
env_eos_config_dir = os.getenv(cls.EOS_CONFIG_DIR)
|
||||||
if env_dir is not None:
|
logger.debug(f"Environment EOS_CONFIG_DIR: '{env_eos_config_dir}'")
|
||||||
config_dirs.append(env_dir.resolve())
|
env_config_dir = get_absolute_path(env_eos_dir, env_eos_config_dir)
|
||||||
|
logger.debug(f"Resulting environment config dir: '{env_config_dir}'")
|
||||||
|
|
||||||
|
if env_config_dir is not None:
|
||||||
|
config_dirs.append(env_config_dir.resolve())
|
||||||
config_dirs.append(Path(user_config_dir(cls.APP_NAME, cls.APP_AUTHOR)))
|
config_dirs.append(Path(user_config_dir(cls.APP_NAME, cls.APP_AUTHOR)))
|
||||||
config_dirs.append(Path.cwd())
|
config_dirs.append(Path.cwd())
|
||||||
for cdir in config_dirs:
|
for cdir in config_dirs:
|
||||||
@@ -628,8 +642,52 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
if cfile.exists():
|
if cfile.exists():
|
||||||
logger.debug(f"Found config file: '{cfile}'")
|
logger.debug(f"Found config file: '{cfile}'")
|
||||||
return cfile, True
|
return cfile, True
|
||||||
|
|
||||||
return config_dirs[0].joinpath(cls.CONFIG_FILE_NAME), False
|
return config_dirs[0].joinpath(cls.CONFIG_FILE_NAME), False
|
||||||
|
|
||||||
|
@classmethod
def _setup_config_file(cls) -> Path:
    """Setup config file.

    Creates an initial config file if it does not exist.

    Falls back to a fresh temporary directory if the preferred config
    directory is not writable, so that a config file is always available.

    Returns:
        config_file_path (Path): Path to config file
    """
    config_file_path, exists = cls._get_config_file_path()
    if (
        GeneralSettings._config_file_path
        and GeneralSettings._config_file_path != config_file_path
    ):
        # The resolved path differs from the previously remembered one;
        # only log this, the new path wins below.
        debug_msg = (
            f"Config file changed from '{GeneralSettings._config_file_path}' to "
            f"'{config_file_path}'"
        )
        logger.debug(debug_msg)
    if not exists:
        # Create minimum config file
        # Minimal JSON carrying only the version, so config migration can
        # pick it up on next load.
        config_minimum_content = '{ "general": { "version": "' + __version__ + '" } }'
        try:
            config_file_path.parent.mkdir(parents=True, exist_ok=True)
            config_file_path.write_text(config_minimum_content, encoding="utf-8")
        except Exception as exc:
            # Create minimum config in temporary config directory as last resort
            error_msg = (
                f"Could not create minimum config file in {config_file_path.parent}: {exc}"
            )
            logger.error(error_msg)
            temp_dir = Path(tempfile.mkdtemp())
            info_msg = f"Using temporary config directory {temp_dir}"
            logger.info(info_msg)
            config_file_path = temp_dir / config_file_path.name
            config_file_path.write_text(config_minimum_content, encoding="utf-8")

    # Remember config_dir and config file
    GeneralSettings._config_folder_path = config_file_path.parent
    GeneralSettings._config_file_path = config_file_path

    return config_file_path
|
||||||
|
|
||||||
def to_config_file(self) -> None:
|
def to_config_file(self) -> None:
|
||||||
"""Saves the current configuration to the configuration file.
|
"""Saves the current configuration to the configuration file.
|
||||||
|
|
||||||
|
|||||||
@@ -21,6 +21,11 @@ if TYPE_CHECKING:
|
|||||||
# - tuple[str, Callable[[Any], Any]] (new path + transform)
|
# - tuple[str, Callable[[Any], Any]] (new path + transform)
|
||||||
# - None (drop)
|
# - None (drop)
|
||||||
MIGRATION_MAP: Dict[str, Union[str, Tuple[str, Callable[[Any], Any]], None]] = {
|
MIGRATION_MAP: Dict[str, Union[str, Tuple[str, Callable[[Any], Any]], None]] = {
|
||||||
|
# 0.2.0.dev -> 0.2.0.dev
|
||||||
|
"adapter/homeassistant/optimization_solution_entity_ids": (
|
||||||
|
"adapter/homeassistant/solution_entity_ids",
|
||||||
|
lambda v: v if isinstance(v, list) else None,
|
||||||
|
),
|
||||||
# 0.2.0 -> 0.2.0+dev
|
# 0.2.0 -> 0.2.0+dev
|
||||||
"elecprice/provider_settings/ElecPriceImport/import_file_path": "elecprice/elecpriceimport/import_file_path",
|
"elecprice/provider_settings/ElecPriceImport/import_file_path": "elecprice/elecpriceimport/import_file_path",
|
||||||
"elecprice/provider_settings/ElecPriceImport/import_json": "elecprice/elecpriceimport/import_json",
|
"elecprice/provider_settings/ElecPriceImport/import_json": "elecprice/elecpriceimport/import_json",
|
||||||
|
|||||||
@@ -27,7 +27,10 @@ class CacheCommonSettings(SettingsBaseModel):
|
|||||||
# Do not make this a pydantic computed field. The pydantic model must be fully initialized
|
# Do not make this a pydantic computed field. The pydantic model must be fully initialized
|
||||||
# to have access to config.general, which may not be the case if it is a computed field.
|
# to have access to config.general, which may not be the case if it is a computed field.
|
||||||
def path(self) -> Optional[Path]:
|
def path(self) -> Optional[Path]:
|
||||||
"""Compute cache path based on general.data_folder_path."""
|
"""Computed cache path based on general.data_folder_path."""
|
||||||
|
if self.config.general.home_assistant_addon:
|
||||||
|
# Only /data is persistent for home assistant add-on
|
||||||
|
return Path("/data/cache")
|
||||||
data_cache_path = self.config.general.data_folder_path
|
data_cache_path = self.config.general.data_folder_path
|
||||||
if data_cache_path is None or self.subpath is None:
|
if data_cache_path is None or self.subpath is None:
|
||||||
return None
|
return None
|
||||||
|
|||||||
@@ -18,12 +18,53 @@ from loguru import logger
|
|||||||
from akkudoktoreos.core.decorators import classproperty
|
from akkudoktoreos.core.decorators import classproperty
|
||||||
from akkudoktoreos.utils.datetimeutil import DateTime
|
from akkudoktoreos.utils.datetimeutil import DateTime
|
||||||
|
|
||||||
|
adapter_eos: Any = None
|
||||||
config_eos: Any = None
|
config_eos: Any = None
|
||||||
measurement_eos: Any = None
|
measurement_eos: Any = None
|
||||||
prediction_eos: Any = None
|
prediction_eos: Any = None
|
||||||
ems_eos: Any = None
|
ems_eos: Any = None
|
||||||
|
|
||||||
|
|
||||||
|
class AdapterMixin:
    """Mixin granting lazy access to the global EOS adapter.

    Classes that need the globally shared adapter instance can subclass this
    mixin and read the `adapter` attribute. The instance is looked up on first
    access only, which keeps module import free of circular dependencies.

    Attributes:
        adapter (Adapter): Property to access the global EOS adapter.

    Example:
        .. code-block:: python

            class MyEOSClass(AdapterMixin):
                def my_method(self):
                    self.adapter.update_date()

    """

    @classproperty
    def adapter(cls) -> Any:
        """Convenience class method/ attribute to retrieve the EOS adapters.

        Returns:
            Adapter: The adapters.
        """
        global adapter_eos
        if adapter_eos is not None:
            return adapter_eos
        # Import here (not at module top) to avoid a circular import at load time.
        from akkudoktoreos.adapter.adapter import get_adapter

        adapter_eos = get_adapter()
        return adapter_eos
|
||||||
|
|
||||||
|
|
||||||
class ConfigMixin:
|
class ConfigMixin:
|
||||||
"""Mixin class for managing EOS configuration data.
|
"""Mixin class for managing EOS configuration data.
|
||||||
|
|
||||||
|
|||||||
@@ -1018,7 +1018,7 @@ class DataSequence(DataBase, MutableSequence):
|
|||||||
end_datetime: Optional[DateTime] = None,
|
end_datetime: Optional[DateTime] = None,
|
||||||
interval: Optional[Duration] = None,
|
interval: Optional[Duration] = None,
|
||||||
fill_method: Optional[str] = None,
|
fill_method: Optional[str] = None,
|
||||||
dropna: Optional[bool] = None,
|
dropna: Optional[bool] = True,
|
||||||
) -> NDArray[Shape["*"], Any]:
|
) -> NDArray[Shape["*"], Any]:
|
||||||
"""Extract an array indexed by fixed time intervals from data records within an optional date range.
|
"""Extract an array indexed by fixed time intervals from data records within an optional date range.
|
||||||
|
|
||||||
@@ -1032,17 +1032,19 @@ class DataSequence(DataBase, MutableSequence):
|
|||||||
- 'ffill': Forward fill missing values.
|
- 'ffill': Forward fill missing values.
|
||||||
- 'bfill': Backward fill missing values.
|
- 'bfill': Backward fill missing values.
|
||||||
- 'none': Defaults to 'linear' for numeric values, otherwise 'ffill'.
|
- 'none': Defaults to 'linear' for numeric values, otherwise 'ffill'.
|
||||||
dropna: (bool, optional): Whether to drop NAN/ None values before processing. Defaults to True.
|
dropna: (bool, optional): Whether to drop NAN/ None values before processing.
|
||||||
|
Defaults to True.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
np.ndarray: A NumPy Array of the values extracted from the specified key.
|
np.ndarray: A NumPy Array of the values at the chosen frequency extracted from the
|
||||||
|
specified key.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
KeyError: If the specified key is not found in any of the DataRecords.
|
KeyError: If the specified key is not found in any of the DataRecords.
|
||||||
"""
|
"""
|
||||||
self._validate_key(key)
|
self._validate_key(key)
|
||||||
|
|
||||||
# General check on fill_method
|
# Validate fill method
|
||||||
if fill_method not in ("ffill", "bfill", "linear", "none", None):
|
if fill_method not in ("ffill", "bfill", "linear", "none", None):
|
||||||
raise ValueError(f"Unsupported fill method: {fill_method}")
|
raise ValueError(f"Unsupported fill method: {fill_method}")
|
||||||
|
|
||||||
@@ -1050,13 +1052,17 @@ class DataSequence(DataBase, MutableSequence):
|
|||||||
start_datetime = to_datetime(start_datetime, to_maxtime=False) if start_datetime else None
|
start_datetime = to_datetime(start_datetime, to_maxtime=False) if start_datetime else None
|
||||||
end_datetime = to_datetime(end_datetime, to_maxtime=False) if end_datetime else None
|
end_datetime = to_datetime(end_datetime, to_maxtime=False) if end_datetime else None
|
||||||
|
|
||||||
resampled = None
|
|
||||||
if interval is None:
|
if interval is None:
|
||||||
interval = to_duration("1 hour")
|
interval = to_duration("1 hour")
|
||||||
|
resample_freq = "1h"
|
||||||
|
else:
|
||||||
|
resample_freq = to_duration(interval, as_string="pandas")
|
||||||
|
|
||||||
|
# Load raw lists (already sorted & filtered)
|
||||||
dates, values = self.key_to_lists(key=key, dropna=dropna)
|
dates, values = self.key_to_lists(key=key, dropna=dropna)
|
||||||
values_len = len(values)
|
values_len = len(values)
|
||||||
|
|
||||||
|
# Bring lists into shape
|
||||||
if values_len < 1:
|
if values_len < 1:
|
||||||
# No values, assume at least one value set to None
|
# No values, assume at least one value set to None
|
||||||
if start_datetime is not None:
|
if start_datetime is not None:
|
||||||
@@ -1092,40 +1098,40 @@ class DataSequence(DataBase, MutableSequence):
|
|||||||
dates.append(end_datetime)
|
dates.append(end_datetime)
|
||||||
values.append(values[-1])
|
values.append(values[-1])
|
||||||
|
|
||||||
series = pd.Series(data=values, index=pd.DatetimeIndex(dates), name=key)
|
# Construct series
|
||||||
if not series.index.inferred_type == "datetime64":
|
series = pd.Series(values, index=pd.DatetimeIndex(dates), name=key)
|
||||||
|
if series.index.inferred_type != "datetime64":
|
||||||
raise TypeError(
|
raise TypeError(
|
||||||
f"Expected DatetimeIndex, but got {type(series.index)} "
|
f"Expected DatetimeIndex, but got {type(series.index)} "
|
||||||
f"infered to {series.index.inferred_type}: {series}"
|
f"infered to {series.index.inferred_type}: {series}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Handle missing values
|
# Determine default fill method depending on dtype
|
||||||
if series.dtype in [np.float64, np.float32, np.int64, np.int32]:
|
if fill_method is None:
|
||||||
# Numeric types
|
if pd.api.types.is_numeric_dtype(series):
|
||||||
if fill_method is None:
|
|
||||||
fill_method = "linear"
|
fill_method = "linear"
|
||||||
# Resample the series to the specified interval
|
else:
|
||||||
resampled = series.resample(interval, origin=resample_origin).first()
|
|
||||||
if fill_method == "linear":
|
|
||||||
resampled = resampled.interpolate(method="linear")
|
|
||||||
elif fill_method == "ffill":
|
|
||||||
resampled = resampled.ffill()
|
|
||||||
elif fill_method == "bfill":
|
|
||||||
resampled = resampled.bfill()
|
|
||||||
elif fill_method != "none":
|
|
||||||
raise ValueError(f"Unsupported fill method: {fill_method}")
|
|
||||||
else:
|
|
||||||
# Non-numeric types
|
|
||||||
if fill_method is None:
|
|
||||||
fill_method = "ffill"
|
fill_method = "ffill"
|
||||||
# Resample the series to the specified interval
|
|
||||||
|
# Perform the resampling
|
||||||
|
if pd.api.types.is_numeric_dtype(series):
|
||||||
|
# numeric → use mean
|
||||||
|
resampled = series.resample(interval, origin=resample_origin).mean()
|
||||||
|
else:
|
||||||
|
# non-numeric → fallback (first, last, mode, or ffill)
|
||||||
resampled = series.resample(interval, origin=resample_origin).first()
|
resampled = series.resample(interval, origin=resample_origin).first()
|
||||||
if fill_method == "ffill":
|
|
||||||
resampled = resampled.ffill()
|
# Handle missing values after resampling
|
||||||
elif fill_method == "bfill":
|
if fill_method == "linear" and pd.api.types.is_numeric_dtype(series):
|
||||||
resampled = resampled.bfill()
|
resampled = resampled.interpolate("linear")
|
||||||
elif fill_method != "none":
|
elif fill_method == "ffill":
|
||||||
raise ValueError(f"Unsupported fill method for non-numeric data: {fill_method}")
|
resampled = resampled.ffill()
|
||||||
|
elif fill_method == "bfill":
|
||||||
|
resampled = resampled.bfill()
|
||||||
|
elif fill_method == "none":
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Unsupported fill method: {fill_method}")
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Resampled for '{}' with length {}: {}...{}",
|
"Resampled for '{}' with length {}: {}...{}",
|
||||||
@@ -1141,6 +1147,16 @@ class DataSequence(DataBase, MutableSequence):
|
|||||||
if end_datetime is not None and len(resampled) > 0:
|
if end_datetime is not None and len(resampled) > 0:
|
||||||
resampled = resampled.truncate(after=end_datetime.subtract(seconds=1))
|
resampled = resampled.truncate(after=end_datetime.subtract(seconds=1))
|
||||||
array = resampled.values
|
array = resampled.values
|
||||||
|
|
||||||
|
# Convert NaN to None if there are actually NaNs
|
||||||
|
if (
|
||||||
|
isinstance(array, np.ndarray)
|
||||||
|
and np.issubdtype(array.dtype.type, np.floating)
|
||||||
|
and pd.isna(array).any()
|
||||||
|
):
|
||||||
|
array = array.astype(object)
|
||||||
|
array[pd.isna(array)] = None
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Array for '{}' with length {}: {}...{}", key, len(array), array[:10], array[-10:]
|
"Array for '{}' with length {}: {}...{}", key, len(array), array[:10], array[-10:]
|
||||||
)
|
)
|
||||||
@@ -1691,6 +1707,14 @@ class DataImportMixin:
|
|||||||
}
|
}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
# Strip quotes if provided - does not effect unquoted string
|
||||||
|
json_str = json_str.strip() # strip white space at start and end
|
||||||
|
if (json_str.startswith("'") and json_str.endswith("'")) or (
|
||||||
|
json_str.startswith('"') and json_str.endswith('"')
|
||||||
|
):
|
||||||
|
json_str = json_str[1:-1] # strip outer quotes
|
||||||
|
json_str = json_str.strip() # strip remaining white space at start and end
|
||||||
|
|
||||||
# Try pandas dataframe with orient="split"
|
# Try pandas dataframe with orient="split"
|
||||||
try:
|
try:
|
||||||
import_data = PydanticDateTimeDataFrame.model_validate_json(json_str)
|
import_data = PydanticDateTimeDataFrame.model_validate_json(json_str)
|
||||||
@@ -1720,10 +1744,15 @@ class DataImportMixin:
|
|||||||
logger.debug(f"PydanticDateTimeData import: {error_msg}")
|
logger.debug(f"PydanticDateTimeData import: {error_msg}")
|
||||||
|
|
||||||
# Use simple dict format
|
# Use simple dict format
|
||||||
import_data = json.loads(json_str)
|
try:
|
||||||
self.import_from_dict(
|
import_data = json.loads(json_str)
|
||||||
import_data, key_prefix=key_prefix, start_datetime=start_datetime, interval=interval
|
self.import_from_dict(
|
||||||
)
|
import_data, key_prefix=key_prefix, start_datetime=start_datetime, interval=interval
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Invalid JSON string '{json_str}': {e}"
|
||||||
|
logger.debug(error_msg)
|
||||||
|
raise ValueError(error_msg) from e
|
||||||
|
|
||||||
def import_from_file(
|
def import_from_file(
|
||||||
self,
|
self,
|
||||||
|
|||||||
@@ -25,11 +25,11 @@ class classproperty:
|
|||||||
|
|
||||||
Methods:
|
Methods:
|
||||||
__get__: Retrieves the value of the class property by calling the
|
__get__: Retrieves the value of the class property by calling the
|
||||||
decorated method on the class.
|
decorated method on the class.
|
||||||
|
|
||||||
Parameters:
|
Parameters:
|
||||||
fget (Callable[[Any], Any]): A method that takes the class as an
|
fget (Callable[[Any], Any]): A method that takes the class as an
|
||||||
argument and returns a value.
|
argument and returns a value.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
RuntimeError: If `fget` is not defined when `__get__` is called.
|
RuntimeError: If `fget` is not defined when `__get__` is called.
|
||||||
|
|||||||
@@ -10,9 +10,11 @@ Demand Driven Based Control.
|
|||||||
|
|
||||||
import uuid
|
import uuid
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from collections import defaultdict
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Annotated, Literal, Optional, Union
|
from typing import Annotated, Literal, Optional, Union
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
from pydantic import Field, computed_field, model_validator
|
from pydantic import Field, computed_field, model_validator
|
||||||
|
|
||||||
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
@@ -2257,20 +2259,60 @@ class EnergyManagementPlan(PydanticBaseModel):
|
|||||||
self.valid_from = to_datetime()
|
self.valid_from = to_datetime()
|
||||||
self.valid_until = None
|
self.valid_until = None
|
||||||
|
|
||||||
|
def get_resources(self) -> list[str]:
|
||||||
|
"""Retrieves the resource_ids for the resources the plan currently holds instructions for.
|
||||||
|
|
||||||
|
Returns a list of resource ids.
|
||||||
|
"""
|
||||||
|
resource_ids = []
|
||||||
|
for instr in self.instructions:
|
||||||
|
resource_id = instr.resource_id
|
||||||
|
if resource_id not in resource_ids:
|
||||||
|
resource_ids.append(resource_id)
|
||||||
|
return resource_ids
|
||||||
|
|
||||||
def get_active_instructions(
|
def get_active_instructions(
|
||||||
self, now: Optional[DateTime] = None
|
self, now: Optional[DateTime] = None
|
||||||
) -> list[EnergyManagementInstruction]:
|
) -> list["EnergyManagementInstruction"]:
|
||||||
"""Retrieves all currently active instructions at the specified time."""
|
"""Retrieves the currently active instruction for each resource at the specified time.
|
||||||
|
|
||||||
|
Semantics:
|
||||||
|
- For each resource, consider only instructions with execution_time <= now.
|
||||||
|
- Choose the instruction with the latest execution_time (the most recent).
|
||||||
|
- If that instruction has a duration (timedelta), it's active only if now < execution_time + duration.
|
||||||
|
- If that instruction has no duration (None), treat it as open-ended (active until superseded).
|
||||||
|
|
||||||
|
Returns a list with at most one instruction per resource (the active one).
|
||||||
|
"""
|
||||||
now = now or to_datetime()
|
now = now or to_datetime()
|
||||||
active = []
|
# Group instructions by resource_id
|
||||||
|
by_resource: dict[str, list["EnergyManagementInstruction"]] = defaultdict(list)
|
||||||
for instr in self.instructions:
|
for instr in self.instructions:
|
||||||
instr_duration = instr.duration()
|
# skip instructions scheduled in the future
|
||||||
|
if instr.execution_time <= now:
|
||||||
|
by_resource[instr.resource_id].append(instr)
|
||||||
|
|
||||||
|
active: list["EnergyManagementInstruction"] = []
|
||||||
|
|
||||||
|
for resource_id, instrs in by_resource.items():
|
||||||
|
# pick latest instruction by execution_time
|
||||||
|
latest = max(instrs, key=lambda i: i.execution_time)
|
||||||
|
|
||||||
|
if len(instrs) == 0:
|
||||||
|
# No instructions, ther shall be at least one
|
||||||
|
error_msg = f"No instructions for {resource_id}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
instr_duration = latest.duration() # expected: Duration| None
|
||||||
if instr_duration is None:
|
if instr_duration is None:
|
||||||
if instr.execution_time <= now:
|
# open-ended (active until replaced) -> active because we selected latest <= now
|
||||||
active.append(instr)
|
active.append(latest)
|
||||||
else:
|
else:
|
||||||
if instr.execution_time <= now < instr.execution_time + instr_duration:
|
# active only if now is strictly before execution_time + duration
|
||||||
active.append(instr)
|
if latest.execution_time + instr_duration > now:
|
||||||
|
active.append(latest)
|
||||||
|
|
||||||
return active
|
return active
|
||||||
|
|
||||||
def get_next_instruction(
|
def get_next_instruction(
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import traceback
|
import traceback
|
||||||
from asyncio import Lock, get_running_loop
|
from asyncio import Lock, get_running_loop
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from enum import Enum
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from typing import ClassVar, Optional
|
from typing import ClassVar, Optional
|
||||||
|
|
||||||
@@ -8,7 +9,12 @@ from loguru import logger
|
|||||||
from pydantic import computed_field
|
from pydantic import computed_field
|
||||||
|
|
||||||
from akkudoktoreos.core.cache import CacheEnergyManagementStore
|
from akkudoktoreos.core.cache import CacheEnergyManagementStore
|
||||||
from akkudoktoreos.core.coreabc import ConfigMixin, PredictionMixin, SingletonMixin
|
from akkudoktoreos.core.coreabc import (
|
||||||
|
AdapterMixin,
|
||||||
|
ConfigMixin,
|
||||||
|
PredictionMixin,
|
||||||
|
SingletonMixin,
|
||||||
|
)
|
||||||
from akkudoktoreos.core.emplan import EnergyManagementPlan
|
from akkudoktoreos.core.emplan import EnergyManagementPlan
|
||||||
from akkudoktoreos.core.emsettings import EnergyManagementMode
|
from akkudoktoreos.core.emsettings import EnergyManagementMode
|
||||||
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
@@ -24,7 +30,23 @@ from akkudoktoreos.utils.datetimeutil import DateTime, compare_datetimes, to_dat
|
|||||||
executor = ThreadPoolExecutor(max_workers=1)
|
executor = ThreadPoolExecutor(max_workers=1)
|
||||||
|
|
||||||
|
|
||||||
class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBaseModel):
|
class EnergyManagementStage(Enum):
|
||||||
|
"""Enumeration of the main stages in the energy management lifecycle."""
|
||||||
|
|
||||||
|
IDLE = "IDLE"
|
||||||
|
DATA_ACQUISITION = "DATA_AQUISITION"
|
||||||
|
FORECAST_RETRIEVAL = "FORECAST_RETRIEVAL"
|
||||||
|
OPTIMIZATION = "OPTIMIZATION"
|
||||||
|
CONTROL_DISPATCH = "CONTROL_DISPATCH"
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
"""Return the string representation of the stage."""
|
||||||
|
return self.value
|
||||||
|
|
||||||
|
|
||||||
|
class EnergyManagement(
|
||||||
|
SingletonMixin, ConfigMixin, PredictionMixin, AdapterMixin, PydanticBaseModel
|
||||||
|
):
|
||||||
"""Energy management."""
|
"""Energy management."""
|
||||||
|
|
||||||
# Start datetime.
|
# Start datetime.
|
||||||
@@ -33,6 +55,9 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
# last run datetime. Used by energy management task
|
# last run datetime. Used by energy management task
|
||||||
_last_run_datetime: ClassVar[Optional[DateTime]] = None
|
_last_run_datetime: ClassVar[Optional[DateTime]] = None
|
||||||
|
|
||||||
|
# Current energy management stage
|
||||||
|
_stage: ClassVar[EnergyManagementStage] = EnergyManagementStage.IDLE
|
||||||
|
|
||||||
# energy management plan of latest energy management run with optimization
|
# energy management plan of latest energy management run with optimization
|
||||||
_plan: ClassVar[Optional[EnergyManagementPlan]] = None
|
_plan: ClassVar[Optional[EnergyManagementPlan]] = None
|
||||||
|
|
||||||
@@ -81,6 +106,15 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
cls._start_datetime = start_datetime.set(minute=0, second=0, microsecond=0)
|
cls._start_datetime = start_datetime.set(minute=0, second=0, microsecond=0)
|
||||||
return cls._start_datetime
|
return cls._start_datetime
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def stage(cls) -> EnergyManagementStage:
|
||||||
|
"""Get the the stage of the energy management.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
EnergyManagementStage: The current stage of energy management.
|
||||||
|
"""
|
||||||
|
return cls._stage
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def plan(cls) -> Optional[EnergyManagementPlan]:
|
def plan(cls) -> Optional[EnergyManagementPlan]:
|
||||||
"""Get the latest energy management plan.
|
"""Get the latest energy management plan.
|
||||||
@@ -122,6 +156,7 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
"""Run the energy management.
|
"""Run the energy management.
|
||||||
|
|
||||||
This method initializes the energy management run by setting its
|
This method initializes the energy management run by setting its
|
||||||
|
|
||||||
start datetime, updating predictions, and optionally starting
|
start datetime, updating predictions, and optionally starting
|
||||||
optimization depending on the selected mode or configuration.
|
optimization depending on the selected mode or configuration.
|
||||||
|
|
||||||
@@ -157,6 +192,8 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
|
|
||||||
logger.info("Starting energy management run.")
|
logger.info("Starting energy management run.")
|
||||||
|
|
||||||
|
cls._stage = EnergyManagementStage.DATA_ACQUISITION
|
||||||
|
|
||||||
# Remember/ set the start datetime of this energy management run.
|
# Remember/ set the start datetime of this energy management run.
|
||||||
# None leads
|
# None leads
|
||||||
cls.set_start_datetime(start_datetime)
|
cls.set_start_datetime(start_datetime)
|
||||||
@@ -164,12 +201,23 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
# Throw away any memory cached results of the last energy management run.
|
# Throw away any memory cached results of the last energy management run.
|
||||||
CacheEnergyManagementStore().clear()
|
CacheEnergyManagementStore().clear()
|
||||||
|
|
||||||
|
# Do data aquisition by adapters
|
||||||
|
try:
|
||||||
|
cls.adapter.update_data(force_enable)
|
||||||
|
except Exception as e:
|
||||||
|
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||||
|
error_msg = f"Adapter update failed - phase {cls._stage}: {e}\n{trace}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
|
||||||
|
cls._stage = EnergyManagementStage.FORECAST_RETRIEVAL
|
||||||
|
|
||||||
if mode is None:
|
if mode is None:
|
||||||
mode = cls.config.ems.mode
|
mode = cls.config.ems.mode
|
||||||
if mode is None or mode == "PREDICTION":
|
if mode is None or mode == "PREDICTION":
|
||||||
# Update the predictions
|
# Update the predictions
|
||||||
cls.prediction.update_data(force_enable=force_enable, force_update=force_update)
|
cls.prediction.update_data(force_enable=force_enable, force_update=force_update)
|
||||||
logger.info("Energy management run done (predictions updated)")
|
logger.info("Energy management run done (predictions updated)")
|
||||||
|
cls._stage = EnergyManagementStage.IDLE
|
||||||
return
|
return
|
||||||
|
|
||||||
# Prepare optimization parameters
|
# Prepare optimization parameters
|
||||||
@@ -184,8 +232,12 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
logger.error(
|
logger.error(
|
||||||
"Energy management run canceled. Could not prepare optimisation parameters."
|
"Energy management run canceled. Could not prepare optimisation parameters."
|
||||||
)
|
)
|
||||||
|
cls._stage = EnergyManagementStage.IDLE
|
||||||
return
|
return
|
||||||
|
|
||||||
|
cls._stage = EnergyManagementStage.OPTIMIZATION
|
||||||
|
logger.info("Starting energy management optimization.")
|
||||||
|
|
||||||
# Take values from config if not given
|
# Take values from config if not given
|
||||||
if genetic_individuals is None:
|
if genetic_individuals is None:
|
||||||
genetic_individuals = cls.config.optimization.genetic.individuals
|
genetic_individuals = cls.config.optimization.genetic.individuals
|
||||||
@@ -195,7 +247,6 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
if cls._start_datetime is None: # Make mypy happy - already set by us
|
if cls._start_datetime is None: # Make mypy happy - already set by us
|
||||||
raise RuntimeError("Start datetime not set.")
|
raise RuntimeError("Start datetime not set.")
|
||||||
|
|
||||||
logger.info("Starting energy management optimization.")
|
|
||||||
try:
|
try:
|
||||||
optimization = GeneticOptimization(
|
optimization = GeneticOptimization(
|
||||||
verbose=bool(cls.config.server.verbose),
|
verbose=bool(cls.config.server.verbose),
|
||||||
@@ -208,8 +259,11 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
logger.exception("Energy management optimization failed.")
|
logger.exception("Energy management optimization failed.")
|
||||||
|
cls._stage = EnergyManagementStage.IDLE
|
||||||
return
|
return
|
||||||
|
|
||||||
|
cls._stage = EnergyManagementStage.CONTROL_DISPATCH
|
||||||
|
|
||||||
# Make genetic solution public
|
# Make genetic solution public
|
||||||
cls._genetic_solution = solution
|
cls._genetic_solution = solution
|
||||||
|
|
||||||
@@ -224,6 +278,17 @@ class EnergyManagement(SingletonMixin, ConfigMixin, PredictionMixin, PydanticBas
|
|||||||
logger.debug("Energy management plan:\n{}", cls._plan)
|
logger.debug("Energy management plan:\n{}", cls._plan)
|
||||||
logger.info("Energy management run done (optimization updated)")
|
logger.info("Energy management run done (optimization updated)")
|
||||||
|
|
||||||
|
# Do control dispatch by adapters
|
||||||
|
try:
|
||||||
|
cls.adapter.update_data(force_enable)
|
||||||
|
except Exception as e:
|
||||||
|
trace = "".join(traceback.TracebackException.from_exception(e).format())
|
||||||
|
error_msg = f"Adapter update failed - phase {cls._stage}: {e}\n{trace}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
|
||||||
|
# energy management run finished
|
||||||
|
cls._stage = EnergyManagementStage.IDLE
|
||||||
|
|
||||||
async def run(
|
async def run(
|
||||||
self,
|
self,
|
||||||
start_datetime: Optional[DateTime] = None,
|
start_datetime: Optional[DateTime] = None,
|
||||||
|
|||||||
@@ -65,7 +65,7 @@ console_handler_id = None
|
|||||||
file_handler_id = None
|
file_handler_id = None
|
||||||
|
|
||||||
|
|
||||||
def track_logging_config(config_eos: Any, path: str, old_value: Any, value: Any) -> None:
|
def logging_track_config(config_eos: Any, path: str, old_value: Any, value: Any) -> None:
|
||||||
"""Track logging config changes."""
|
"""Track logging config changes."""
|
||||||
global console_handler_id, file_handler_id
|
global console_handler_id, file_handler_id
|
||||||
|
|
||||||
|
|||||||
@@ -400,7 +400,21 @@ class PydanticModelNestedValueMixin:
|
|||||||
|
|
||||||
# Get next value
|
# Get next value
|
||||||
next_value = None
|
next_value = None
|
||||||
if isinstance(model, BaseModel):
|
if isinstance(model, RootModel):
|
||||||
|
# If this is the final key, set the value
|
||||||
|
if is_final_key:
|
||||||
|
try:
|
||||||
|
model.validate_and_set(key, value)
|
||||||
|
except Exception as e:
|
||||||
|
raise ValueError(f"Error updating model: {e}") from e
|
||||||
|
return
|
||||||
|
|
||||||
|
next_value = model.root
|
||||||
|
|
||||||
|
elif isinstance(model, BaseModel):
|
||||||
|
logger.debug(
|
||||||
|
f"Detected base model {model.__class__.__name__} of type {type(model)}"
|
||||||
|
)
|
||||||
# Track parent and key for possible assignment later
|
# Track parent and key for possible assignment later
|
||||||
parent = model
|
parent = model
|
||||||
parent_key = [
|
parent_key = [
|
||||||
@@ -432,6 +446,7 @@ class PydanticModelNestedValueMixin:
|
|||||||
next_value = getattr(model, key, None)
|
next_value = getattr(model, key, None)
|
||||||
|
|
||||||
elif isinstance(model, list):
|
elif isinstance(model, list):
|
||||||
|
logger.debug(f"Detected list of type {type(model)}")
|
||||||
# Handle lists (ensure index exists and modify safely)
|
# Handle lists (ensure index exists and modify safely)
|
||||||
try:
|
try:
|
||||||
idx = int(key)
|
idx = int(key)
|
||||||
@@ -468,6 +483,7 @@ class PydanticModelNestedValueMixin:
|
|||||||
return
|
return
|
||||||
|
|
||||||
elif isinstance(model, dict):
|
elif isinstance(model, dict):
|
||||||
|
logger.debug(f"Detected dict of type {type(model)}")
|
||||||
# Handle dictionaries (auto-create missing keys)
|
# Handle dictionaries (auto-create missing keys)
|
||||||
|
|
||||||
# Get next type from parent key type information
|
# Get next type from parent key type information
|
||||||
@@ -795,29 +811,61 @@ class PydanticBaseModel(PydanticModelNestedValueMixin, BaseModel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def field_description(cls, field_name: str) -> Optional[str]:
|
def field_description(cls, field_name: str) -> Optional[str]:
|
||||||
"""Return the description metadata of a model field, if available.
|
"""Return a human-readable description for a model field.
|
||||||
|
|
||||||
This method retrieves the `Field` specification from the model's
|
Looks up descriptions for both regular and computed fields.
|
||||||
`model_fields` registry and extracts its description from the field's
|
Resolution order:
|
||||||
`json_schema_extra` / `extra` metadata (as provided by
|
|
||||||
`_field_extra_dict`). If the field does not exist or no description is
|
Normal fields:
|
||||||
present, ``None`` is returned.
|
1) json_schema_extra["description"]
|
||||||
|
2) field.description
|
||||||
|
|
||||||
|
Computed fields:
|
||||||
|
1) ComputedFieldInfo.description
|
||||||
|
2) function docstring (func.__doc__)
|
||||||
|
3) json_schema_extra["description"]
|
||||||
|
|
||||||
|
If a field exists but no description is found, returns "-".
|
||||||
|
If the field does not exist, returns None.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
field_name (str):
|
field_name: Field name.
|
||||||
Name of the field whose description should be returned.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Optional[str]:
|
Description string, "-" if missing, or None if not a field.
|
||||||
The textual description if present, otherwise ``None``.
|
|
||||||
"""
|
"""
|
||||||
field = cls.model_fields.get(field_name)
|
# 1) Regular declared fields
|
||||||
if not field:
|
field: FieldInfo | None = cls.model_fields.get(field_name)
|
||||||
|
if field is not None:
|
||||||
|
extra = cls._field_extra_dict(field)
|
||||||
|
if "description" in extra:
|
||||||
|
return str(extra["description"])
|
||||||
|
# some FieldInfo may also have .description directly
|
||||||
|
if getattr(field, "description", None):
|
||||||
|
return str(field.description)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
extra = cls._field_extra_dict(field)
|
|
||||||
|
# 2) Computed fields live in a separate mapping
|
||||||
|
cfield: ComputedFieldInfo | None = cls.model_computed_fields.get(field_name)
|
||||||
|
if cfield is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# 2a) ComputedFieldInfo may have a description attribute
|
||||||
|
if getattr(cfield, "description", None):
|
||||||
|
return str(cfield.description)
|
||||||
|
|
||||||
|
# 2b) fallback to wrapped property's docstring
|
||||||
|
func = getattr(cfield, "func", None)
|
||||||
|
if func and func.__doc__:
|
||||||
|
return func.__doc__.strip()
|
||||||
|
|
||||||
|
# 2c) last resort: json_schema_extra if you use it for computed fields
|
||||||
|
extra = cls._field_extra_dict(cfield)
|
||||||
if "description" in extra:
|
if "description" in extra:
|
||||||
return str(extra["description"])
|
return str(extra["description"])
|
||||||
return None
|
|
||||||
|
return "-"
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def field_deprecated(cls, field_name: str) -> Optional[str]:
|
def field_deprecated(cls, field_name: str) -> Optional[str]:
|
||||||
@@ -887,7 +935,7 @@ class PydanticDateTimeData(RootModel):
|
|||||||
|
|
||||||
{
|
{
|
||||||
"start_datetime": "2024-01-01 00:00:00", # optional
|
"start_datetime": "2024-01-01 00:00:00", # optional
|
||||||
"interval": "1 Hour", # optional
|
"interval": "1 hour", # optional
|
||||||
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
"loadforecast_power_w": [20.5, 21.0, 22.1],
|
||||||
"load_min": [18.5, 19.0, 20.1]
|
"load_min": [18.5, 19.0, 20.1]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,13 +6,15 @@ from fnmatch import fnmatch
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
# For development add `+dev` to previous release
|
# For development add `.dev` to previous release
|
||||||
# For release omit `+dev`.
|
# For release omit `.dev`.
|
||||||
VERSION_BASE = "0.2.0+dev"
|
VERSION_BASE = "0.2.0.dev"
|
||||||
|
|
||||||
# Project hash of relevant files
|
# Project hash of relevant files
|
||||||
HASH_EOS = ""
|
HASH_EOS = ""
|
||||||
|
|
||||||
|
# Number of digits to append to .dev to identify a development version
|
||||||
|
VERSION_DEV_PRECISION = 8
|
||||||
|
|
||||||
# ------------------------------
|
# ------------------------------
|
||||||
# Helpers for version generation
|
# Helpers for version generation
|
||||||
@@ -91,8 +93,11 @@ def _version_calculate() -> str:
|
|||||||
"""Compute version."""
|
"""Compute version."""
|
||||||
global HASH_EOS
|
global HASH_EOS
|
||||||
HASH_EOS = _version_hash()
|
HASH_EOS = _version_hash()
|
||||||
if VERSION_BASE.endswith("+dev"):
|
if VERSION_BASE.endswith("dev"):
|
||||||
return f"{VERSION_BASE}.{HASH_EOS[:6]}"
|
# After dev only digits are allowed - convert hexdigest to digits
|
||||||
|
hash_value = int(HASH_EOS, 16)
|
||||||
|
hash_digits = str(hash_value % (10**VERSION_DEV_PRECISION)).zfill(VERSION_DEV_PRECISION)
|
||||||
|
return f"{VERSION_BASE}{hash_digits}"
|
||||||
else:
|
else:
|
||||||
return VERSION_BASE
|
return VERSION_BASE
|
||||||
|
|
||||||
@@ -114,10 +119,10 @@ __version__ = _version_calculate()
|
|||||||
VERSION_RE = re.compile(
|
VERSION_RE = re.compile(
|
||||||
r"""
|
r"""
|
||||||
^(?P<base>\d+\.\d+\.\d+) # x.y.z
|
^(?P<base>\d+\.\d+\.\d+) # x.y.z
|
||||||
(?:\+ # +dev.hash starts here
|
(?:[\.\+\-] # .dev<hash> starts here
|
||||||
(?:
|
(?:
|
||||||
(?P<dev>dev) # literal 'dev'
|
(?P<dev>dev) # literal 'dev'
|
||||||
(?:\.(?P<hash>[A-Za-z0-9]+))? # optional .hash
|
(?:(?P<hash>[A-Za-z0-9]+))? # optional <hash>
|
||||||
)
|
)
|
||||||
)?
|
)?
|
||||||
$
|
$
|
||||||
@@ -131,8 +136,8 @@ def version() -> dict[str, Optional[str]]:
|
|||||||
|
|
||||||
The version string shall be of the form:
|
The version string shall be of the form:
|
||||||
x.y.z
|
x.y.z
|
||||||
x.y.z+dev
|
x.y.z.dev
|
||||||
x.y.z+dev.HASH
|
x.y.z.dev<HASH>
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ from akkudoktoreos.devices.devicesabc import DevicesBaseSettings
|
|||||||
from akkudoktoreos.utils.datetimeutil import DateTime, TimeWindowSequence, to_datetime
|
from akkudoktoreos.utils.datetimeutil import DateTime, TimeWindowSequence, to_datetime
|
||||||
|
|
||||||
# Default charge rates for battery
|
# Default charge rates for battery
|
||||||
BATTERY_DEFAULT_CHARGE_RATES = np.linspace(0.0, 1.0, 11) # 0.0, 0.1, ..., 1.0
|
BATTERY_DEFAULT_CHARGE_RATES: list[float] = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
|
||||||
|
|
||||||
|
|
||||||
class BatteriesCommonSettings(DevicesBaseSettings):
|
class BatteriesCommonSettings(DevicesBaseSettings):
|
||||||
@@ -68,7 +68,7 @@ class BatteriesCommonSettings(DevicesBaseSettings):
|
|||||||
json_schema_extra={"description": "Minimum charging power [W].", "examples": [50]},
|
json_schema_extra={"description": "Minimum charging power [W].", "examples": [50]},
|
||||||
)
|
)
|
||||||
|
|
||||||
charge_rates: Optional[NDArray[Shape["*"], float]] = Field(
|
charge_rates: Optional[list[float]] = Field(
|
||||||
default=BATTERY_DEFAULT_CHARGE_RATES,
|
default=BATTERY_DEFAULT_CHARGE_RATES,
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
"description": (
|
"description": (
|
||||||
@@ -165,10 +165,7 @@ class BatteriesCommonSettings(DevicesBaseSettings):
|
|||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def measurement_keys(self) -> Optional[list[str]]:
|
def measurement_keys(self) -> Optional[list[str]]:
|
||||||
"""Measurement keys for the battery stati that are measurements.
|
"""Measurement keys for the battery stati that are measurements."""
|
||||||
|
|
||||||
Battery SoC, power.
|
|
||||||
"""
|
|
||||||
keys: list[str] = [
|
keys: list[str] = [
|
||||||
self.measurement_key_soc_factor,
|
self.measurement_key_soc_factor,
|
||||||
self.measurement_key_power_l1_w,
|
self.measurement_key_power_l1_w,
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ class Battery:
|
|||||||
self.discharging_efficiency = self.parameters.discharging_efficiency
|
self.discharging_efficiency = self.parameters.discharging_efficiency
|
||||||
|
|
||||||
# Charge rates, in case of None use default
|
# Charge rates, in case of None use default
|
||||||
self.charge_rates = BATTERY_DEFAULT_CHARGE_RATES
|
self.charge_rates = np.array(BATTERY_DEFAULT_CHARGE_RATES, dtype=float)
|
||||||
if self.parameters.charge_rates:
|
if self.parameters.charge_rates:
|
||||||
charge_rates = np.array(self.parameters.charge_rates, dtype=float)
|
charge_rates = np.array(self.parameters.charge_rates, dtype=float)
|
||||||
charge_rates = np.unique(charge_rates)
|
charge_rates = np.unique(charge_rates)
|
||||||
|
|||||||
@@ -234,14 +234,14 @@ class GeneticSimulation(PydanticBaseModel):
|
|||||||
consumption_energy_per_hour = np.full((total_hours), np.nan)
|
consumption_energy_per_hour = np.full((total_hours), np.nan)
|
||||||
costs_per_hour = np.full((total_hours), np.nan)
|
costs_per_hour = np.full((total_hours), np.nan)
|
||||||
revenue_per_hour = np.full((total_hours), np.nan)
|
revenue_per_hour = np.full((total_hours), np.nan)
|
||||||
soc_per_hour = np.full((total_hours), np.nan)
|
|
||||||
soc_ev_per_hour = np.full((total_hours), np.nan)
|
|
||||||
losses_wh_per_hour = np.full((total_hours), np.nan)
|
losses_wh_per_hour = np.full((total_hours), np.nan)
|
||||||
home_appliance_wh_per_hour = np.full((total_hours), np.nan)
|
|
||||||
electricity_price_per_hour = np.full((total_hours), np.nan)
|
electricity_price_per_hour = np.full((total_hours), np.nan)
|
||||||
|
|
||||||
# Set initial state
|
# Set initial state
|
||||||
if battery_fast:
|
if battery_fast:
|
||||||
|
# Pre-allocate arrays for the results, optimized for speed
|
||||||
|
soc_per_hour = np.full((total_hours), np.nan)
|
||||||
|
|
||||||
soc_per_hour[0] = battery_fast.current_soc_percentage()
|
soc_per_hour[0] = battery_fast.current_soc_percentage()
|
||||||
# Fill the charge array of the battery
|
# Fill the charge array of the battery
|
||||||
dc_charge_hours_fast[0:start_hour] = 0
|
dc_charge_hours_fast[0:start_hour] = 0
|
||||||
@@ -255,8 +255,14 @@ class GeneticSimulation(PydanticBaseModel):
|
|||||||
bat_discharge_hours_fast[0:start_hour] = 0
|
bat_discharge_hours_fast[0:start_hour] = 0
|
||||||
bat_discharge_hours_fast[end_hour:] = 0
|
bat_discharge_hours_fast[end_hour:] = 0
|
||||||
battery_fast.discharge_array = bat_discharge_hours_fast
|
battery_fast.discharge_array = bat_discharge_hours_fast
|
||||||
|
else:
|
||||||
|
# Default return if no battery is available
|
||||||
|
soc_per_hour = np.full((total_hours), 0)
|
||||||
|
|
||||||
if ev_fast:
|
if ev_fast:
|
||||||
|
# Pre-allocate arrays for the results, optimized for speed
|
||||||
|
soc_ev_per_hour = np.full((total_hours), np.nan)
|
||||||
|
|
||||||
soc_ev_per_hour[0] = ev_fast.current_soc_percentage()
|
soc_ev_per_hour[0] = ev_fast.current_soc_percentage()
|
||||||
# Fill the charge array of the ev
|
# Fill the charge array of the ev
|
||||||
ev_charge_hours_fast[0:start_hour] = 0
|
ev_charge_hours_fast[0:start_hour] = 0
|
||||||
@@ -266,14 +272,22 @@ class GeneticSimulation(PydanticBaseModel):
|
|||||||
ev_discharge_hours_fast[0:start_hour] = 0
|
ev_discharge_hours_fast[0:start_hour] = 0
|
||||||
ev_discharge_hours_fast[end_hour:] = 0
|
ev_discharge_hours_fast[end_hour:] = 0
|
||||||
ev_fast.discharge_array = ev_discharge_hours_fast
|
ev_fast.discharge_array = ev_discharge_hours_fast
|
||||||
|
else:
|
||||||
|
# Default return if no electric vehicle is available
|
||||||
|
soc_ev_per_hour = np.full((total_hours), 0)
|
||||||
|
|
||||||
if home_appliance_fast and self.home_appliance_start_hour:
|
if home_appliance_fast and self.home_appliance_start_hour:
|
||||||
home_appliance_enabled = True
|
home_appliance_enabled = True
|
||||||
|
# Pre-allocate arrays for the results, optimized for speed
|
||||||
|
home_appliance_wh_per_hour = np.full((total_hours), np.nan)
|
||||||
|
|
||||||
self.home_appliance_start_hour = home_appliance_fast.set_starting_time(
|
self.home_appliance_start_hour = home_appliance_fast.set_starting_time(
|
||||||
self.home_appliance_start_hour, start_hour
|
self.home_appliance_start_hour, start_hour
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
home_appliance_enabled = False
|
home_appliance_enabled = False
|
||||||
|
# Default return if no home appliance is available
|
||||||
|
home_appliance_wh_per_hour = np.full((total_hours), 0)
|
||||||
|
|
||||||
for hour in range(start_hour, end_hour):
|
for hour in range(start_hour, end_hour):
|
||||||
hour_idx = hour - start_hour
|
hour_idx = hour - start_hour
|
||||||
|
|||||||
@@ -177,33 +177,33 @@ class GeneticOptimizationParameters(
|
|||||||
# Check for general predictions conditions
|
# Check for general predictions conditions
|
||||||
if cls.config.general.latitude is None:
|
if cls.config.general.latitude is None:
|
||||||
default_latitude = 52.52
|
default_latitude = 52.52
|
||||||
logger.error(f"Latitude unknown - defaulting to {default_latitude}.")
|
logger.info(f"Latitude unknown - defaulting to {default_latitude}.")
|
||||||
cls.config.general.latitude = default_latitude
|
cls.config.general.latitude = default_latitude
|
||||||
if cls.config.general.longitude is None:
|
if cls.config.general.longitude is None:
|
||||||
default_longitude = 13.405
|
default_longitude = 13.405
|
||||||
logger.error(f"Longitude unknown - defaulting to {default_longitude}.")
|
logger.info(f"Longitude unknown - defaulting to {default_longitude}.")
|
||||||
cls.config.general.longitude = default_longitude
|
cls.config.general.longitude = default_longitude
|
||||||
if cls.config.prediction.hours is None:
|
if cls.config.prediction.hours is None:
|
||||||
logger.error("Prediction hours unknown - defaulting to 48 hours.")
|
logger.info("Prediction hours unknown - defaulting to 48 hours.")
|
||||||
cls.config.prediction.hours = 48
|
cls.config.prediction.hours = 48
|
||||||
if cls.config.prediction.historic_hours is None:
|
if cls.config.prediction.historic_hours is None:
|
||||||
logger.error("Prediction historic hours unknown - defaulting to 24 hours.")
|
logger.info("Prediction historic hours unknown - defaulting to 24 hours.")
|
||||||
cls.config.prediction.historic_hours = 24
|
cls.config.prediction.historic_hours = 24
|
||||||
# Check optimization definitions
|
# Check optimization definitions
|
||||||
if cls.config.optimization.horizon_hours is None:
|
if cls.config.optimization.horizon_hours is None:
|
||||||
logger.error("Optimization horizon unknown - defaulting to 24 hours.")
|
logger.info("Optimization horizon unknown - defaulting to 24 hours.")
|
||||||
cls.config.optimization.horizon_hours = 24
|
cls.config.optimization.horizon_hours = 24
|
||||||
if cls.config.optimization.interval is None:
|
if cls.config.optimization.interval is None:
|
||||||
logger.error("Optimization interval unknown - defaulting to 3600 seconds.")
|
logger.info("Optimization interval unknown - defaulting to 3600 seconds.")
|
||||||
cls.config.optimization.interval = 3600
|
cls.config.optimization.interval = 3600
|
||||||
if cls.config.optimization.interval != 3600:
|
if cls.config.optimization.interval != 3600:
|
||||||
logger.error(
|
logger.info(
|
||||||
"Optimization interval '{}' seconds not supported - forced to 3600 seconds."
|
"Optimization interval '{}' seconds not supported - forced to 3600 seconds."
|
||||||
)
|
)
|
||||||
cls.config.optimization.interval = 3600
|
cls.config.optimization.interval = 3600
|
||||||
# Check genetic algorithm definitions
|
# Check genetic algorithm definitions
|
||||||
if cls.config.optimization.genetic is None:
|
if cls.config.optimization.genetic is None:
|
||||||
logger.error(
|
logger.info(
|
||||||
"Genetic optimization configuration not configured - defaulting to demo config."
|
"Genetic optimization configuration not configured - defaulting to demo config."
|
||||||
)
|
)
|
||||||
cls.config.optimization.genetic = {
|
cls.config.optimization.genetic = {
|
||||||
@@ -215,16 +215,16 @@ class GeneticOptimizationParameters(
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
if cls.config.optimization.genetic.individuals is None:
|
if cls.config.optimization.genetic.individuals is None:
|
||||||
logger.error("Genetic individuals unknown - defaulting to 300.")
|
logger.info("Genetic individuals unknown - defaulting to 300.")
|
||||||
cls.config.optimization.genetic.individuals = 300
|
cls.config.optimization.genetic.individuals = 300
|
||||||
if cls.config.optimization.genetic.generations is None:
|
if cls.config.optimization.genetic.generations is None:
|
||||||
logger.error("Genetic generations unknown - defaulting to 400.")
|
logger.info("Genetic generations unknown - defaulting to 400.")
|
||||||
cls.config.optimization.genetic.generations = 400
|
cls.config.optimization.genetic.generations = 400
|
||||||
if cls.config.optimization.genetic.penalties is None:
|
if cls.config.optimization.genetic.penalties is None:
|
||||||
logger.error("Genetic penalties unknown - defaulting to demo config.")
|
logger.info("Genetic penalties unknown - defaulting to demo config.")
|
||||||
cls.config.optimization.genetic.penalties = {"ev_soc_miss": 10}
|
cls.config.optimization.genetic.penalties = {"ev_soc_miss": 10}
|
||||||
if "ev_soc_miss" not in cls.config.optimization.genetic.penalties:
|
if "ev_soc_miss" not in cls.config.optimization.genetic.penalties:
|
||||||
logger.error("ev_soc_miss penalty function parameter unknown - defaulting to 100.")
|
logger.info("ev_soc_miss penalty function parameter unknown - defaulting to 10.")
|
||||||
cls.config.optimization.genetic.penalties["ev_soc_miss"] = 10
|
cls.config.optimization.genetic.penalties["ev_soc_miss"] = 10
|
||||||
|
|
||||||
# Get start solution from last run
|
# Get start solution from last run
|
||||||
@@ -262,7 +262,7 @@ class GeneticOptimizationParameters(
|
|||||||
* power_to_energy_per_interval_factor
|
* power_to_energy_per_interval_factor
|
||||||
).tolist()
|
).tolist()
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No PV forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No PV forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -270,6 +270,7 @@ class GeneticOptimizationParameters(
|
|||||||
{
|
{
|
||||||
"pvforecast": {
|
"pvforecast": {
|
||||||
"provider": "PVForecastAkkudoktor",
|
"provider": "PVForecastAkkudoktor",
|
||||||
|
"max_planes": 4,
|
||||||
"planes": [
|
"planes": [
|
||||||
{
|
{
|
||||||
"peakpower": 5.0,
|
"peakpower": 5.0,
|
||||||
@@ -314,7 +315,7 @@ class GeneticOptimizationParameters(
|
|||||||
fill_method="ffill",
|
fill_method="ffill",
|
||||||
).tolist()
|
).tolist()
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No Electricity Marketprice forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No Electricity Marketprice forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -330,7 +331,7 @@ class GeneticOptimizationParameters(
|
|||||||
fill_method="ffill",
|
fill_method="ffill",
|
||||||
).tolist()
|
).tolist()
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No Load forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No Load forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -357,7 +358,7 @@ class GeneticOptimizationParameters(
|
|||||||
fill_method="ffill",
|
fill_method="ffill",
|
||||||
).tolist()
|
).tolist()
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No feed in tariff forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No feed in tariff forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -384,7 +385,7 @@ class GeneticOptimizationParameters(
|
|||||||
fill_method="ffill",
|
fill_method="ffill",
|
||||||
).tolist()
|
).tolist()
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No weather forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No weather forecast data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -397,14 +398,14 @@ class GeneticOptimizationParameters(
|
|||||||
# Batteries
|
# Batteries
|
||||||
# ---------
|
# ---------
|
||||||
if cls.config.devices.max_batteries is None:
|
if cls.config.devices.max_batteries is None:
|
||||||
logger.error("Number of battery devices not configured - defaulting to 1.")
|
logger.info("Number of battery devices not configured - defaulting to 1.")
|
||||||
cls.config.devices.max_batteries = 1
|
cls.config.devices.max_batteries = 1
|
||||||
if cls.config.devices.max_batteries == 0:
|
if cls.config.devices.max_batteries == 0:
|
||||||
battery_params = None
|
battery_params = None
|
||||||
battery_lcos_kwh = 0
|
battery_lcos_kwh = 0
|
||||||
else:
|
else:
|
||||||
if cls.config.devices.batteries is None:
|
if cls.config.devices.batteries is None:
|
||||||
logger.error("No battery device data available - defaulting to demo data.")
|
logger.info("No battery device data available - defaulting to demo data.")
|
||||||
cls.config.devices.batteries = [{"device_id": "battery1", "capacity_wh": 8000}]
|
cls.config.devices.batteries = [{"device_id": "battery1", "capacity_wh": 8000}]
|
||||||
try:
|
try:
|
||||||
battery_config = cls.config.devices.batteries[0]
|
battery_config = cls.config.devices.batteries[0]
|
||||||
@@ -418,7 +419,7 @@ class GeneticOptimizationParameters(
|
|||||||
max_soc_percentage=battery_config.max_soc_percentage,
|
max_soc_percentage=battery_config.max_soc_percentage,
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No battery device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No battery device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -427,7 +428,7 @@ class GeneticOptimizationParameters(
|
|||||||
continue
|
continue
|
||||||
# Levelized cost of ownership
|
# Levelized cost of ownership
|
||||||
if battery_config.levelized_cost_of_storage_kwh is None:
|
if battery_config.levelized_cost_of_storage_kwh is None:
|
||||||
logger.error(
|
logger.info(
|
||||||
"No battery device LCOS data available - defaulting to 0 €/kWh. Parameter preparation attempt {}.",
|
"No battery device LCOS data available - defaulting to 0 €/kWh. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -449,7 +450,7 @@ class GeneticOptimizationParameters(
|
|||||||
except:
|
except:
|
||||||
initial_soc_percentage = None
|
initial_soc_percentage = None
|
||||||
if initial_soc_percentage is None:
|
if initial_soc_percentage is None:
|
||||||
logger.error(
|
logger.info(
|
||||||
f"No battery device SoC data (measurement key = '{battery_config.measurement_key_soc_factor}') available - defaulting to 0."
|
f"No battery device SoC data (measurement key = '{battery_config.measurement_key_soc_factor}') available - defaulting to 0."
|
||||||
)
|
)
|
||||||
initial_soc_percentage = 0
|
initial_soc_percentage = 0
|
||||||
@@ -458,13 +459,13 @@ class GeneticOptimizationParameters(
|
|||||||
# Electric Vehicles
|
# Electric Vehicles
|
||||||
# -----------------
|
# -----------------
|
||||||
if cls.config.devices.max_electric_vehicles is None:
|
if cls.config.devices.max_electric_vehicles is None:
|
||||||
logger.error("Number of electric_vehicle devices not configured - defaulting to 1.")
|
logger.info("Number of electric_vehicle devices not configured - defaulting to 1.")
|
||||||
cls.config.devices.max_electric_vehicles = 1
|
cls.config.devices.max_electric_vehicles = 1
|
||||||
if cls.config.devices.max_electric_vehicles == 0:
|
if cls.config.devices.max_electric_vehicles == 0:
|
||||||
electric_vehicle_params = None
|
electric_vehicle_params = None
|
||||||
else:
|
else:
|
||||||
if cls.config.devices.electric_vehicles is None:
|
if cls.config.devices.electric_vehicles is None:
|
||||||
logger.error(
|
logger.info(
|
||||||
"No electric vehicle device data available - defaulting to demo data."
|
"No electric vehicle device data available - defaulting to demo data."
|
||||||
)
|
)
|
||||||
cls.config.devices.max_electric_vehicles = 1
|
cls.config.devices.max_electric_vehicles = 1
|
||||||
@@ -489,7 +490,7 @@ class GeneticOptimizationParameters(
|
|||||||
max_soc_percentage=electric_vehicle_config.max_soc_percentage,
|
max_soc_percentage=electric_vehicle_config.max_soc_percentage,
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No electric_vehicle device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No electric_vehicle device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -520,7 +521,7 @@ class GeneticOptimizationParameters(
|
|||||||
except:
|
except:
|
||||||
initial_soc_percentage = None
|
initial_soc_percentage = None
|
||||||
if initial_soc_percentage is None:
|
if initial_soc_percentage is None:
|
||||||
logger.error(
|
logger.info(
|
||||||
f"No electric vehicle device SoC data (measurement key = '{electric_vehicle_config.measurement_key_soc_factor}') available - defaulting to 0."
|
f"No electric vehicle device SoC data (measurement key = '{electric_vehicle_config.measurement_key_soc_factor}') available - defaulting to 0."
|
||||||
)
|
)
|
||||||
initial_soc_percentage = 0
|
initial_soc_percentage = 0
|
||||||
@@ -529,13 +530,13 @@ class GeneticOptimizationParameters(
|
|||||||
# Inverters
|
# Inverters
|
||||||
# ---------
|
# ---------
|
||||||
if cls.config.devices.max_inverters is None:
|
if cls.config.devices.max_inverters is None:
|
||||||
logger.error("Number of inverter devices not configured - defaulting to 1.")
|
logger.info("Number of inverter devices not configured - defaulting to 1.")
|
||||||
cls.config.devices.max_inverters = 1
|
cls.config.devices.max_inverters = 1
|
||||||
if cls.config.devices.max_inverters == 0:
|
if cls.config.devices.max_inverters == 0:
|
||||||
inverter_params = None
|
inverter_params = None
|
||||||
else:
|
else:
|
||||||
if cls.config.devices.inverters is None:
|
if cls.config.devices.inverters is None:
|
||||||
logger.error("No inverter device data available - defaulting to demo data.")
|
logger.info("No inverter device data available - defaulting to demo data.")
|
||||||
cls.config.devices.inverters = [
|
cls.config.devices.inverters = [
|
||||||
{
|
{
|
||||||
"device_id": "inverter1",
|
"device_id": "inverter1",
|
||||||
@@ -551,7 +552,7 @@ class GeneticOptimizationParameters(
|
|||||||
battery_id=inverter_config.battery_id,
|
battery_id=inverter_config.battery_id,
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No inverter device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No inverter device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -568,14 +569,14 @@ class GeneticOptimizationParameters(
|
|||||||
# Home Appliances
|
# Home Appliances
|
||||||
# ---------------
|
# ---------------
|
||||||
if cls.config.devices.max_home_appliances is None:
|
if cls.config.devices.max_home_appliances is None:
|
||||||
logger.error("Number of home appliance devices not configured - defaulting to 1.")
|
logger.info("Number of home appliance devices not configured - defaulting to 1.")
|
||||||
cls.config.devices.max_home_appliances = 1
|
cls.config.devices.max_home_appliances = 1
|
||||||
if cls.config.devices.max_home_appliances == 0:
|
if cls.config.devices.max_home_appliances == 0:
|
||||||
home_appliance_params = None
|
home_appliance_params = None
|
||||||
else:
|
else:
|
||||||
home_appliance_params = None
|
home_appliance_params = None
|
||||||
if cls.config.devices.home_appliances is None:
|
if cls.config.devices.home_appliances is None:
|
||||||
logger.error(
|
logger.info(
|
||||||
"No home appliance device data available - defaulting to demo data."
|
"No home appliance device data available - defaulting to demo data."
|
||||||
)
|
)
|
||||||
cls.config.devices.home_appliances = [
|
cls.config.devices.home_appliances = [
|
||||||
@@ -606,7 +607,7 @@ class GeneticOptimizationParameters(
|
|||||||
time_windows=home_appliance_config.time_windows,
|
time_windows=home_appliance_config.time_windows,
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"No home appliance device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
"No home appliance device data available - defaulting to demo data. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
@@ -639,7 +640,7 @@ class GeneticOptimizationParameters(
|
|||||||
start_solution=start_solution,
|
start_solution=start_solution,
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
logger.exception(
|
logger.info(
|
||||||
"Can not prepare optimization parameters - will retry. Parameter preparation attempt {}.",
|
"Can not prepare optimization parameters - will retry. Parameter preparation attempt {}.",
|
||||||
attempt,
|
attempt,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -416,9 +416,35 @@ class GeneticSolution(ConfigMixin, GeneticParametersBaseModel):
|
|||||||
solution[key] = operation[key]
|
solution[key] = operation[key]
|
||||||
|
|
||||||
# Add home appliance data
|
# Add home appliance data
|
||||||
if self.washingstart:
|
if self.config.devices.max_home_appliances and self.config.devices.max_home_appliances > 0:
|
||||||
|
# Use config and not self.washingstart as washingstart may be None (no start)
|
||||||
|
# even if configured to be started.
|
||||||
|
|
||||||
# result starts at start_day_hour
|
# result starts at start_day_hour
|
||||||
solution["homeappliance1_energy_wh"] = self.result.Home_appliance_wh_per_hour[:n_points]
|
solution["homeappliance1_energy_wh"] = self.result.Home_appliance_wh_per_hour[:n_points]
|
||||||
|
operation = {
|
||||||
|
"homeappliance1_run_op_mode": [],
|
||||||
|
"homeappliance1_run_op_factor": [],
|
||||||
|
"homeappliance1_off_op_mode": [],
|
||||||
|
"homeappliance1_off_op_factor": [],
|
||||||
|
}
|
||||||
|
for hour_idx, energy in enumerate(solution["homeappliance1_energy_wh"]):
|
||||||
|
if energy > 0.0:
|
||||||
|
operation["homeappliance1_run_op_mode"].append(1.0)
|
||||||
|
operation["homeappliance1_run_op_factor"].append(1.0)
|
||||||
|
operation["homeappliance1_off_op_mode"].append(0.0)
|
||||||
|
operation["homeappliance1_off_op_factor"].append(0.0)
|
||||||
|
else:
|
||||||
|
operation["homeappliance1_run_op_mode"].append(0.0)
|
||||||
|
operation["homeappliance1_run_op_factor"].append(0.0)
|
||||||
|
operation["homeappliance1_off_op_mode"].append(1.0)
|
||||||
|
operation["homeappliance1_off_op_factor"].append(1.0)
|
||||||
|
for key in operation.keys():
|
||||||
|
if len(operation[key]) != n_points:
|
||||||
|
error_msg = f"instruction {key} has invalid length {len(operation[key])} - expected {n_points}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
solution[key] = operation[key]
|
||||||
|
|
||||||
# Fill prediction into dataframe with correct column names
|
# Fill prediction into dataframe with correct column names
|
||||||
# - pvforecast_ac_energy_wh_energy_wh: PV energy prediction (positive) in wh
|
# - pvforecast_ac_energy_wh_energy_wh: PV energy prediction (positive) in wh
|
||||||
@@ -633,19 +659,33 @@ class GeneticSolution(ConfigMixin, GeneticParametersBaseModel):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Add home appliance instructions (demand driven based control)
|
# Add home appliance instructions (demand driven based control)
|
||||||
if self.washingstart:
|
if self.config.devices.max_home_appliances and self.config.devices.max_home_appliances > 0:
|
||||||
|
# Use config and not self.washingstart as washingstart may be None (no start)
|
||||||
|
# even if configured to be started.
|
||||||
resource_id = "homeappliance1"
|
resource_id = "homeappliance1"
|
||||||
operation_mode = ApplianceOperationMode.RUN # type: ignore[assignment]
|
last_energy: Optional[float] = None
|
||||||
operation_mode_factor = 1.0
|
for hours, energy in enumerate(self.result.Home_appliance_wh_per_hour):
|
||||||
execution_time = start_datetime.add(hours=self.washingstart - start_day_hour)
|
# hours starts at start_datetime with 0
|
||||||
plan.add_instruction(
|
if energy is None:
|
||||||
DDBCInstruction(
|
raise ValueError(
|
||||||
resource_id=resource_id,
|
f"Unexpected value {energy} in {self.result.Home_appliance_wh_per_hour}"
|
||||||
execution_time=execution_time,
|
)
|
||||||
actuator_id=resource_id,
|
if last_energy is None or energy != last_energy:
|
||||||
operation_mode_id=operation_mode,
|
if energy > 0.0:
|
||||||
operation_mode_factor=operation_mode_factor,
|
operation_mode = ApplianceOperationMode.RUN # type: ignore[assignment]
|
||||||
)
|
else:
|
||||||
)
|
operation_mode = ApplianceOperationMode.OFF # type: ignore[assignment]
|
||||||
|
operation_mode_factor = 1.0
|
||||||
|
execution_time = start_datetime.add(hours=hours)
|
||||||
|
plan.add_instruction(
|
||||||
|
DDBCInstruction(
|
||||||
|
resource_id=resource_id,
|
||||||
|
execution_time=execution_time,
|
||||||
|
actuator_id=resource_id,
|
||||||
|
operation_mode_id=operation_mode,
|
||||||
|
operation_mode_factor=operation_mode_factor,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
last_energy = energy
|
||||||
|
|
||||||
return plan
|
return plan
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from typing import Optional, Union
|
from typing import Optional, Union
|
||||||
|
|
||||||
from pydantic import Field, model_validator
|
from pydantic import Field, computed_field, model_validator
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.core.pydantic import (
|
from akkudoktoreos.core.pydantic import (
|
||||||
@@ -86,6 +86,22 @@ class OptimizationCommonSettings(SettingsBaseModel):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Computed fields
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def keys(self) -> list[str]:
|
||||||
|
"""The keys of the solution."""
|
||||||
|
from akkudoktoreos.core.ems import get_ems
|
||||||
|
|
||||||
|
key_list = []
|
||||||
|
optimization_solution = get_ems().optimization_solution()
|
||||||
|
if optimization_solution:
|
||||||
|
# Prepare mapping
|
||||||
|
df = optimization_solution.solution.to_dataframe()
|
||||||
|
key_list = df.columns.tolist()
|
||||||
|
return sorted(set(key_list))
|
||||||
|
|
||||||
|
# Validators
|
||||||
@model_validator(mode="after")
|
@model_validator(mode="after")
|
||||||
def _enforce_algorithm_configuration(self) -> "OptimizationCommonSettings":
|
def _enforce_algorithm_configuration(self) -> "OptimizationCommonSettings":
|
||||||
"""Ensure algorithm default configuration is set."""
|
"""Ensure algorithm default configuration is set."""
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field, field_validator
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
|
from akkudoktoreos.prediction.elecpriceabc import ElecPriceProvider
|
||||||
@@ -57,6 +57,12 @@ class ElecPriceCommonSettings(SettingsBaseModel):
|
|||||||
json_schema_extra={"description": "Energy Charts provider settings."},
|
json_schema_extra={"description": "Energy Charts provider settings."},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def providers(self) -> list[str]:
|
||||||
|
"""Available electricity price provider ids."""
|
||||||
|
return elecprice_providers
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("provider", mode="after")
|
@field_validator("provider", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field, field_validator
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.prediction.feedintariffabc import FeedInTariffProvider
|
from akkudoktoreos.prediction.feedintariffabc import FeedInTariffProvider
|
||||||
@@ -56,6 +56,12 @@ class FeedInTariffCommonSettings(SettingsBaseModel):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def providers(self) -> list[str]:
|
||||||
|
"""Available feed in tariff provider ids."""
|
||||||
|
return feedintariff_providers
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("provider", mode="after")
|
@field_validator("provider", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field, field_validator
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.prediction.loadabc import LoadProvider
|
from akkudoktoreos.prediction.loadabc import LoadProvider
|
||||||
@@ -62,6 +62,12 @@ class LoadCommonSettings(SettingsBaseModel):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def providers(self) -> list[str]:
|
||||||
|
"""Available load provider ids."""
|
||||||
|
return load_providers
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("provider", mode="after")
|
@field_validator("provider", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -39,11 +39,11 @@ class LoadImportCommonSettings(SettingsBaseModel):
|
|||||||
@field_validator("import_file_path", mode="after")
|
@field_validator("import_file_path", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||||
|
"""Ensure file is available."""
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
value = Path(value)
|
value = Path(value)
|
||||||
"""Ensure file is available."""
|
|
||||||
value.resolve()
|
value.resolve()
|
||||||
if not value.is_file():
|
if not value.is_file():
|
||||||
raise ValueError(f"Import file path '{value}' is not a file.")
|
raise ValueError(f"Import file path '{value}' is not a file.")
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ class VrmForecastResponse(PydanticBaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class LoadVrmCommonSettings(SettingsBaseModel):
|
class LoadVrmCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for VRM API."""
|
"""Common settings for load forecast VRM API."""
|
||||||
|
|
||||||
load_vrm_token: str = Field(
|
load_vrm_token: str = Field(
|
||||||
default="your-token",
|
default="your-token",
|
||||||
|
|||||||
@@ -52,22 +52,7 @@ from akkudoktoreos.prediction.weatherimport import WeatherImport
|
|||||||
|
|
||||||
|
|
||||||
class PredictionCommonSettings(SettingsBaseModel):
|
class PredictionCommonSettings(SettingsBaseModel):
|
||||||
"""General Prediction Configuration.
|
"""General Prediction Configuration."""
|
||||||
|
|
||||||
This class provides configuration for prediction settings, allowing users to specify
|
|
||||||
parameters such as the forecast duration (in hours).
|
|
||||||
Validators ensure each parameter is within a specified range.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
hours (Optional[int]): Number of hours into the future for predictions.
|
|
||||||
Must be non-negative.
|
|
||||||
historic_hours (Optional[int]): Number of hours into the past for historical data.
|
|
||||||
Must be non-negative.
|
|
||||||
|
|
||||||
Validators:
|
|
||||||
validate_hours (int): Ensures `hours` is a non-negative integer.
|
|
||||||
validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.
|
|
||||||
"""
|
|
||||||
|
|
||||||
hours: Optional[int] = Field(
|
hours: Optional[int] = Field(
|
||||||
default=48,
|
default=48,
|
||||||
|
|||||||
@@ -260,6 +260,12 @@ class PVForecastCommonSettings(SettingsBaseModel):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def providers(self) -> list[str]:
|
||||||
|
"""Available PVForecast provider ids."""
|
||||||
|
return pvforecast_providers
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("provider", mode="after")
|
@field_validator("provider", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -193,20 +193,6 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
from the PVForecastAkkudoktor API and maps it to `PVForecastDataRecord` fields, applying
|
from the PVForecastAkkudoktor API and maps it to `PVForecastDataRecord` fields, applying
|
||||||
any necessary scaling or unit corrections. It manages the forecast over a range
|
any necessary scaling or unit corrections. It manages the forecast over a range
|
||||||
of hours into the future and retains historical data.
|
of hours into the future and retains historical data.
|
||||||
|
|
||||||
Attributes:
|
|
||||||
hours (int, optional): Number of hours in the future for the forecast.
|
|
||||||
historic_hours (int, optional): Number of past hours for retaining data.
|
|
||||||
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
|
||||||
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
|
||||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
|
||||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
|
||||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
|
||||||
|
|
||||||
Methods:
|
|
||||||
provider_id(): Returns a unique identifier for the provider.
|
|
||||||
_request_forecast(): Fetches the forecast from the Akkudoktor API.
|
|
||||||
_update_data(): Processes and updates forecast data from Akkudoktor in PVForecastDataRecord format.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# overload
|
# overload
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ class VrmForecastResponse(PydanticBaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class PVForecastVrmCommonSettings(SettingsBaseModel):
|
class PVForecastVrmCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for VRM API."""
|
"""Common settings for PV forecast VRM API."""
|
||||||
|
|
||||||
pvforecast_vrm_token: str = Field(
|
pvforecast_vrm_token: str = Field(
|
||||||
default="your-token",
|
default="your-token",
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field, field_validator
|
from pydantic import Field, computed_field, field_validator
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.prediction.prediction import get_prediction
|
from akkudoktoreos.prediction.prediction import get_prediction
|
||||||
@@ -52,6 +52,12 @@ class WeatherCommonSettings(SettingsBaseModel):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def providers(self) -> list[str]:
|
||||||
|
"""Available weather provider ids."""
|
||||||
|
return weather_providers
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("provider", mode="after")
|
@field_validator("provider", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ WheaterDataClearOutsideMapping: List[Tuple[str, Optional[str], Optional[float]]]
|
|||||||
("Precipitation Type", "Precipitation Type", None),
|
("Precipitation Type", "Precipitation Type", None),
|
||||||
("Precipitation Probability (%)", "Precipitation Probability (%)", 1),
|
("Precipitation Probability (%)", "Precipitation Probability (%)", 1),
|
||||||
("Precipitation Amount (mm)", "Precipitation Amount (mm)", 1),
|
("Precipitation Amount (mm)", "Precipitation Amount (mm)", 1),
|
||||||
("Wind Speed (mph)", "Wind Speed (kmph)", 1.60934),
|
("Wind Speed/Direction (mph)", "Wind Speed (kmph)", 1.60934),
|
||||||
("Chance of Frost", "Chance of Frost", None),
|
("Chance of Frost", "Chance of Frost", None),
|
||||||
("Temperature (°C)", "Temperature (°C)", 1),
|
("Temperature (°C)", "Temperature (°C)", 1),
|
||||||
("Feels Like (°C)", "Feels Like (°C)", 1),
|
("Feels Like (°C)", "Feels Like (°C)", 1),
|
||||||
@@ -218,7 +218,7 @@ class WeatherClearOutside(WeatherProvider):
|
|||||||
for detail_name in detail_names:
|
for detail_name in detail_names:
|
||||||
if detail_name not in clearoutside_key_mapping:
|
if detail_name not in clearoutside_key_mapping:
|
||||||
warning_msg = (
|
warning_msg = (
|
||||||
f"Clearoutside schema change. Unexpected detail name {detail_name}."
|
f"Clearoutside schema change. Unexpected detail name '{detail_name}'."
|
||||||
)
|
)
|
||||||
logger.warning(warning_msg)
|
logger.warning(warning_msg)
|
||||||
|
|
||||||
@@ -226,17 +226,13 @@ class WeatherClearOutside(WeatherProvider):
|
|||||||
# Beware there is one ul paragraph before that is not associated to a detail
|
# Beware there is one ul paragraph before that is not associated to a detail
|
||||||
p_detail_tables = p_day.find_all("ul")
|
p_detail_tables = p_day.find_all("ul")
|
||||||
if len(p_detail_tables) != len(detail_names) + 1:
|
if len(p_detail_tables) != len(detail_names) + 1:
|
||||||
error_msg = f"Clearoutside schema change. Unexpected number ({p_detail_tables}) of `ul` for details {len(detail_names)}. Should be one extra only."
|
error_msg = f"Clearoutside schema change. Unexpected number ({p_detail_tables}) of 'ul' for details {len(detail_names)}. Should be one extra only."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
p_detail_tables.pop(0)
|
p_detail_tables.pop(0)
|
||||||
|
|
||||||
# Create clearout data
|
# Create clearout data
|
||||||
clearout_data = {}
|
clearout_data = {}
|
||||||
# Replace some detail names that we use differently
|
|
||||||
detail_names = [
|
|
||||||
s.replace("Wind Speed/Direction (mph)", "Wind Speed (mph)") for s in detail_names
|
|
||||||
]
|
|
||||||
# Number of detail values. On last day may be less than 24.
|
# Number of detail values. On last day may be less than 24.
|
||||||
detail_values_count = None
|
detail_values_count = None
|
||||||
# Add data values
|
# Add data values
|
||||||
@@ -266,7 +262,7 @@ class WeatherClearOutside(WeatherProvider):
|
|||||||
extra_detail_name = None
|
extra_detail_name = None
|
||||||
extra_detail_data = []
|
extra_detail_data = []
|
||||||
for p_detail_value in p_detail_values:
|
for p_detail_value in p_detail_values:
|
||||||
if detail_name == "Wind Speed (mph)":
|
if detail_name == "Wind Speed/Direction (mph)":
|
||||||
# Get the usual value
|
# Get the usual value
|
||||||
value_str = p_detail_value.get_text()
|
value_str = p_detail_value.get_text()
|
||||||
# Also extract extra data
|
# Also extract extra data
|
||||||
|
|||||||
@@ -19,6 +19,8 @@ over a specified period.
|
|||||||
|
|
||||||
Documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedocs.io/en/latest/).
|
Documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedocs.io/en/latest/).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Version Information
|
## Version Information
|
||||||
|
|
||||||
**Current Version:** {__version__}
|
**Current Version:** {__version__}
|
||||||
@@ -29,4 +31,5 @@ Documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedoc
|
|||||||
|
|
||||||
|
|
||||||
def About(**kwargs: Any) -> Div:
|
def About(**kwargs: Any) -> Div:
|
||||||
|
global about_md
|
||||||
return Markdown(about_md, **kwargs)
|
return Markdown(about_md, **kwargs)
|
||||||
|
|||||||
@@ -5,17 +5,13 @@ for the EOS dashboard.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, Optional, Union
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from fasthtml.common import Select
|
from fasthtml.common import Select
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
from monsterui.foundations import stringify
|
|
||||||
from monsterui.franken import ( # Select, TODO: Select from FrankenUI does not work - using Select from FastHTML instead
|
from monsterui.franken import ( # Select, TODO: Select from FrankenUI does not work - using Select from FastHTML instead
|
||||||
H3,
|
H3,
|
||||||
Button,
|
|
||||||
ButtonT,
|
|
||||||
Card,
|
Card,
|
||||||
Details,
|
Details,
|
||||||
Div,
|
Div,
|
||||||
@@ -28,33 +24,12 @@ from monsterui.franken import ( # Select, TODO: Select from FrankenUI does not
|
|||||||
Summary,
|
Summary,
|
||||||
UkIcon,
|
UkIcon,
|
||||||
)
|
)
|
||||||
from platformdirs import user_config_dir
|
|
||||||
|
|
||||||
from akkudoktoreos.server.dash.components import Error, Success
|
from akkudoktoreos.server.dash.components import ConfigButton, Error, Success
|
||||||
from akkudoktoreos.server.dash.configuration import get_nested_value
|
from akkudoktoreos.server.dash.configuration import get_nested_value
|
||||||
|
from akkudoktoreos.server.dash.context import export_import_directory, request_url_for
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime
|
from akkudoktoreos.utils.datetimeutil import to_datetime
|
||||||
|
|
||||||
# Directory to export files to, or to import files from
|
|
||||||
export_import_directory = Path(user_config_dir("net.akkudoktor.eosdash", "akkudoktor"))
|
|
||||||
|
|
||||||
|
|
||||||
def AdminButton(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> Button:
|
|
||||||
"""Creates a styled button for administrative actions.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
*c (Any): Positional arguments representing the button's content.
|
|
||||||
cls (Optional[Union[str, tuple]]): Additional CSS classes for styling. Defaults to None.
|
|
||||||
**kwargs (Any): Additional keyword arguments passed to the `Button`.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Button: A styled `Button` component for admin actions.
|
|
||||||
"""
|
|
||||||
new_cls = f"{ButtonT.primary}"
|
|
||||||
if cls:
|
|
||||||
new_cls += f" {stringify(cls)}"
|
|
||||||
kwargs["cls"] = new_cls
|
|
||||||
return Button(*c, submit=False, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def AdminCache(
|
def AdminCache(
|
||||||
eos_host: str, eos_port: Union[str, int], data: Optional[dict], config: Optional[dict[str, Any]]
|
eos_host: str, eos_port: Union[str, int], data: Optional[dict], config: Optional[dict[str, Any]]
|
||||||
@@ -111,9 +86,9 @@ def AdminCache(
|
|||||||
Grid(
|
Grid(
|
||||||
DivHStacked(
|
DivHStacked(
|
||||||
UkIcon(icon="play"),
|
UkIcon(icon="play"),
|
||||||
AdminButton(
|
ConfigButton(
|
||||||
"Clear all",
|
"Clear all",
|
||||||
hx_post="/eosdash/admin",
|
hx_post=request_url_for("/eosdash/admin"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='{"category": "cache", "action": "clear"}',
|
hx_vals='{"category": "cache", "action": "clear"}',
|
||||||
@@ -132,9 +107,9 @@ def AdminCache(
|
|||||||
Grid(
|
Grid(
|
||||||
DivHStacked(
|
DivHStacked(
|
||||||
UkIcon(icon="play"),
|
UkIcon(icon="play"),
|
||||||
AdminButton(
|
ConfigButton(
|
||||||
"Clear expired",
|
"Clear expired",
|
||||||
hx_post="/eosdash/admin",
|
hx_post=request_url_for("/eosdash/admin"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='{"category": "cache", "action": "clear-expired"}',
|
hx_vals='{"category": "cache", "action": "clear-expired"}',
|
||||||
@@ -301,14 +276,16 @@ def AdminConfig(
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Update for display, in case we added a new file before
|
# Update for display, in case we added a new file before
|
||||||
import_from_file_names = [f.name for f in list(export_import_directory.glob("*.json"))]
|
import_from_file_names = sorted([f.name for f in list(export_import_directory.glob("*.json"))])
|
||||||
if config_backup is None:
|
if config_backup is None:
|
||||||
revert_to_backup_metadata_list = ["Backup list not available"]
|
revert_to_backup_metadata_list = ["Backup list not available"]
|
||||||
else:
|
else:
|
||||||
revert_to_backup_metadata_list = [
|
revert_to_backup_metadata_list = sorted(
|
||||||
f"{backup_meta['date_time']} {backup_meta['version']}"
|
[
|
||||||
for backup_id, backup_meta in config_backup.items()
|
f"{backup_meta['date_time']} {backup_meta['version']}"
|
||||||
]
|
for backup_id, backup_meta in config_backup.items()
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
return (
|
return (
|
||||||
category,
|
category,
|
||||||
@@ -319,9 +296,9 @@ def AdminConfig(
|
|||||||
Grid(
|
Grid(
|
||||||
DivHStacked(
|
DivHStacked(
|
||||||
UkIcon(icon="play"),
|
UkIcon(icon="play"),
|
||||||
AdminButton(
|
ConfigButton(
|
||||||
"Save to file",
|
"Save to file",
|
||||||
hx_post="/eosdash/admin",
|
hx_post=request_url_for("/eosdash/admin"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='{"category": "configuration", "action": "save_to_file"}',
|
hx_vals='{"category": "configuration", "action": "save_to_file"}',
|
||||||
@@ -341,9 +318,9 @@ def AdminConfig(
|
|||||||
Grid(
|
Grid(
|
||||||
DivHStacked(
|
DivHStacked(
|
||||||
UkIcon(icon="play"),
|
UkIcon(icon="play"),
|
||||||
AdminButton(
|
ConfigButton(
|
||||||
"Revert to backup",
|
"Revert to backup",
|
||||||
hx_post="/eosdash/admin",
|
hx_post=request_url_for("/eosdash/admin"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{ "category": "configuration", "action": "revert_to_backup", "backup_metadata": document.querySelector("[name=\'selected_backup_metadata\']").value }',
|
hx_vals='js:{ "category": "configuration", "action": "revert_to_backup", "backup_metadata": document.querySelector("[name=\'selected_backup_metadata\']").value }',
|
||||||
@@ -352,6 +329,7 @@ def AdminConfig(
|
|||||||
*Options(*revert_to_backup_metadata_list),
|
*Options(*revert_to_backup_metadata_list),
|
||||||
id="backup_metadata",
|
id="backup_metadata",
|
||||||
name="selected_backup_metadata", # Name of hidden input field with selected value
|
name="selected_backup_metadata", # Name of hidden input field with selected value
|
||||||
|
cls="border rounded px-3 py-2 mr-2",
|
||||||
placeholder="Select backup",
|
placeholder="Select backup",
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
@@ -368,9 +346,9 @@ def AdminConfig(
|
|||||||
Grid(
|
Grid(
|
||||||
DivHStacked(
|
DivHStacked(
|
||||||
UkIcon(icon="play"),
|
UkIcon(icon="play"),
|
||||||
AdminButton(
|
ConfigButton(
|
||||||
"Export to file",
|
"Export to file",
|
||||||
hx_post="/eosdash/admin",
|
hx_post=request_url_for("/eosdash/admin"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{"category": "configuration", "action": "export_to_file", "export_to_file_tag": document.querySelector("[name=\'chosen_export_file_tag\']").value }',
|
hx_vals='js:{"category": "configuration", "action": "export_to_file", "export_to_file_tag": document.querySelector("[name=\'chosen_export_file_tag\']").value }',
|
||||||
@@ -398,9 +376,9 @@ def AdminConfig(
|
|||||||
Grid(
|
Grid(
|
||||||
DivHStacked(
|
DivHStacked(
|
||||||
UkIcon(icon="play"),
|
UkIcon(icon="play"),
|
||||||
AdminButton(
|
ConfigButton(
|
||||||
"Import from file",
|
"Import from file",
|
||||||
hx_post="/eosdash/admin",
|
hx_post=request_url_for("/eosdash/admin"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{ "category": "configuration", "action": "import_from_file", "import_file_name": document.querySelector("[name=\'selected_import_file_name\']").value }',
|
hx_vals='js:{ "category": "configuration", "action": "import_from_file", "import_file_name": document.querySelector("[name=\'selected_import_file_name\']").value }',
|
||||||
@@ -409,6 +387,7 @@ def AdminConfig(
|
|||||||
*Options(*import_from_file_names),
|
*Options(*import_from_file_names),
|
||||||
id="import_file_name",
|
id="import_file_name",
|
||||||
name="selected_import_file_name", # Name of hidden input field with selected value
|
name="selected_import_file_name", # Name of hidden input field with selected value
|
||||||
|
cls="border rounded px-3 py-2 mr-2",
|
||||||
placeholder="Select file",
|
placeholder="Select file",
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -2,35 +2,13 @@
|
|||||||
# MIT license
|
# MIT license
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
import bokeh
|
|
||||||
from bokeh.embed import components
|
from bokeh.embed import components
|
||||||
from bokeh.models import Plot
|
from bokeh.models import Plot
|
||||||
from monsterui.franken import H4, Card, NotStr, Script
|
from bokeh.resources import INLINE
|
||||||
|
from monsterui.franken import H4, Card, NotStr
|
||||||
|
|
||||||
bokeh_version = bokeh.__version__
|
# Javascript for bokeh - to be included by the page
|
||||||
|
BokehJS = [NotStr(INLINE.render_css()), NotStr(INLINE.render_js())]
|
||||||
BokehJS = [
|
|
||||||
Script(
|
|
||||||
src=f"https://cdn.bokeh.org/bokeh/release/bokeh-{bokeh_version}.min.js",
|
|
||||||
crossorigin="anonymous",
|
|
||||||
),
|
|
||||||
Script(
|
|
||||||
src=f"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-{bokeh_version}.min.js",
|
|
||||||
crossorigin="anonymous",
|
|
||||||
),
|
|
||||||
Script(
|
|
||||||
src=f"https://cdn.bokeh.org/bokeh/release/bokeh-tables-{bokeh_version}.min.js",
|
|
||||||
crossorigin="anonymous",
|
|
||||||
),
|
|
||||||
Script(
|
|
||||||
src=f"https://cdn.bokeh.org/bokeh/release/bokeh-gl-{bokeh_version}.min.js",
|
|
||||||
crossorigin="anonymous",
|
|
||||||
),
|
|
||||||
Script(
|
|
||||||
src=f"https://cdn.bokeh.org/bokeh/release/bokeh-mathjax-{bokeh_version}.min.js",
|
|
||||||
crossorigin="anonymous",
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def bokey_apply_theme_to_plot(plot: Plot, dark: bool) -> None:
|
def bokey_apply_theme_to_plot(plot: Plot, dark: bool) -> None:
|
||||||
|
|||||||
@@ -1,28 +1,36 @@
|
|||||||
from typing import Any, Optional, Union
|
import json
|
||||||
|
from typing import Any, Callable, Optional, Union
|
||||||
|
|
||||||
from fasthtml.common import H1, Button, Div, Li
|
from fasthtml.common import H1, Button, Div, Li, Select
|
||||||
from monsterui.daisy import (
|
from monsterui.daisy import (
|
||||||
Alert,
|
Alert,
|
||||||
AlertT,
|
AlertT,
|
||||||
)
|
)
|
||||||
from monsterui.foundations import stringify
|
from monsterui.foundations import stringify
|
||||||
from monsterui.franken import ( # Button, Does not pass hx_vals
|
from monsterui.franken import ( # Select: Does not work - using Select from FastHTML instead;; Button: Does not pass hx_vals - using Button from FastHTML instead
|
||||||
H3,
|
H3,
|
||||||
|
ButtonT,
|
||||||
Card,
|
Card,
|
||||||
|
Code,
|
||||||
Container,
|
Container,
|
||||||
ContainerT,
|
ContainerT,
|
||||||
Details,
|
Details,
|
||||||
|
DivHStacked,
|
||||||
DivLAligned,
|
DivLAligned,
|
||||||
DivRAligned,
|
DivRAligned,
|
||||||
Form,
|
Form,
|
||||||
Grid,
|
Grid,
|
||||||
Input,
|
Input,
|
||||||
|
Option,
|
||||||
P,
|
P,
|
||||||
|
Pre,
|
||||||
Summary,
|
Summary,
|
||||||
TabContainer,
|
TabContainer,
|
||||||
UkIcon,
|
UkIcon,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from akkudoktoreos.server.dash.context import request_url_for
|
||||||
|
|
||||||
scrollbar_viewport_styles = (
|
scrollbar_viewport_styles = (
|
||||||
"scrollbar-width: none; -ms-overflow-style: none; -webkit-overflow-scrolling: touch;"
|
"scrollbar-width: none; -ms-overflow-style: none; -webkit-overflow-scrolling: touch;"
|
||||||
)
|
)
|
||||||
@@ -71,11 +79,59 @@ def ScrollArea(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def JsonView(data: Any) -> Pre:
|
||||||
|
"""Render structured data as formatted JSON inside a styled <pre> block.
|
||||||
|
|
||||||
|
The data is serialized to JSON using indentation for readability and
|
||||||
|
UTF-8 characters are preserved. The JSON is wrapped in a <code> element
|
||||||
|
with a JSON language class to support syntax highlighting, and then
|
||||||
|
placed inside a <pre> container with MonsterUI-compatible styling.
|
||||||
|
|
||||||
|
The JSON output is height-constrained and scrollable to safely display
|
||||||
|
large payloads without breaking the page layout.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: Any JSON-serializable Python object to render.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A FastHTML `Pre` element containing a formatted JSON representation
|
||||||
|
of the input data.
|
||||||
|
"""
|
||||||
|
code_str = json.dumps(data, indent=2, ensure_ascii=False)
|
||||||
|
return Pre(
|
||||||
|
Code(code_str, cls="language-json"),
|
||||||
|
cls="rounded-lg bg-muted p-3 max-h-[30vh] overflow-y-auto overflow-x-hidden whitespace-pre-wrap",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def TextView(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> Pre:
|
||||||
|
"""Render plain text with preserved line breaks and wrapped long lines.
|
||||||
|
|
||||||
|
This view uses a <pre> element with whitespace wrapping enabled so that
|
||||||
|
newline characters are respected while long lines are wrapped instead
|
||||||
|
of causing horizontal scrolling.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
*c (Any): Positional arguments representing the TextView content.
|
||||||
|
cls (Optional[Union[str, tuple]]): Additional CSS classes for styling. Defaults to None.
|
||||||
|
**kwargs (Any): Additional keyword arguments passed to the `Pre`.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A FastHTML `Pre` element that displays the text with preserved
|
||||||
|
formatting and line wrapping.
|
||||||
|
"""
|
||||||
|
new_cls = "whitespace-pre-wrap"
|
||||||
|
if cls:
|
||||||
|
new_cls += f"{stringify(cls)}"
|
||||||
|
kwargs["cls"] = new_cls
|
||||||
|
return Pre(*c, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def Success(*c: Any) -> Alert:
|
def Success(*c: Any) -> Alert:
|
||||||
return Alert(
|
return Alert(
|
||||||
DivLAligned(
|
DivLAligned(
|
||||||
UkIcon("check"),
|
UkIcon("check"),
|
||||||
P(*c),
|
TextView(*c),
|
||||||
),
|
),
|
||||||
cls=AlertT.success,
|
cls=AlertT.success,
|
||||||
)
|
)
|
||||||
@@ -85,12 +141,321 @@ def Error(*c: Any) -> Alert:
|
|||||||
return Alert(
|
return Alert(
|
||||||
DivLAligned(
|
DivLAligned(
|
||||||
UkIcon("triangle-alert"),
|
UkIcon("triangle-alert"),
|
||||||
P(*c),
|
TextView(*c),
|
||||||
),
|
),
|
||||||
cls=AlertT.error,
|
cls=AlertT.error,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def ConfigButton(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> Button:
|
||||||
|
"""Creates a styled button for configuration actions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
*c (Any): Positional arguments representing the button's content.
|
||||||
|
cls (Optional[Union[str, tuple]]): Additional CSS classes for styling. Defaults to None.
|
||||||
|
**kwargs (Any): Additional keyword arguments passed to the `Button`.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Button: A styled `Button` component for configuration actions.
|
||||||
|
"""
|
||||||
|
new_cls = f"px-4 py-2 rounded {ButtonT.primary}"
|
||||||
|
if cls:
|
||||||
|
new_cls += f"{stringify(cls)}"
|
||||||
|
kwargs["cls"] = new_cls
|
||||||
|
return Button(*c, submit=False, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def make_config_update_form() -> Callable[[str, str], Grid]:
|
||||||
|
"""Factory for a form that sets a single configuration value.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A function (config_name: str, value: str) -> Grid
|
||||||
|
"""
|
||||||
|
|
||||||
|
def ConfigUpdateForm(config_name: str, value: str) -> Grid:
|
||||||
|
config_id = config_name.lower().replace(".", "-")
|
||||||
|
|
||||||
|
return Grid(
|
||||||
|
DivRAligned(P("update")),
|
||||||
|
Grid(
|
||||||
|
Form(
|
||||||
|
Input(value="update", type="hidden", id="action"),
|
||||||
|
Input(value=config_name, type="hidden", id="key"),
|
||||||
|
Input(value=value, type="text", id="value"),
|
||||||
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
id=f"{config_id}-update-form",
|
||||||
|
)
|
||||||
|
|
||||||
|
return ConfigUpdateForm
|
||||||
|
|
||||||
|
|
||||||
|
def make_config_update_value_form(
|
||||||
|
available_values: list[str],
|
||||||
|
) -> Callable[[str, str], Grid]:
|
||||||
|
"""Factory for a form that sets a single configuration value with pre-set avaliable values.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
available_values: Allowed values for the configuration
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A function (config_name: str, value: str) -> Grid
|
||||||
|
"""
|
||||||
|
|
||||||
|
def ConfigUpdateValueForm(config_name: str, value: str) -> Grid:
|
||||||
|
config_id = config_name.lower().replace(".", "-")
|
||||||
|
|
||||||
|
return Grid(
|
||||||
|
DivRAligned(P("update value")),
|
||||||
|
DivHStacked(
|
||||||
|
ConfigButton(
|
||||||
|
"Set",
|
||||||
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
hx_vals=f"""js:{{
|
||||||
|
action: "update",
|
||||||
|
key: "{config_name}",
|
||||||
|
value: document
|
||||||
|
.querySelector("[name='{config_id}_selected_value']")
|
||||||
|
.value
|
||||||
|
}}""",
|
||||||
|
),
|
||||||
|
Select(
|
||||||
|
Option("Select a value...", value="", selected=True, disabled=True),
|
||||||
|
*[
|
||||||
|
Option(
|
||||||
|
val,
|
||||||
|
value=val,
|
||||||
|
selected=(val == value),
|
||||||
|
)
|
||||||
|
for val in available_values
|
||||||
|
],
|
||||||
|
id=f"{config_id}-value-select",
|
||||||
|
name=f"{config_id}_selected_value",
|
||||||
|
required=True,
|
||||||
|
cls="border rounded px-3 py-2 mr-2 col-span-4",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
id=f"{config_id}-update-value-form",
|
||||||
|
)
|
||||||
|
|
||||||
|
return ConfigUpdateValueForm
|
||||||
|
|
||||||
|
|
||||||
|
def make_config_update_list_form(available_values: list[str]) -> Callable[[str, str], Grid]:
|
||||||
|
"""Factory function that creates a ConfigUpdateListForm with pre-set available values.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
available_values: List of available values to choose from
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A function that creates ConfigUpdateListForm instances with the given available_values.
|
||||||
|
The returned function takes (config_name: str, value: str) and returns a Grid.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def ConfigUpdateListForm(config_name: str, value: str) -> Grid:
|
||||||
|
"""Creates a card with a form to add/remove values from a list.
|
||||||
|
|
||||||
|
Sends to "/eosdash/configuration":
|
||||||
|
The form sends an HTTP PUT request with the following parameters:
|
||||||
|
|
||||||
|
- key (str): The configuration key name (value of config_name parameter)
|
||||||
|
- value (str): A JSON string representing the updated list of values
|
||||||
|
|
||||||
|
The value parameter will always be a valid JSON string representation of a list.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config_name: The name of the configuration
|
||||||
|
value (str): The current value of the configuration, a list of values in json format.
|
||||||
|
"""
|
||||||
|
current_values = json.loads(value)
|
||||||
|
if current_values is None:
|
||||||
|
current_values = []
|
||||||
|
config_id = config_name.lower().replace(".", "-")
|
||||||
|
|
||||||
|
return Grid(
|
||||||
|
DivRAligned(P("update list")),
|
||||||
|
Grid(
|
||||||
|
# Form to add new value to list
|
||||||
|
DivHStacked(
|
||||||
|
ConfigButton(
|
||||||
|
"Add",
|
||||||
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
hx_vals=f"""js:{{
|
||||||
|
action: "update",
|
||||||
|
key: "{config_name}",
|
||||||
|
value: JSON.stringify(
|
||||||
|
[...new Set([
|
||||||
|
...{json.dumps(current_values)},
|
||||||
|
document.querySelector("[name='{config_id}_selected_add_value']").value.trim()
|
||||||
|
])].filter(v => v !== "")
|
||||||
|
)
|
||||||
|
}}""",
|
||||||
|
),
|
||||||
|
Select(
|
||||||
|
Option("Select a value...", value="", selected=True, disabled=True),
|
||||||
|
*[
|
||||||
|
Option(val, value=val, disabled=val in current_values)
|
||||||
|
for val in available_values
|
||||||
|
],
|
||||||
|
id=f"{config_id}-add-value-select",
|
||||||
|
name=f"{config_id}_selected_add_value", # Name of hidden input with selected value
|
||||||
|
required=True,
|
||||||
|
cls="border rounded px-3 py-2 mr-2 col-span-4",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
# Form to delete value from list
|
||||||
|
DivHStacked(
|
||||||
|
ConfigButton(
|
||||||
|
"Delete",
|
||||||
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
hx_vals=f"""js:{{
|
||||||
|
action: "update",
|
||||||
|
key: "{config_name}",
|
||||||
|
value: JSON.stringify(
|
||||||
|
[...new Set([
|
||||||
|
...{json.dumps(current_values)}
|
||||||
|
])].filter(v => v !== document.querySelector("[name='{config_id}_selected_delete_value']").value.trim())
|
||||||
|
)
|
||||||
|
}}""",
|
||||||
|
),
|
||||||
|
Select(
|
||||||
|
Option("Select a value...", value="", selected=True, disabled=True),
|
||||||
|
*[Option(val, value=val) for val in current_values],
|
||||||
|
id=f"{config_id}-delete-value-select",
|
||||||
|
name=f"{config_id}_selected_delete_value", # Name of hidden input with selected value
|
||||||
|
required=True,
|
||||||
|
cls="border rounded px-3 py-2 mr-2 col-span-4",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
cols=1,
|
||||||
|
),
|
||||||
|
id=f"{config_id}-update-list-form",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Return the function that creates a ConfigUpdateListForm instance
|
||||||
|
return ConfigUpdateListForm
|
||||||
|
|
||||||
|
|
||||||
|
def make_config_update_map_form(
|
||||||
|
available_keys: list[str] | None = None,
|
||||||
|
available_values: list[str] | None = None,
|
||||||
|
) -> Callable[[str, str], Grid]:
|
||||||
|
"""Factory function that creates a ConfigUpdateMapForm.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
available_keys: Optional list of allowed keys (None = free text)
|
||||||
|
available_values: Optional list of allowed values (None = free text)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A function that creates ConfigUpdateMapForm instances.
|
||||||
|
The returned function takes (config_name: str, value: str) and returns a Grid.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def ConfigUpdateMapForm(config_name: str, value: str) -> Grid:
|
||||||
|
"""Creates a card with a form to add/update/delete entries in a map."""
|
||||||
|
current_map: dict[str, str] = json.loads(value) or {}
|
||||||
|
config_id = config_name.lower().replace(".", "-")
|
||||||
|
|
||||||
|
return Grid(
|
||||||
|
DivRAligned(P("update map")),
|
||||||
|
Grid(
|
||||||
|
# Add / update key-value pair
|
||||||
|
DivHStacked(
|
||||||
|
ConfigButton(
|
||||||
|
"Set",
|
||||||
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
hx_vals=f"""js:{{
|
||||||
|
action: "update",
|
||||||
|
key: "{config_name}",
|
||||||
|
value: JSON.stringify(
|
||||||
|
Object.assign(
|
||||||
|
{json.dumps(current_map)},
|
||||||
|
{{
|
||||||
|
[document.querySelector("[name='{config_id}_set_key']").value.trim()]:
|
||||||
|
document.querySelector("[name='{config_id}_set_value']").value.trim()
|
||||||
|
}}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}}""",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
Select(
|
||||||
|
Option("Select key...", value="", selected=True, disabled=True),
|
||||||
|
*[Option(k, value=k) for k in (sorted(available_keys) or [])],
|
||||||
|
name=f"{config_id}_set_key",
|
||||||
|
cls="border rounded px-3 py-2 col-span-2",
|
||||||
|
)
|
||||||
|
if available_keys
|
||||||
|
else Input(
|
||||||
|
name=f"{config_id}_set_key",
|
||||||
|
placeholder="Key",
|
||||||
|
required=True,
|
||||||
|
cls="border rounded px-3 py-2 col-span-2",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
Select(
|
||||||
|
Option("Select value...", value="", selected=True, disabled=True),
|
||||||
|
*[Option(k, value=k) for k in (sorted(available_values) or [])],
|
||||||
|
name=f"{config_id}_set_value",
|
||||||
|
cls="border rounded px-3 py-2 col-span-2",
|
||||||
|
)
|
||||||
|
if available_values
|
||||||
|
else Input(
|
||||||
|
name=f"{config_id}_set_value",
|
||||||
|
placeholder="Value",
|
||||||
|
required=True,
|
||||||
|
cls="border rounded px-3 py-2 col-span-2",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
# Delete key
|
||||||
|
DivHStacked(
|
||||||
|
ConfigButton(
|
||||||
|
"Delete",
|
||||||
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
hx_vals=f"""js:{{
|
||||||
|
action: "update",
|
||||||
|
key: "{config_name}",
|
||||||
|
value: JSON.stringify(
|
||||||
|
Object.fromEntries(
|
||||||
|
Object.entries({json.dumps(current_map)})
|
||||||
|
.filter(([k]) =>
|
||||||
|
k !== document.querySelector("[name='{config_id}_delete_key']").value
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}}""",
|
||||||
|
),
|
||||||
|
Select(
|
||||||
|
Option("Select key...", value="", selected=True, disabled=True),
|
||||||
|
*[Option(k, value=k) for k in sorted(current_map.keys())],
|
||||||
|
name=f"{config_id}_delete_key",
|
||||||
|
required=True,
|
||||||
|
cls="border rounded px-3 py-2 col-span-4",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
cols=1,
|
||||||
|
),
|
||||||
|
id=f"{config_id}-update-map-form",
|
||||||
|
)
|
||||||
|
|
||||||
|
return ConfigUpdateMapForm
|
||||||
|
|
||||||
|
|
||||||
def ConfigCard(
|
def ConfigCard(
|
||||||
config_name: str,
|
config_name: str,
|
||||||
config_type: str,
|
config_type: str,
|
||||||
@@ -102,6 +467,7 @@ def ConfigCard(
|
|||||||
update_error: Optional[str],
|
update_error: Optional[str],
|
||||||
update_value: Optional[str],
|
update_value: Optional[str],
|
||||||
update_open: Optional[bool],
|
update_open: Optional[bool],
|
||||||
|
update_form_factory: Optional[Callable[[str, str], Grid]] = None,
|
||||||
) -> Card:
|
) -> Card:
|
||||||
"""Creates a styled configuration card for displaying configuration details.
|
"""Creates a styled configuration card for displaying configuration details.
|
||||||
|
|
||||||
@@ -113,7 +479,7 @@ def ConfigCard(
|
|||||||
config_name (str): The name of the configuration.
|
config_name (str): The name of the configuration.
|
||||||
config_type (str): The type of the configuration.
|
config_type (str): The type of the configuration.
|
||||||
read_only (str): Indicates if the configuration is read-only ("rw" for read-write,
|
read_only (str): Indicates if the configuration is read-only ("rw" for read-write,
|
||||||
any other value indicates read-only).
|
any other value indicates read-only).
|
||||||
value (str): The current value of the configuration.
|
value (str): The current value of the configuration.
|
||||||
default (str): The default value of the configuration.
|
default (str): The default value of the configuration.
|
||||||
description (str): A description of the configuration.
|
description (str): A description of the configuration.
|
||||||
@@ -121,7 +487,9 @@ def ConfigCard(
|
|||||||
update_error (Optional[str]): The error message, if any, during the update process.
|
update_error (Optional[str]): The error message, if any, during the update process.
|
||||||
update_value (Optional[str]): The value to be updated, if different from the current value.
|
update_value (Optional[str]): The value to be updated, if different from the current value.
|
||||||
update_open (Optional[bool]): A flag indicating whether the update section of the card
|
update_open (Optional[bool]): A flag indicating whether the update section of the card
|
||||||
should be initially expanded.
|
should be initially expanded.
|
||||||
|
update_form_factory (Optional[Callable[[str, str], Grid]]): The factory to create a form to
|
||||||
|
use to update the configuration value. Defaults to simple text input.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Card: A styled Card component containing the configuration details.
|
Card: A styled Card component containing the configuration details.
|
||||||
@@ -131,6 +499,11 @@ def ConfigCard(
|
|||||||
update_value = value
|
update_value = value
|
||||||
if not update_open:
|
if not update_open:
|
||||||
update_open = False
|
update_open = False
|
||||||
|
if not update_form_factory:
|
||||||
|
# Default update form
|
||||||
|
update_form = make_config_update_form()(config_name, update_value)
|
||||||
|
else:
|
||||||
|
update_form = update_form_factory(config_name, update_value)
|
||||||
if deprecated:
|
if deprecated:
|
||||||
if isinstance(deprecated, bool):
|
if isinstance(deprecated, bool):
|
||||||
deprecated = "Deprecated"
|
deprecated = "Deprecated"
|
||||||
@@ -147,12 +520,12 @@ def ConfigCard(
|
|||||||
P(read_only),
|
P(read_only),
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
P(value),
|
JsonView(json.loads(value)),
|
||||||
),
|
),
|
||||||
cls="list-none",
|
cls="list-none",
|
||||||
),
|
),
|
||||||
Grid(
|
Grid(
|
||||||
P(description),
|
TextView(description),
|
||||||
P(config_type),
|
P(config_type),
|
||||||
)
|
)
|
||||||
if not deprecated
|
if not deprecated
|
||||||
@@ -171,27 +544,18 @@ def ConfigCard(
|
|||||||
if read_only == "rw" and not deprecated
|
if read_only == "rw" and not deprecated
|
||||||
else None,
|
else None,
|
||||||
# Set value
|
# Set value
|
||||||
Grid(
|
update_form if read_only == "rw" and not deprecated else None,
|
||||||
DivRAligned(P("update")),
|
|
||||||
Grid(
|
|
||||||
Form(
|
|
||||||
Input(value=config_name, type="hidden", id="key"),
|
|
||||||
Input(value=update_value, type="text", id="value"),
|
|
||||||
hx_put="/eosdash/configuration",
|
|
||||||
hx_target="#page-content",
|
|
||||||
hx_swap="innerHTML",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
if read_only == "rw" and not deprecated
|
|
||||||
else None,
|
|
||||||
# Last error
|
# Last error
|
||||||
Grid(
|
Grid(
|
||||||
DivRAligned(P("update error")),
|
DivRAligned(P("update error")),
|
||||||
P(update_error),
|
TextView(update_error),
|
||||||
)
|
)
|
||||||
if update_error
|
if update_error
|
||||||
else None,
|
else None,
|
||||||
|
# Provide minimal update form on error if complex update_form is used
|
||||||
|
make_config_update_form()(config_name, update_value)
|
||||||
|
if update_error and update_form_factory is not None
|
||||||
|
else None,
|
||||||
cls="space-y-4 gap-4",
|
cls="space-y-4 gap-4",
|
||||||
open=update_open,
|
open=update_open,
|
||||||
),
|
),
|
||||||
@@ -226,7 +590,7 @@ def DashboardFooter(*c: Any, path: str) -> Card:
|
|||||||
"""
|
"""
|
||||||
return Card(
|
return Card(
|
||||||
Container(*c, id="footer-content"),
|
Container(*c, id="footer-content"),
|
||||||
hx_get=f"{path}",
|
hx_get=request_url_for(path),
|
||||||
hx_trigger="every 5s",
|
hx_trigger="every 5s",
|
||||||
hx_target="#footer-content",
|
hx_target="#footer-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
@@ -266,7 +630,7 @@ def DashboardTabs(dashboard_items: dict[str, str]) -> Card:
|
|||||||
Li(
|
Li(
|
||||||
DashboardTrigger(
|
DashboardTrigger(
|
||||||
H3(menu),
|
H3(menu),
|
||||||
hx_get=f"{path}",
|
hx_get=request_url_for(path),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{ "dark": window.matchMedia("(prefers-color-scheme: dark)").matches }',
|
hx_vals='js:{ "dark": window.matchMedia("(prefers-color-scheme: dark)").matches }',
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
from typing import Any, Dict, List, Optional, Sequence, TypeVar, Union
|
from collections.abc import Sequence
|
||||||
|
from typing import Any, Dict, List, Optional, TypeVar, Union
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
@@ -7,6 +8,7 @@ from monsterui.franken import (
|
|||||||
H3,
|
H3,
|
||||||
H4,
|
H4,
|
||||||
Card,
|
Card,
|
||||||
|
CardTitle,
|
||||||
Details,
|
Details,
|
||||||
Div,
|
Div,
|
||||||
DividerLine,
|
DividerLine,
|
||||||
@@ -15,6 +17,7 @@ from monsterui.franken import (
|
|||||||
Form,
|
Form,
|
||||||
Grid,
|
Grid,
|
||||||
Input,
|
Input,
|
||||||
|
LabelCheckboxX,
|
||||||
P,
|
P,
|
||||||
Summary,
|
Summary,
|
||||||
UkIcon,
|
UkIcon,
|
||||||
@@ -25,7 +28,15 @@ from pydantic_core import PydanticUndefined
|
|||||||
from akkudoktoreos.config.config import ConfigEOS
|
from akkudoktoreos.config.config import ConfigEOS
|
||||||
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
from akkudoktoreos.prediction.pvforecast import PVForecastPlaneSetting
|
from akkudoktoreos.prediction.pvforecast import PVForecastPlaneSetting
|
||||||
from akkudoktoreos.server.dash.components import ConfigCard
|
from akkudoktoreos.server.dash.components import (
|
||||||
|
ConfigCard,
|
||||||
|
JsonView,
|
||||||
|
TextView,
|
||||||
|
make_config_update_list_form,
|
||||||
|
make_config_update_map_form,
|
||||||
|
make_config_update_value_form,
|
||||||
|
)
|
||||||
|
from akkudoktoreos.server.dash.context import request_url_for
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
||||||
@@ -33,6 +44,14 @@ T = TypeVar("T")
|
|||||||
# Dictionary of config names and associated dictionary with keys "value", "result", "error", "open".
|
# Dictionary of config names and associated dictionary with keys "value", "result", "error", "open".
|
||||||
config_update_latest: dict[str, dict[str, Optional[Union[str, bool]]]] = {}
|
config_update_latest: dict[str, dict[str, Optional[Union[str, bool]]]] = {}
|
||||||
|
|
||||||
|
# Current state of config displayed
|
||||||
|
config_visible: dict[str, dict] = {
|
||||||
|
"config-visible-read-only": {
|
||||||
|
"label": "Configuration (read-only)",
|
||||||
|
"visible": False,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_nested_value(
|
def get_nested_value(
|
||||||
dictionary: Union[Dict[str, Any], List[Any]],
|
dictionary: Union[Dict[str, Any], List[Any]],
|
||||||
@@ -178,9 +197,9 @@ def resolve_nested_types(field_type: Any, parent_types: list[str]) -> list[tuple
|
|||||||
return resolved_types
|
return resolved_types
|
||||||
|
|
||||||
|
|
||||||
def configuration(
|
def create_config_details(
|
||||||
model: type[PydanticBaseModel], values: dict, values_prefix: list[str] = []
|
model: type[PydanticBaseModel], values: dict, values_prefix: list[str] = []
|
||||||
) -> list[dict]:
|
) -> dict[str, dict]:
|
||||||
"""Generate configuration details based on provided values and model metadata.
|
"""Generate configuration details based on provided values and model metadata.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -189,9 +208,9 @@ def configuration(
|
|||||||
values_prefix (list[str]): A list of parent type names that prefixes the model values in the values.
|
values_prefix (list[str]): A list of parent type names that prefixes the model values in the values.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[dict]: A sorted list of configuration details, each represented as a dictionary.
|
dict[dict]: A dictionary of configuration details, each represented as a dictionary.
|
||||||
"""
|
"""
|
||||||
configs = []
|
config_details: dict[str, dict] = {}
|
||||||
inner_types: set[type[PydanticBaseModel]] = set()
|
inner_types: set[type[PydanticBaseModel]] = set()
|
||||||
|
|
||||||
for field_name, field_info in list(model.model_fields.items()) + list(
|
for field_name, field_info in list(model.model_fields.items()) + list(
|
||||||
@@ -244,7 +263,7 @@ def configuration(
|
|||||||
.replace("NoneType", "None")
|
.replace("NoneType", "None")
|
||||||
.replace("<class 'float'>", "float")
|
.replace("<class 'float'>", "float")
|
||||||
)
|
)
|
||||||
configs.append(config)
|
config_details[str(config["name"])] = config
|
||||||
found_basic = True
|
found_basic = True
|
||||||
else:
|
else:
|
||||||
new_parent_types = parent_types + nested_parent_types
|
new_parent_types = parent_types + nested_parent_types
|
||||||
@@ -258,18 +277,18 @@ def configuration(
|
|||||||
)
|
)
|
||||||
|
|
||||||
extract_nested_models(field_info, [field_name])
|
extract_nested_models(field_info, [field_name])
|
||||||
return sorted(configs, key=lambda x: x["name"])
|
return config_details
|
||||||
|
|
||||||
|
|
||||||
def get_configuration(eos_host: str, eos_port: Union[str, int]) -> list[dict]:
|
def get_config(eos_host: str, eos_port: Union[str, int]) -> dict[str, Any]:
|
||||||
"""Fetch and process configuration data from the specified EOS server.
|
"""Fetch configuration data from the specified EOS server.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
eos_host (str): The hostname of the EOS server.
|
eos_host (str): The hostname of the EOS server.
|
||||||
eos_port (Union[str, int]): The port of the EOS server.
|
eos_port (Union[str, int]): The port of the EOS server.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
List[dict]: A list of processed configuration entries.
|
dict[str, Any]: A dict of configuration data.
|
||||||
"""
|
"""
|
||||||
server = f"http://{eos_host}:{eos_port}"
|
server = f"http://{eos_host}:{eos_port}"
|
||||||
|
|
||||||
@@ -284,7 +303,7 @@ def get_configuration(eos_host: str, eos_port: Union[str, int]) -> list[dict]:
|
|||||||
warning_msg = f"Can not retrieve configuration from {server}: {e}, {detail}"
|
warning_msg = f"Can not retrieve configuration from {server}: {e}, {detail}"
|
||||||
logger.warning(warning_msg)
|
logger.warning(warning_msg)
|
||||||
|
|
||||||
return configuration(ConfigEOS, config)
|
return config
|
||||||
|
|
||||||
|
|
||||||
def ConfigPlanesCard(
|
def ConfigPlanesCard(
|
||||||
@@ -341,7 +360,7 @@ def ConfigPlanesCard(
|
|||||||
# Create cards for all planes
|
# Create cards for all planes
|
||||||
rows = []
|
rows = []
|
||||||
for i in range(0, max_planes):
|
for i in range(0, max_planes):
|
||||||
plane_config = configuration(
|
plane_config = create_config_details(
|
||||||
PVForecastPlaneSetting(),
|
PVForecastPlaneSetting(),
|
||||||
eos_planes_config,
|
eos_planes_config,
|
||||||
values_prefix=["pvforecast", "planes", str(i)],
|
values_prefix=["pvforecast", "planes", str(i)],
|
||||||
@@ -352,10 +371,12 @@ def ConfigPlanesCard(
|
|||||||
plane_value = json.dumps(eos_planes[i])
|
plane_value = json.dumps(eos_planes[i])
|
||||||
else:
|
else:
|
||||||
plane_value = json.dumps(None)
|
plane_value = json.dumps(None)
|
||||||
for config in plane_config:
|
for config_key in sorted(plane_config.keys()):
|
||||||
|
config = plane_config[config_key]
|
||||||
update_error = config_update_latest.get(config["name"], {}).get("error") # type: ignore
|
update_error = config_update_latest.get(config["name"], {}).get("error") # type: ignore
|
||||||
update_value = config_update_latest.get(config["name"], {}).get("value") # type: ignore
|
update_value = config_update_latest.get(config["name"], {}).get("value") # type: ignore
|
||||||
update_open = config_update_latest.get(config["name"], {}).get("open") # type: ignore
|
update_open = config_update_latest.get(config["name"], {}).get("open") # type: ignore
|
||||||
|
update_form_factory = None
|
||||||
if update_open:
|
if update_open:
|
||||||
planes_update_open = True
|
planes_update_open = True
|
||||||
plane_update_open = True
|
plane_update_open = True
|
||||||
@@ -368,6 +389,12 @@ def ConfigPlanesCard(
|
|||||||
error_msg = "update_error or update_value or update_open of wrong type."
|
error_msg = "update_error or update_value or update_open of wrong type."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise TypeError(error_msg)
|
raise TypeError(error_msg)
|
||||||
|
if config["name"].endswith("pvtechchoice"):
|
||||||
|
update_form_factory = make_config_update_value_form(
|
||||||
|
["crystSi", "CIS", "CdTe", "Unknown"]
|
||||||
|
)
|
||||||
|
elif config["name"].endswith("mountingplace"):
|
||||||
|
update_form_factory = make_config_update_value_form(["free", "building"])
|
||||||
plane_rows.append(
|
plane_rows.append(
|
||||||
ConfigCard(
|
ConfigCard(
|
||||||
config["name"],
|
config["name"],
|
||||||
@@ -380,6 +407,7 @@ def ConfigPlanesCard(
|
|||||||
update_error,
|
update_error,
|
||||||
update_value,
|
update_value,
|
||||||
update_open,
|
update_open,
|
||||||
|
update_form_factory,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
rows.append(
|
rows.append(
|
||||||
@@ -396,7 +424,7 @@ def ConfigPlanesCard(
|
|||||||
P(read_only),
|
P(read_only),
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
P(plane_value),
|
JsonView(json.loads(plane_value)),
|
||||||
),
|
),
|
||||||
cls="list-none",
|
cls="list-none",
|
||||||
),
|
),
|
||||||
@@ -421,12 +449,12 @@ def ConfigPlanesCard(
|
|||||||
P(read_only),
|
P(read_only),
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
P(value),
|
JsonView(json.loads(value)),
|
||||||
),
|
),
|
||||||
cls="list-none",
|
cls="list-none",
|
||||||
),
|
),
|
||||||
Grid(
|
Grid(
|
||||||
P(description),
|
TextView(description),
|
||||||
P(config_type),
|
P(config_type),
|
||||||
),
|
),
|
||||||
# Default
|
# Default
|
||||||
@@ -441,9 +469,10 @@ def ConfigPlanesCard(
|
|||||||
DivRAligned(P("update")),
|
DivRAligned(P("update")),
|
||||||
Grid(
|
Grid(
|
||||||
Form(
|
Form(
|
||||||
|
Input(value="update", type="hidden", id="action"),
|
||||||
Input(value=config_name, type="hidden", id="key"),
|
Input(value=config_name, type="hidden", id="key"),
|
||||||
Input(value=planes_update_value, type="text", id="value"),
|
Input(value=planes_update_value, type="text", id="value"),
|
||||||
hx_put="/eosdash/configuration",
|
hx_put=request_url_for("/eosdash/configuration"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
),
|
),
|
||||||
@@ -454,7 +483,7 @@ def ConfigPlanesCard(
|
|||||||
# Last error
|
# Last error
|
||||||
Grid(
|
Grid(
|
||||||
DivRAligned(P("update error")),
|
DivRAligned(P("update error")),
|
||||||
P(planes_update_error),
|
TextView(planes_update_error),
|
||||||
)
|
)
|
||||||
if planes_update_error
|
if planes_update_error
|
||||||
else None,
|
else None,
|
||||||
@@ -468,33 +497,150 @@ def ConfigPlanesCard(
|
|||||||
|
|
||||||
|
|
||||||
def Configuration(
|
def Configuration(
|
||||||
eos_host: str, eos_port: Union[str, int], configuration: Optional[list[dict]] = None
|
eos_host: str,
|
||||||
|
eos_port: Union[str, int],
|
||||||
|
data: Optional[dict] = None,
|
||||||
) -> Div:
|
) -> Div:
|
||||||
"""Create a visual representation of the configuration.
|
"""Create a visual representation of the configuration.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
eos_host (str): The hostname of the EOS server.
|
eos_host (str): The hostname of the EOS server.
|
||||||
eos_port (Union[str, int]): The port of the EOS server.
|
eos_port (Union[str, int]): The port of the EOS server.
|
||||||
configuration (Optional[list[dict]]): Optional configuration. If not provided it will be
|
data (Optional[dict], optional): Incoming data to trigger config actions. Defaults to None.
|
||||||
retrievd from EOS.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
rows: Rows of configuration details.
|
rows: Rows of configuration details.
|
||||||
"""
|
"""
|
||||||
if not configuration:
|
global config_visible
|
||||||
configuration = get_configuration(eos_host, eos_port)
|
dark = False
|
||||||
|
|
||||||
|
if data and data.get("action", None):
|
||||||
|
if data.get("dark", None) == "true":
|
||||||
|
dark = True
|
||||||
|
if data["action"] == "visible":
|
||||||
|
renderer = data.get("renderer", None)
|
||||||
|
if renderer:
|
||||||
|
config_visible[renderer]["visible"] = bool(data.get(f"{renderer}-visible", False))
|
||||||
|
elif data["action"] == "update":
|
||||||
|
# This data contains a new value for key
|
||||||
|
key = data["key"]
|
||||||
|
value_json_str: str = data.get("value", "")
|
||||||
|
try:
|
||||||
|
value = json.loads(value_json_str)
|
||||||
|
except:
|
||||||
|
if value_json_str in ("None", "none", "Null", "null"):
|
||||||
|
value = None
|
||||||
|
else:
|
||||||
|
value = value_json_str
|
||||||
|
|
||||||
|
error = None
|
||||||
|
config = None
|
||||||
|
try:
|
||||||
|
server = f"http://{eos_host}:{eos_port}"
|
||||||
|
path = key.replace(".", "/")
|
||||||
|
response = requests.put(f"{server}/v1/config/{path}", json=value, timeout=10)
|
||||||
|
response.raise_for_status()
|
||||||
|
config = response.json()
|
||||||
|
except requests.exceptions.HTTPError as err:
|
||||||
|
try:
|
||||||
|
# Try to get 'detail' from the JSON response
|
||||||
|
detail = response.json().get(
|
||||||
|
"detail", f"No error details for value '{value}' '{response.text}'"
|
||||||
|
)
|
||||||
|
except ValueError:
|
||||||
|
# Response is not JSON
|
||||||
|
detail = f"No error details for value '{value}' '{response.text}'"
|
||||||
|
error = f"Can not set {key} on {server}: {err}, {detail}"
|
||||||
|
# Mark all updates as closed
|
||||||
|
for k in config_update_latest:
|
||||||
|
config_update_latest[k]["open"] = False
|
||||||
|
# Remember this update as latest one
|
||||||
|
config_update_latest[key] = {
|
||||||
|
"error": error,
|
||||||
|
"result": config,
|
||||||
|
"value": value_json_str,
|
||||||
|
"open": True,
|
||||||
|
}
|
||||||
|
|
||||||
|
# (Re-)read configuration details to be shure we display actual data
|
||||||
|
config = get_config(eos_host, eos_port)
|
||||||
|
|
||||||
|
# Process configuration data
|
||||||
|
config_details = create_config_details(ConfigEOS, config)
|
||||||
|
|
||||||
|
ConfigMenu = Card(
|
||||||
|
# CheckboxGroup to toggle config data visibility
|
||||||
|
Grid(
|
||||||
|
*[
|
||||||
|
LabelCheckboxX(
|
||||||
|
label=config_visible[renderer]["label"],
|
||||||
|
id=f"{renderer}-visible",
|
||||||
|
name=f"{renderer}-visible",
|
||||||
|
value="true",
|
||||||
|
checked=config_visible[renderer]["visible"],
|
||||||
|
hx_post=request_url_for("/eosdash/configuration"),
|
||||||
|
hx_target="#page-content",
|
||||||
|
hx_swap="innerHTML",
|
||||||
|
hx_vals='js:{ "action": "visible", "renderer": '
|
||||||
|
+ '"'
|
||||||
|
+ f"{renderer}"
|
||||||
|
+ '", '
|
||||||
|
+ '"dark": window.matchMedia("(prefers-color-scheme: dark)").matches '
|
||||||
|
+ "}",
|
||||||
|
# lbl_cls=f"text-{solution_color[renderer]}",
|
||||||
|
)
|
||||||
|
for renderer in list(config_visible.keys())
|
||||||
|
],
|
||||||
|
cols=4,
|
||||||
|
),
|
||||||
|
header=CardTitle("Choose What's Shown"),
|
||||||
|
)
|
||||||
|
|
||||||
rows = []
|
rows = []
|
||||||
last_category = ""
|
last_category = ""
|
||||||
# find some special configuration values
|
# find some special configuration values
|
||||||
max_planes = 0
|
try:
|
||||||
for config in configuration:
|
max_planes = int(config_details["pvforecast.max_planes"]["value"])
|
||||||
if config["name"] == "pvforecast.max_planes":
|
except:
|
||||||
try:
|
max_planes = 0
|
||||||
max_planes = int(config["value"])
|
logger.debug(f"max_planes: {max_planes}")
|
||||||
except:
|
|
||||||
max_planes = 0
|
try:
|
||||||
|
homeassistant_entity_ids = json.loads(
|
||||||
|
config_details["adapter.homeassistant.homeassistant_entity_ids"]["value"]
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
homeassistant_entity_ids = []
|
||||||
|
logger.debug(f"homeassistant_entity_ids: {homeassistant_entity_ids}")
|
||||||
|
|
||||||
|
eos_solution_entity_ids = []
|
||||||
|
try:
|
||||||
|
eos_solution_entity_ids = json.loads(
|
||||||
|
config_details["adapter.homeassistant.eos_solution_entity_ids"]["value"]
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
eos_solution_entity_ids = []
|
||||||
|
logger.debug(f"eos_solution_entity_ids {eos_solution_entity_ids}")
|
||||||
|
|
||||||
|
eos_device_instruction_entity_ids = []
|
||||||
|
try:
|
||||||
|
eos_device_instruction_entity_ids = json.loads(
|
||||||
|
config_details["adapter.homeassistant.eos_device_instruction_entity_ids"]["value"]
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
eos_device_instruction_entity_ids = []
|
||||||
|
logger.debug(f"eos_device_instruction_entity_ids {eos_device_instruction_entity_ids}")
|
||||||
|
|
||||||
|
devices_measurement_keys = []
|
||||||
|
try:
|
||||||
|
devices_measurement_keys = json.loads(config_details["devices.measurement_keys"]["value"])
|
||||||
|
except:
|
||||||
|
devices_measurement_keys = []
|
||||||
|
logger.debug(f"devices_measurement_keys {devices_measurement_keys}")
|
||||||
|
|
||||||
# build visual representation
|
# build visual representation
|
||||||
for config in configuration:
|
for config_key in sorted(config_details.keys()):
|
||||||
|
config = config_details[config_key]
|
||||||
category = config["name"].split(".")[0]
|
category = config["name"].split(".")[0]
|
||||||
if category != last_category:
|
if category != last_category:
|
||||||
rows.append(H3(category))
|
rows.append(H3(category))
|
||||||
@@ -512,6 +658,12 @@ def Configuration(
|
|||||||
error_msg = "update_error or update_value or update_open of wrong type."
|
error_msg = "update_error or update_value or update_open of wrong type."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise TypeError(error_msg)
|
raise TypeError(error_msg)
|
||||||
|
if (
|
||||||
|
not config_visible["config-visible-read-only"]["visible"]
|
||||||
|
and config["read-only"] != "rw"
|
||||||
|
):
|
||||||
|
# Do not display read only values
|
||||||
|
continue
|
||||||
if (
|
if (
|
||||||
config["type"]
|
config["type"]
|
||||||
== "Optional[list[akkudoktoreos.prediction.pvforecast.PVForecastPlaneSetting]]"
|
== "Optional[list[akkudoktoreos.prediction.pvforecast.PVForecastPlaneSetting]]"
|
||||||
@@ -532,7 +684,47 @@ def Configuration(
|
|||||||
update_open,
|
update_open,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
else:
|
elif not config["deprecated"]:
|
||||||
|
update_form_factory = None
|
||||||
|
if config["name"].endswith(".provider"):
|
||||||
|
# Special configuration for prediction provider setting
|
||||||
|
try:
|
||||||
|
provider_ids = json.loads(config_details[config["name"] + "s"]["value"])
|
||||||
|
except:
|
||||||
|
provider_ids = []
|
||||||
|
if config["type"].startswith("Optional[list"):
|
||||||
|
update_form_factory = make_config_update_list_form(provider_ids)
|
||||||
|
else:
|
||||||
|
provider_ids.append("None")
|
||||||
|
update_form_factory = make_config_update_value_form(provider_ids)
|
||||||
|
elif config["name"].startswith("adapter.homeassistant.config_entity_ids"):
|
||||||
|
# Home Assistant adapter config entities
|
||||||
|
update_form_factory = make_config_update_map_form(None, homeassistant_entity_ids)
|
||||||
|
elif config["name"].startswith("adapter.homeassistant.load_emr_entity_ids"):
|
||||||
|
# Home Assistant adapter load energy meter readings entities
|
||||||
|
update_form_factory = make_config_update_list_form(homeassistant_entity_ids)
|
||||||
|
elif config["name"].startswith("adapter.homeassistant.pv_production_emr_entity_ids"):
|
||||||
|
# Home Assistant adapter pv energy meter readings entities
|
||||||
|
update_form_factory = make_config_update_list_form(homeassistant_entity_ids)
|
||||||
|
elif config["name"].startswith("adapter.homeassistant.device_measurement_entity_ids"):
|
||||||
|
# Home Assistant adapter device measurement entities
|
||||||
|
update_form_factory = make_config_update_map_form(
|
||||||
|
devices_measurement_keys, homeassistant_entity_ids
|
||||||
|
)
|
||||||
|
elif config["name"].startswith("adapter.homeassistant.device_instruction_entity_ids"):
|
||||||
|
# Home Assistant adapter device instruction entities
|
||||||
|
update_form_factory = make_config_update_list_form(
|
||||||
|
eos_device_instruction_entity_ids
|
||||||
|
)
|
||||||
|
elif config["name"].startswith("adapter.homeassistant.solution_entity_ids"):
|
||||||
|
# Home Assistant adapter optimization solution entities
|
||||||
|
update_form_factory = make_config_update_list_form(eos_solution_entity_ids)
|
||||||
|
elif config["name"].startswith("ems.mode"):
|
||||||
|
# Energy managemnt mode
|
||||||
|
update_form_factory = make_config_update_value_form(
|
||||||
|
["OPTIMIZATION", "PREDICTION", "None"]
|
||||||
|
)
|
||||||
|
|
||||||
rows.append(
|
rows.append(
|
||||||
ConfigCard(
|
ConfigCard(
|
||||||
config["name"],
|
config["name"],
|
||||||
@@ -545,61 +737,8 @@ def Configuration(
|
|||||||
update_error,
|
update_error,
|
||||||
update_value,
|
update_value,
|
||||||
update_open,
|
update_open,
|
||||||
|
update_form_factory,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
return Div(*rows, cls="space-y-4")
|
|
||||||
|
|
||||||
|
return Div(ConfigMenu, *rows, cls="space-y-3")
|
||||||
def ConfigKeyUpdate(eos_host: str, eos_port: Union[str, int], key: str, value: str) -> P:
|
|
||||||
"""Update configuration key and create a visual representation of the configuration.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
eos_host (str): The hostname of the EOS server.
|
|
||||||
eos_port (Union[str, int]): The port of the EOS server.
|
|
||||||
key (str): configuration key in dot notation
|
|
||||||
value (str): configuration value as json string
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
rows: Rows of configuration details.
|
|
||||||
"""
|
|
||||||
server = f"http://{eos_host}:{eos_port}"
|
|
||||||
path = key.replace(".", "/")
|
|
||||||
try:
|
|
||||||
data = json.loads(value)
|
|
||||||
except:
|
|
||||||
if value in ("None", "none", "Null", "null"):
|
|
||||||
data = None
|
|
||||||
else:
|
|
||||||
data = value
|
|
||||||
|
|
||||||
error = None
|
|
||||||
config = None
|
|
||||||
try:
|
|
||||||
response = requests.put(f"{server}/v1/config/{path}", json=data, timeout=10)
|
|
||||||
response.raise_for_status()
|
|
||||||
config = response.json()
|
|
||||||
except requests.exceptions.HTTPError as err:
|
|
||||||
try:
|
|
||||||
# Try to get 'detail' from the JSON response
|
|
||||||
detail = response.json().get(
|
|
||||||
"detail", f"No error details for data '{data}' '{response.text}'"
|
|
||||||
)
|
|
||||||
except ValueError:
|
|
||||||
# Response is not JSON
|
|
||||||
detail = f"No error details for data '{data}' '{response.text}'"
|
|
||||||
error = f"Can not set {key} on {server}: {err}, {detail}"
|
|
||||||
# Mark all updates as closed
|
|
||||||
for k in config_update_latest:
|
|
||||||
config_update_latest[k]["open"] = False
|
|
||||||
# Remember this update as latest one
|
|
||||||
config_update_latest[key] = {
|
|
||||||
"error": error,
|
|
||||||
"result": config,
|
|
||||||
"value": value,
|
|
||||||
"open": True,
|
|
||||||
}
|
|
||||||
if error or config is None:
|
|
||||||
# Reread configuration to be shure we display actual data
|
|
||||||
return Configuration(eos_host, eos_port)
|
|
||||||
# Use configuration already provided
|
|
||||||
return Configuration(eos_host, eos_port, configuration(ConfigEOS, config))
|
|
||||||
|
|||||||
169
src/akkudoktoreos/server/dash/context.py
Normal file
169
src/akkudoktoreos/server/dash/context.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Awaitable, Callable, Optional
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
from platformdirs import user_config_dir
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware
|
||||||
|
from starlette.requests import Request
|
||||||
|
from starlette.responses import Response
|
||||||
|
|
||||||
|
# Home assistant token, if running under Home Assistant
|
||||||
|
HASSIO_TOKEN = os.environ.get("HASSIO_TOKEN")
|
||||||
|
|
||||||
|
# Compute global root path at startup
|
||||||
|
# Will be replaced on first request if Ingress is active
|
||||||
|
ROOT_PATH = "/"
|
||||||
|
|
||||||
|
# EOSdash path prefix
|
||||||
|
EOSDASH_ROOT = "eosdash/"
|
||||||
|
|
||||||
|
# Directory to export files to, or to import files from
|
||||||
|
export_import_directory = (
|
||||||
|
Path(os.environ.get("EOS_DATA_DIR", user_config_dir("net.akkudoktor.eosdash", "akkudoktor")))
|
||||||
|
if not HASSIO_TOKEN
|
||||||
|
else Path("/data")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class IngressMiddleware(BaseHTTPMiddleware):
|
||||||
|
"""Middleware to handle Home Assistant Ingress path prefixes.
|
||||||
|
|
||||||
|
This middleware enables FastHTML applications to work seamlessly both with
|
||||||
|
and without Home Assistant Ingress. When deployed as a Home Assistant add-on
|
||||||
|
with Ingress enabled, it automatically handles the path prefix routing.
|
||||||
|
|
||||||
|
Home Assistant Ingress proxies add-on traffic through paths like
|
||||||
|
`/api/hassio_ingress/<token>/`, which requires setting the application's
|
||||||
|
root_path for correct URL generation. This middleware detects the Ingress
|
||||||
|
path from the X-Ingress-Path header and configures the request scope
|
||||||
|
accordingly.
|
||||||
|
|
||||||
|
When running standalone (development or direct access), the middleware
|
||||||
|
passes requests through unchanged, allowing normal operation.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
None
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
>>> from fasthtml.common import FastHTML
|
||||||
|
>>> from starlette.middleware import Middleware
|
||||||
|
>>>
|
||||||
|
>>> app = FastHTML(middleware=[Middleware(IngressMiddleware)])
|
||||||
|
>>>
|
||||||
|
>>> @app.get("/")
|
||||||
|
>>> def home():
|
||||||
|
... return "Hello World"
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- All htmx and route URLs should use relative paths (e.g., "/api/data")
|
||||||
|
- The middleware automatically adapts to both Ingress and direct access
|
||||||
|
- No code changes needed when switching between deployment modes
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def dispatch(
|
||||||
|
self, request: Request, call_next: Callable[[Request], Awaitable[Response]]
|
||||||
|
) -> Response:
|
||||||
|
"""Process the request and set root_path if running under Ingress.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request: The incoming Starlette Request object.
|
||||||
|
call_next: Callable to invoke the next middleware or route handler.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Response: The response from the application after processing.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
The X-Ingress-Path header is automatically added by Home Assistant
|
||||||
|
when proxying requests through Ingress.
|
||||||
|
"""
|
||||||
|
global ROOT_PATH
|
||||||
|
|
||||||
|
# Home Assistant passes the ingress path in this header
|
||||||
|
# Try multiple header variations (case-insensitive)
|
||||||
|
ingress_path = (
|
||||||
|
request.headers.get("X-Ingress-Path", "")
|
||||||
|
or request.headers.get("x-ingress-path", "")
|
||||||
|
or request.headers.get("X-INGRESS-PATH", "")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Debug logging - remove after testing
|
||||||
|
logger.debug(f"All headers: {dict(request.headers)}")
|
||||||
|
logger.debug(f"Ingress path: {ingress_path}")
|
||||||
|
logger.debug(f"Request path: {request.url.path}")
|
||||||
|
|
||||||
|
# Only set root_path if we have an ingress path
|
||||||
|
if ingress_path:
|
||||||
|
ROOT_PATH = ingress_path
|
||||||
|
request.scope["root_path"] = ingress_path
|
||||||
|
# Otherwise, root_path remains empty (normal operation)
|
||||||
|
|
||||||
|
response = await call_next(request)
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
# Helper functions
|
||||||
|
def request_url_for(path: str, root_path: Optional[str] = None) -> str:
|
||||||
|
"""Generate a full URL including the root_path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: Relative path **inside the app** (e.g., "eosdash/footer" or "eosdash/assets/logo.png").
|
||||||
|
root_path: Root path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Absolute URL including the root_path.
|
||||||
|
"""
|
||||||
|
global ROOT_PATH, EOSDASH_ROOT
|
||||||
|
|
||||||
|
# Step 1: fallback to global root
|
||||||
|
if root_path is None:
|
||||||
|
root_path = ROOT_PATH
|
||||||
|
|
||||||
|
# Normalize root path
|
||||||
|
root_path = root_path.rstrip("/") + "/"
|
||||||
|
|
||||||
|
# Normalize path
|
||||||
|
if path.startswith(root_path):
|
||||||
|
# Strip root_path prefix
|
||||||
|
path = path[len(root_path) :]
|
||||||
|
|
||||||
|
# Remove leading / if any
|
||||||
|
path = path.lstrip("/")
|
||||||
|
|
||||||
|
# Strip EOSDASH_ROOT if present
|
||||||
|
if path.startswith(EOSDASH_ROOT):
|
||||||
|
path = path[len(EOSDASH_ROOT) :]
|
||||||
|
|
||||||
|
# Build final URL
|
||||||
|
result = root_path + EOSDASH_ROOT + path.lstrip("/")
|
||||||
|
|
||||||
|
# Normalize accidental double slashes (except leading)
|
||||||
|
while "//" in result[1:]:
|
||||||
|
result = result.replace("//", "/")
|
||||||
|
|
||||||
|
logger.debug(f"URL for path '{path}' with root path '{root_path}': '{result}'")
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def safe_asset_path(filepath: str) -> Path:
|
||||||
|
"""Return a safe filesystem path for an asset under dash/assets/.
|
||||||
|
|
||||||
|
This prevents directory traversal attacks by restricting paths to
|
||||||
|
the assets folder.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filepath (str): Relative asset path requested by the client.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path: Absolute Path object pointing to the asset file.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If the filepath attempts to traverse directories using '../'.
|
||||||
|
"""
|
||||||
|
if ".." in filepath or filepath.startswith("/"):
|
||||||
|
raise ValueError(f"Forbidden file path: {filepath}")
|
||||||
|
|
||||||
|
asset_path = Path(__file__).parent / "dash/assets" / filepath
|
||||||
|
return asset_path
|
||||||
@@ -9,8 +9,6 @@ from requests.exceptions import RequestException
|
|||||||
import akkudoktoreos.server.dash.eosstatus as eosstatus
|
import akkudoktoreos.server.dash.eosstatus as eosstatus
|
||||||
from akkudoktoreos.config.config import get_config
|
from akkudoktoreos.config.config import get_config
|
||||||
|
|
||||||
config_eos = get_config()
|
|
||||||
|
|
||||||
|
|
||||||
def get_alive(eos_host: str, eos_port: Union[str, int]) -> str:
|
def get_alive(eos_host: str, eos_port: Union[str, int]) -> str:
|
||||||
"""Fetch alive information from the specified EOS server.
|
"""Fetch alive information from the specified EOS server.
|
||||||
@@ -42,9 +40,9 @@ def get_alive(eos_host: str, eos_port: Union[str, int]) -> str:
|
|||||||
|
|
||||||
def Footer(eos_host: Optional[str], eos_port: Optional[Union[str, int]]) -> str:
|
def Footer(eos_host: Optional[str], eos_port: Optional[Union[str, int]]) -> str:
|
||||||
if eos_host is None:
|
if eos_host is None:
|
||||||
eos_host = config_eos.server.host
|
eos_host = get_config().server.host
|
||||||
if eos_port is None:
|
if eos_port is None:
|
||||||
eos_port = config_eos.server.port
|
eos_port = get_config().server.port
|
||||||
alive_icon = None
|
alive_icon = None
|
||||||
if eos_host is None or eos_port is None:
|
if eos_host is None or eos_port is None:
|
||||||
alive = "EOS server not given: {eos_host}:{eos_port}"
|
alive = "EOS server not given: {eos_host}:{eos_port}"
|
||||||
|
|||||||
@@ -1,5 +1,8 @@
|
|||||||
"""Markdown rendering with MonsterUI HTML classes."""
|
"""Markdown rendering with MonsterUI HTML classes."""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import mimetypes
|
||||||
|
from pathlib import Path
|
||||||
from typing import Any, List, Optional, Union
|
from typing import Any, List, Optional, Union
|
||||||
|
|
||||||
from fasthtml.common import FT, Div, NotStr
|
from fasthtml.common import FT, Div, NotStr
|
||||||
@@ -8,113 +11,138 @@ from markdown_it.renderer import RendererHTML
|
|||||||
from markdown_it.token import Token
|
from markdown_it.token import Token
|
||||||
from monsterui.foundations import stringify
|
from monsterui.foundations import stringify
|
||||||
|
|
||||||
|
# Where to find the static data assets
|
||||||
|
ASSETS_DIR = Path(__file__).parent / "assets"
|
||||||
|
|
||||||
|
ASSETS_PREFIX = "/eosdash/assets/"
|
||||||
|
IMAGE_EXTS = {".png", ".jpg", ".jpeg", ".gif", ".webp", ".svg", ".ico"}
|
||||||
|
|
||||||
|
|
||||||
|
def file_to_data_uri(file_path: Path) -> str:
|
||||||
|
"""Convert a file to a data URI.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to the file to convert.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Data URI string with format data:mime/type;base64,encoded_data
|
||||||
|
"""
|
||||||
|
ext = file_path.suffix.lower()
|
||||||
|
|
||||||
|
# Determine MIME type
|
||||||
|
mime, _ = mimetypes.guess_type(str(file_path))
|
||||||
|
if mime is None:
|
||||||
|
mime = f"image/{ext.lstrip('.')}"
|
||||||
|
|
||||||
|
# Read file as bytes and encode to base64
|
||||||
|
raw = file_path.read_bytes()
|
||||||
|
encoded = base64.b64encode(raw).decode("ascii")
|
||||||
|
|
||||||
|
return f"data:{mime};base64,{encoded}"
|
||||||
|
|
||||||
|
|
||||||
def render_heading(
|
def render_heading(
|
||||||
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Custom renderer for Markdown headings.
|
"""Custom renderer for Markdown headings with MonsterUI styling."""
|
||||||
|
|
||||||
Adds specific CSS classes based on the heading level.
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
self: The renderer instance.
|
|
||||||
tokens: List of tokens to be rendered.
|
|
||||||
idx: Index of the current token.
|
|
||||||
options: Rendering options.
|
|
||||||
env: Environment sandbox for plugins.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The rendered token as a string.
|
|
||||||
"""
|
|
||||||
if tokens[idx].markup == "#":
|
if tokens[idx].markup == "#":
|
||||||
tokens[idx].attrSet("class", "uk-heading-divider uk-h1 uk-margin")
|
tokens[idx].attrSet(
|
||||||
|
"class",
|
||||||
|
"scroll-m-20 text-4xl font-extrabold tracking-tight lg:text-5xl mt-8 mb-4 border-b pb-2",
|
||||||
|
)
|
||||||
elif tokens[idx].markup == "##":
|
elif tokens[idx].markup == "##":
|
||||||
tokens[idx].attrSet("class", "uk-heading-divider uk-h2 uk-margin")
|
tokens[idx].attrSet(
|
||||||
|
"class", "scroll-m-20 border-b pb-2 text-3xl font-semibold tracking-tight mt-6 mb-3"
|
||||||
|
)
|
||||||
elif tokens[idx].markup == "###":
|
elif tokens[idx].markup == "###":
|
||||||
tokens[idx].attrSet("class", "uk-heading-divider uk-h3 uk-margin")
|
tokens[idx].attrSet("class", "scroll-m-20 text-2xl font-semibold tracking-tight mt-5 mb-2")
|
||||||
elif tokens[idx].markup == "####":
|
elif tokens[idx].markup == "####":
|
||||||
tokens[idx].attrSet("class", "uk-heading-divider uk-h4 uk-margin")
|
tokens[idx].attrSet("class", "scroll-m-20 text-xl font-semibold tracking-tight mt-4 mb-2")
|
||||||
|
|
||||||
# pass token to default renderer.
|
|
||||||
return self.renderToken(tokens, idx, options, env)
|
return self.renderToken(tokens, idx, options, env)
|
||||||
|
|
||||||
|
|
||||||
def render_paragraph(
|
def render_paragraph(
|
||||||
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Custom renderer for Markdown paragraphs.
|
"""Custom renderer for Markdown paragraphs with MonsterUI styling."""
|
||||||
|
tokens[idx].attrSet("class", "leading-7 [&:not(:first-child)]:mt-6")
|
||||||
Adds specific CSS classes.
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
self: The renderer instance.
|
|
||||||
tokens: List of tokens to be rendered.
|
|
||||||
idx: Index of the current token.
|
|
||||||
options: Rendering options.
|
|
||||||
env: Environment sandbox for plugins.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The rendered token as a string.
|
|
||||||
"""
|
|
||||||
tokens[idx].attrSet("class", "uk-paragraph")
|
|
||||||
|
|
||||||
# pass token to default renderer.
|
|
||||||
return self.renderToken(tokens, idx, options, env)
|
return self.renderToken(tokens, idx, options, env)
|
||||||
|
|
||||||
|
|
||||||
def render_blockquote(
|
def render_blockquote(
|
||||||
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Custom renderer for Markdown blockquotes.
|
"""Custom renderer for Markdown blockquotes with MonsterUI styling."""
|
||||||
|
tokens[idx].attrSet("class", "mt-6 border-l-2 pl-6 italic border-primary")
|
||||||
|
return self.renderToken(tokens, idx, options, env)
|
||||||
|
|
||||||
Adds specific CSS classes.
|
|
||||||
|
|
||||||
Parameters:
|
def render_list(self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict) -> str:
|
||||||
self: The renderer instance.
|
"""Custom renderer for lists with MonsterUI styling."""
|
||||||
tokens: List of tokens to be rendered.
|
tokens[idx].attrSet("class", "my-6 ml-6 list-disc [&>li]:mt-2")
|
||||||
idx: Index of the current token.
|
return self.renderToken(tokens, idx, options, env)
|
||||||
options: Rendering options.
|
|
||||||
env: Environment sandbox for plugins.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The rendered token as a string.
|
|
||||||
"""
|
|
||||||
tokens[idx].attrSet("class", "uk-blockquote")
|
|
||||||
|
|
||||||
# pass token to default renderer.
|
def render_image(
|
||||||
|
self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict
|
||||||
|
) -> str:
|
||||||
|
"""Custom renderer for Markdown images with MonsterUI styling."""
|
||||||
|
token = tokens[idx]
|
||||||
|
src = token.attrGet("src")
|
||||||
|
alt = token.content or ""
|
||||||
|
|
||||||
|
if src:
|
||||||
|
pos = src.find(ASSETS_PREFIX)
|
||||||
|
if pos != -1:
|
||||||
|
asset_rel = src[pos + len(ASSETS_PREFIX) :]
|
||||||
|
fs_path = ASSETS_DIR / asset_rel
|
||||||
|
|
||||||
|
if fs_path.exists():
|
||||||
|
data_uri = file_to_data_uri(fs_path)
|
||||||
|
token.attrSet("src", data_uri)
|
||||||
|
# MonsterUI/shadcn styling for images
|
||||||
|
token.attrSet("class", "rounded-lg border my-6 max-w-full h-auto")
|
||||||
|
|
||||||
return self.renderToken(tokens, idx, options, env)
|
return self.renderToken(tokens, idx, options, env)
|
||||||
|
|
||||||
|
|
||||||
def render_link(self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict) -> str:
|
def render_link(self: RendererHTML, tokens: List[Token], idx: int, options: dict, env: dict) -> str:
|
||||||
"""Custom renderer for Markdown links.
|
"""Custom renderer for Markdown links with MonsterUI styling."""
|
||||||
|
token = tokens[idx]
|
||||||
|
href = token.attrGet("href")
|
||||||
|
|
||||||
Adds the target attribute to open links in a new tab.
|
if href:
|
||||||
|
pos = href.find(ASSETS_PREFIX)
|
||||||
|
if pos != -1:
|
||||||
|
asset_rel = href[pos + len(ASSETS_PREFIX) :]
|
||||||
|
key = asset_rel.rsplit(".", 1)[0]
|
||||||
|
if key in env:
|
||||||
|
return str(env[key])
|
||||||
|
|
||||||
Parameters:
|
# MonsterUI link styling
|
||||||
self: The renderer instance.
|
token.attrSet(
|
||||||
tokens: List of tokens to be rendered.
|
"class", "font-medium text-primary underline underline-offset-4 hover:text-primary/80"
|
||||||
idx: Index of the current token.
|
)
|
||||||
options: Rendering options.
|
token.attrSet("target", "_blank")
|
||||||
env: Environment sandbox for plugins.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The rendered token as a string.
|
|
||||||
"""
|
|
||||||
tokens[idx].attrSet("class", "uk-link")
|
|
||||||
tokens[idx].attrSet("target", "_blank")
|
|
||||||
|
|
||||||
# pass token to default renderer.
|
|
||||||
return self.renderToken(tokens, idx, options, env)
|
return self.renderToken(tokens, idx, options, env)
|
||||||
|
|
||||||
|
|
||||||
|
# Register all renderers
|
||||||
markdown = MarkdownIt("gfm-like")
|
markdown = MarkdownIt("gfm-like")
|
||||||
markdown.add_render_rule("heading_open", render_heading)
|
markdown.add_render_rule("heading_open", render_heading)
|
||||||
markdown.add_render_rule("paragraph_open", render_paragraph)
|
markdown.add_render_rule("paragraph_open", render_paragraph)
|
||||||
markdown.add_render_rule("blockquote_open", render_blockquote)
|
markdown.add_render_rule("blockquote_open", render_blockquote)
|
||||||
markdown.add_render_rule("link_open", render_link)
|
markdown.add_render_rule("link_open", render_link)
|
||||||
|
markdown.add_render_rule("image", render_image)
|
||||||
|
markdown.add_render_rule("bullet_list_open", render_list)
|
||||||
|
markdown.add_render_rule("ordered_list_open", render_list)
|
||||||
|
|
||||||
|
|
||||||
markdown_cls = "bg-background text-lg ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
# Updated wrapper class to match shadcn/ui theme
|
||||||
|
markdown_cls = "text-foreground space-y-4"
|
||||||
|
|
||||||
|
# markdown_cls = "bg-background text-lg ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
||||||
|
|
||||||
|
|
||||||
def Markdown(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> FT:
|
def Markdown(*c: Any, cls: Optional[Union[str, tuple]] = None, **kwargs: Any) -> FT:
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ from akkudoktoreos.core.emplan import (
|
|||||||
from akkudoktoreos.optimization.optimization import OptimizationSolution
|
from akkudoktoreos.optimization.optimization import OptimizationSolution
|
||||||
from akkudoktoreos.server.dash.bokeh import Bokeh, bokey_apply_theme_to_plot
|
from akkudoktoreos.server.dash.bokeh import Bokeh, bokey_apply_theme_to_plot
|
||||||
from akkudoktoreos.server.dash.components import Error
|
from akkudoktoreos.server.dash.components import Error
|
||||||
|
from akkudoktoreos.server.dash.context import request_url_for
|
||||||
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
|
from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime
|
||||||
|
|
||||||
# bar width for 1 hour bars (time given in millseconds)
|
# bar width for 1 hour bars (time given in millseconds)
|
||||||
@@ -385,7 +386,7 @@ def SolutionCard(solution: OptimizationSolution, config: SettingsEOS, data: Opti
|
|||||||
name=f"{renderer}-visible",
|
name=f"{renderer}-visible",
|
||||||
value="true",
|
value="true",
|
||||||
checked=solution_visible[renderer],
|
checked=solution_visible[renderer],
|
||||||
hx_post="/eosdash/plan",
|
hx_post=request_url_for("/eosdash/plan"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{ "category": "solution", "action": "visible", "renderer": '
|
hx_vals='js:{ "category": "solution", "action": "visible", "renderer": '
|
||||||
@@ -412,7 +413,7 @@ def SolutionCard(solution: OptimizationSolution, config: SettingsEOS, data: Opti
|
|||||||
name=f"{renderer}-visible",
|
name=f"{renderer}-visible",
|
||||||
value="true",
|
value="true",
|
||||||
checked=solution_visible[renderer],
|
checked=solution_visible[renderer],
|
||||||
hx_post="/eosdash/plan",
|
hx_post=request_url_for("/eosdash/plan"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{ "category": "solution", "action": "visible", "renderer": '
|
hx_vals='js:{ "category": "solution", "action": "visible", "renderer": '
|
||||||
@@ -439,7 +440,7 @@ def SolutionCard(solution: OptimizationSolution, config: SettingsEOS, data: Opti
|
|||||||
name=f"{renderer}-visible",
|
name=f"{renderer}-visible",
|
||||||
value="true",
|
value="true",
|
||||||
checked=solution_visible[renderer],
|
checked=solution_visible[renderer],
|
||||||
hx_post="/eosdash/plan",
|
hx_post=request_url_for("/eosdash/plan"),
|
||||||
hx_target="#page-content",
|
hx_target="#page-content",
|
||||||
hx_swap="innerHTML",
|
hx_swap="innerHTML",
|
||||||
hx_vals='js:{ "category": "solution", "action": "visible", "renderer": '
|
hx_vals='js:{ "category": "solution", "action": "visible", "renderer": '
|
||||||
@@ -595,7 +596,7 @@ def Plan(eos_host: str, eos_port: Union[str, int], data: Optional[dict] = None)
|
|||||||
result.raise_for_status()
|
result.raise_for_status()
|
||||||
except requests.exceptions.HTTPError as err:
|
except requests.exceptions.HTTPError as err:
|
||||||
detail = result.json()["detail"]
|
detail = result.json()["detail"]
|
||||||
return Error(f"Can not retrieve configuration from {server}: {err}, {detail}")
|
return Error(f"Can not retrieve configuration from {server}: {err},\n{detail}")
|
||||||
eosstatus.eos_config = SettingsEOS(**result.json())
|
eosstatus.eos_config = SettingsEOS(**result.json())
|
||||||
|
|
||||||
# Get the optimization solution
|
# Get the optimization solution
|
||||||
@@ -607,7 +608,7 @@ def Plan(eos_host: str, eos_port: Union[str, int], data: Optional[dict] = None)
|
|||||||
solution_json = result.json()
|
solution_json = result.json()
|
||||||
except requests.exceptions.HTTPError as e:
|
except requests.exceptions.HTTPError as e:
|
||||||
detail = result.json()["detail"]
|
detail = result.json()["detail"]
|
||||||
warning_msg = f"Can not retrieve optimization solution from {server}: {e}, {detail}"
|
warning_msg = f"Can not retrieve optimization solution from {server}: {e},\n{detail}"
|
||||||
logger.warning(warning_msg)
|
logger.warning(warning_msg)
|
||||||
return Error(warning_msg)
|
return Error(warning_msg)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -623,7 +624,7 @@ def Plan(eos_host: str, eos_port: Union[str, int], data: Optional[dict] = None)
|
|||||||
plan_json = result.json()
|
plan_json = result.json()
|
||||||
except requests.exceptions.HTTPError as e:
|
except requests.exceptions.HTTPError as e:
|
||||||
detail = result.json()["detail"]
|
detail = result.json()["detail"]
|
||||||
warning_msg = f"Can not retrieve plan from {server}: {e}, {detail}"
|
warning_msg = f"Can not retrieve plan from {server}: {e},\n{detail}"
|
||||||
logger.warning(warning_msg)
|
logger.warning(warning_msg)
|
||||||
return Error(warning_msg)
|
return Error(warning_msg)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import subprocess
|
|||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import asynccontextmanager
|
||||||
from pathlib import Path
|
|
||||||
from typing import Annotated, Any, AsyncGenerator, Dict, List, Optional, Union
|
from typing import Annotated, Any, AsyncGenerator, Dict, List, Optional, Union
|
||||||
|
|
||||||
import psutil
|
import psutil
|
||||||
@@ -33,7 +32,7 @@ from akkudoktoreos.core.emplan import EnergyManagementPlan, ResourceStatus
|
|||||||
from akkudoktoreos.core.ems import get_ems
|
from akkudoktoreos.core.ems import get_ems
|
||||||
from akkudoktoreos.core.emsettings import EnergyManagementMode
|
from akkudoktoreos.core.emsettings import EnergyManagementMode
|
||||||
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
||||||
from akkudoktoreos.core.logging import read_file_log, track_logging_config
|
from akkudoktoreos.core.logging import logging_track_config, read_file_log
|
||||||
from akkudoktoreos.core.pydantic import (
|
from akkudoktoreos.core.pydantic import (
|
||||||
PydanticBaseModel,
|
PydanticBaseModel,
|
||||||
PydanticDateTimeData,
|
PydanticDateTimeData,
|
||||||
@@ -54,11 +53,13 @@ from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
|||||||
from akkudoktoreos.prediction.prediction import get_prediction
|
from akkudoktoreos.prediction.prediction import get_prediction
|
||||||
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||||
from akkudoktoreos.server.rest.error import create_error_page
|
from akkudoktoreos.server.rest.error import create_error_page
|
||||||
|
from akkudoktoreos.server.rest.starteosdash import run_eosdash_supervisor
|
||||||
from akkudoktoreos.server.rest.tasks import repeat_every
|
from akkudoktoreos.server.rest.tasks import repeat_every
|
||||||
from akkudoktoreos.server.server import (
|
from akkudoktoreos.server.server import (
|
||||||
|
drop_root_privileges,
|
||||||
|
fix_data_directories_permissions,
|
||||||
get_default_host,
|
get_default_host,
|
||||||
get_host_ip,
|
get_host_ip,
|
||||||
validate_ip_or_hostname,
|
|
||||||
wait_for_port_free,
|
wait_for_port_free,
|
||||||
)
|
)
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
||||||
@@ -70,15 +71,18 @@ prediction_eos = get_prediction()
|
|||||||
ems_eos = get_ems()
|
ems_eos = get_ems()
|
||||||
resource_registry_eos = get_resource_registry()
|
resource_registry_eos = get_resource_registry()
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------
|
# ------------------------------------
|
||||||
# Logging configuration at import time
|
# Logging configuration at import time
|
||||||
# ------------------------------------
|
# ------------------------------------
|
||||||
|
|
||||||
logger.remove()
|
logger.remove()
|
||||||
track_logging_config(config_eos, "logging", None, None)
|
logging_track_config(config_eos, "logging", None, None)
|
||||||
config_eos.track_nested_value("/logging", track_logging_config)
|
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# Configuration change tracking
|
||||||
|
# -----------------------------
|
||||||
|
|
||||||
|
config_eos.track_nested_value("/logging", logging_track_config)
|
||||||
|
|
||||||
# ----------------------------
|
# ----------------------------
|
||||||
# Safe argparse at import time
|
# Safe argparse at import time
|
||||||
@@ -114,6 +118,11 @@ parser.add_argument(
|
|||||||
default=None,
|
default=None,
|
||||||
help="Enable or disable automatic EOSdash startup. Options: True or False (default: value from config)",
|
help="Enable or disable automatic EOSdash startup. Options: True or False (default: value from config)",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--run_as_user",
|
||||||
|
type=str,
|
||||||
|
help="The unprivileged user account the EOS server shall switch to after performing root-level startup tasks.",
|
||||||
|
)
|
||||||
|
|
||||||
# Command line arguments
|
# Command line arguments
|
||||||
args: argparse.Namespace
|
args: argparse.Namespace
|
||||||
@@ -137,7 +146,7 @@ if args and args.log_level is not None:
|
|||||||
# Ensure log_level from command line is in config settings
|
# Ensure log_level from command line is in config settings
|
||||||
if log_level in LOGGING_LEVELS:
|
if log_level in LOGGING_LEVELS:
|
||||||
# Setup console logging level using nested value
|
# Setup console logging level using nested value
|
||||||
# - triggers logging configuration by track_logging_config
|
# - triggers logging configuration by logging_track_config
|
||||||
config_eos.set_nested_value("logging/console_level", log_level)
|
config_eos.set_nested_value("logging/console_level", log_level)
|
||||||
logger.debug(f"logging/console_level configuration set by argument to {log_level}")
|
logger.debug(f"logging/console_level configuration set by argument to {log_level}")
|
||||||
|
|
||||||
@@ -188,105 +197,6 @@ if config_eos.server.startup_eosdash:
|
|||||||
config_eos.set_nested_value("server/eosdash_port", port + 1)
|
config_eos.set_nested_value("server/eosdash_port", port + 1)
|
||||||
|
|
||||||
|
|
||||||
# ----------------------
|
|
||||||
# EOSdash server startup
|
|
||||||
# ----------------------
|
|
||||||
|
|
||||||
|
|
||||||
def start_eosdash(
|
|
||||||
host: str,
|
|
||||||
port: int,
|
|
||||||
eos_host: str,
|
|
||||||
eos_port: int,
|
|
||||||
log_level: str,
|
|
||||||
access_log: bool,
|
|
||||||
reload: bool,
|
|
||||||
eos_dir: str,
|
|
||||||
eos_config_dir: str,
|
|
||||||
) -> subprocess.Popen:
|
|
||||||
"""Start the EOSdash server as a subprocess.
|
|
||||||
|
|
||||||
This function starts the EOSdash server by launching it as a subprocess. It checks if the server
|
|
||||||
is already running on the specified port and either returns the existing process or starts a new
|
|
||||||
one.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
host (str): The hostname for the EOSdash server.
|
|
||||||
port (int): The port for the EOSdash server.
|
|
||||||
eos_host (str): The hostname for the EOS server.
|
|
||||||
eos_port (int): The port for the EOS server.
|
|
||||||
log_level (str): The logging level for the EOSdash server.
|
|
||||||
access_log (bool): Flag to enable or disable access logging.
|
|
||||||
reload (bool): Flag to enable or disable auto-reloading.
|
|
||||||
eos_dir (str): Path to the EOS data directory.
|
|
||||||
eos_config_dir (str): Path to the EOS configuration directory.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
subprocess.Popen: The process of the EOSdash server.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
RuntimeError: If the EOSdash server fails to start.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
validate_ip_or_hostname(host)
|
|
||||||
validate_ip_or_hostname(eos_host)
|
|
||||||
except Exception as ex:
|
|
||||||
error_msg = f"Could not start EOSdash: {ex}"
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise RuntimeError(error_msg)
|
|
||||||
|
|
||||||
eosdash_path = Path(__file__).parent.resolve().joinpath("eosdash.py")
|
|
||||||
|
|
||||||
# Do a one time check for port free to generate warnings if not so
|
|
||||||
wait_for_port_free(port, timeout=0, waiting_app_name="EOSdash")
|
|
||||||
|
|
||||||
cmd = [
|
|
||||||
sys.executable,
|
|
||||||
"-m",
|
|
||||||
"akkudoktoreos.server.eosdash",
|
|
||||||
"--host",
|
|
||||||
str(host),
|
|
||||||
"--port",
|
|
||||||
str(port),
|
|
||||||
"--eos-host",
|
|
||||||
str(eos_host),
|
|
||||||
"--eos-port",
|
|
||||||
str(eos_port),
|
|
||||||
"--log_level",
|
|
||||||
log_level,
|
|
||||||
"--access_log",
|
|
||||||
str(access_log),
|
|
||||||
"--reload",
|
|
||||||
str(reload),
|
|
||||||
]
|
|
||||||
# Set environment before any subprocess run, to keep custom config dir
|
|
||||||
env = os.environ.copy()
|
|
||||||
env["EOS_DIR"] = eos_dir
|
|
||||||
env["EOS_CONFIG_DIR"] = eos_config_dir
|
|
||||||
|
|
||||||
try:
|
|
||||||
server_process = subprocess.Popen( # noqa: S603
|
|
||||||
cmd,
|
|
||||||
env=env,
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
start_new_session=True,
|
|
||||||
)
|
|
||||||
logger.info(f"Started EOSdash with '{cmd}'.")
|
|
||||||
except subprocess.CalledProcessError as ex:
|
|
||||||
error_msg = f"Could not start EOSdash: {ex}"
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise RuntimeError(error_msg)
|
|
||||||
|
|
||||||
# Check EOSdash is still running
|
|
||||||
if server_process.poll() is not None:
|
|
||||||
error_msg = f"EOSdash finished immediatedly with code: {server_process.returncode}"
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise RuntimeError(error_msg)
|
|
||||||
|
|
||||||
return server_process
|
|
||||||
|
|
||||||
|
|
||||||
# ----------------------
|
# ----------------------
|
||||||
# EOS REST Server
|
# EOS REST Server
|
||||||
# ----------------------
|
# ----------------------
|
||||||
@@ -389,41 +299,7 @@ async def server_shutdown_task() -> None:
|
|||||||
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||||
"""Lifespan manager for the app."""
|
"""Lifespan manager for the app."""
|
||||||
# On startup
|
# On startup
|
||||||
if config_eos.server.startup_eosdash:
|
asyncio.create_task(run_eosdash_supervisor())
|
||||||
try:
|
|
||||||
if (
|
|
||||||
config_eos.server.eosdash_host is None
|
|
||||||
or config_eos.server.eosdash_port is None
|
|
||||||
or config_eos.server.host is None
|
|
||||||
or config_eos.server.port is None
|
|
||||||
):
|
|
||||||
raise ValueError(
|
|
||||||
f"Invalid configuration for EOSdash server startup.\n"
|
|
||||||
f"- server/startup_eosdash: {config_eos.server.startup_eosdash}\n"
|
|
||||||
f"- server/eosdash_host: {config_eos.server.eosdash_host}\n"
|
|
||||||
f"- server/eosdash_port: {config_eos.server.eosdash_port}\n"
|
|
||||||
f"- server/host: {config_eos.server.host}\n"
|
|
||||||
f"- server/port: {config_eos.server.port}"
|
|
||||||
)
|
|
||||||
|
|
||||||
log_level = (
|
|
||||||
config_eos.logging.console_level if config_eos.logging.console_level else "info"
|
|
||||||
)
|
|
||||||
|
|
||||||
eosdash_process = start_eosdash(
|
|
||||||
host=str(config_eos.server.eosdash_host),
|
|
||||||
port=config_eos.server.eosdash_port,
|
|
||||||
eos_host=str(config_eos.server.host),
|
|
||||||
eos_port=config_eos.server.port,
|
|
||||||
log_level=log_level,
|
|
||||||
access_log=True,
|
|
||||||
reload=False,
|
|
||||||
eos_dir=str(config_eos.general.data_folder_path),
|
|
||||||
eos_config_dir=str(config_eos.general.config_folder_path),
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to start EOSdash server. Error: {e}")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
load_eos_state()
|
load_eos_state()
|
||||||
|
|
||||||
@@ -606,7 +482,7 @@ async def fastapi_admin_server_shutdown_post() -> dict:
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/health")
|
@app.get("/v1/health", tags=["health"])
|
||||||
def fastapi_health_get(): # type: ignore
|
def fastapi_health_get(): # type: ignore
|
||||||
"""Health check endpoint to verify that the EOS server is alive."""
|
"""Health check endpoint to verify that the EOS server is alive."""
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
@@ -1190,7 +1066,7 @@ def fastapi_energy_management_optimization_solution_get() -> OptimizationSolutio
|
|||||||
if solution is None:
|
if solution is None:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404,
|
status_code=404,
|
||||||
detail="Can not get the optimization solution. Did you configure automatic optimization?",
|
detail="Can not get the optimization solution.\nDid you configure automatic optimization?",
|
||||||
)
|
)
|
||||||
return solution
|
return solution
|
||||||
|
|
||||||
@@ -1202,7 +1078,7 @@ def fastapi_energy_management_plan_get() -> EnergyManagementPlan:
|
|||||||
if plan is None:
|
if plan is None:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404,
|
status_code=404,
|
||||||
detail="Can not get the energy management plan. Did you configure automatic optimization?",
|
detail="Can not get the energy management plan.\nDid you configure automatic optimization?",
|
||||||
)
|
)
|
||||||
return plan
|
return plan
|
||||||
|
|
||||||
@@ -1256,7 +1132,7 @@ async def fastapi_strompreis() -> list[float]:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404,
|
status_code=404,
|
||||||
detail=f"Can not get the electricity price forecast: {e}. Did you configure the electricity price forecast provider?",
|
detail=f"Can not get the electricity price forecast: {e}.\nDid you configure the electricity price forecast provider?",
|
||||||
)
|
)
|
||||||
|
|
||||||
return elecprice
|
return elecprice
|
||||||
@@ -1360,7 +1236,7 @@ async def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404,
|
status_code=404,
|
||||||
detail=f"Can not get the total load forecast: {e}. Did you configure the load forecast provider?",
|
detail=f"Can not get the total load forecast: {e}.\nDid you configure the load forecast provider?",
|
||||||
)
|
)
|
||||||
|
|
||||||
return prediction_list
|
return prediction_list
|
||||||
@@ -1421,7 +1297,7 @@ async def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404,
|
status_code=404,
|
||||||
detail=f"Can not get the total load forecast: {e}. Did you configure the load forecast provider?",
|
detail=f"Can not get the total load forecast: {e}.\nDid you configure the load forecast provider?",
|
||||||
)
|
)
|
||||||
|
|
||||||
return prediction_list
|
return prediction_list
|
||||||
@@ -1616,6 +1492,17 @@ def run_eos() -> None:
|
|||||||
Returns:
|
Returns:
|
||||||
None
|
None
|
||||||
"""
|
"""
|
||||||
|
if args:
|
||||||
|
run_as_user = args.run_as_user
|
||||||
|
else:
|
||||||
|
run_as_user = None
|
||||||
|
|
||||||
|
# Switch data directories ownership to user
|
||||||
|
fix_data_directories_permissions(run_as_user=run_as_user)
|
||||||
|
|
||||||
|
# Switch privileges to run_as_user
|
||||||
|
drop_root_privileges(run_as_user=run_as_user)
|
||||||
|
|
||||||
# Wait for EOS port to be free - e.g. in case of restart
|
# Wait for EOS port to be free - e.g. in case of restart
|
||||||
wait_for_port_free(port, timeout=120, waiting_app_name="EOS")
|
wait_for_port_free(port, timeout=120, waiting_app_name="EOS")
|
||||||
|
|
||||||
@@ -1628,6 +1515,8 @@ def run_eos() -> None:
|
|||||||
log_level="info", # Fix log level for uvicorn to info
|
log_level="info", # Fix log level for uvicorn to info
|
||||||
access_log=True, # Fix server access logging to True
|
access_log=True, # Fix server access logging to True
|
||||||
reload=reload,
|
reload=reload,
|
||||||
|
proxy_headers=True,
|
||||||
|
forwarded_allow_ips="*",
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.exception("Failed to start uvicorn server.")
|
logger.exception("Failed to start uvicorn server.")
|
||||||
|
|||||||
@@ -6,25 +6,37 @@ from pathlib import Path
|
|||||||
|
|
||||||
import psutil
|
import psutil
|
||||||
import uvicorn
|
import uvicorn
|
||||||
from fasthtml.common import FileResponse, JSONResponse
|
from fasthtml.common import Base, FileResponse, JSONResponse
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
from monsterui.core import FastHTML, Theme
|
from monsterui.core import FastHTML, Theme
|
||||||
|
from starlette.middleware import Middleware
|
||||||
|
from starlette.requests import Request
|
||||||
|
|
||||||
from akkudoktoreos.config.config import get_config
|
from akkudoktoreos.config.config import get_config
|
||||||
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
from akkudoktoreos.core.logabc import LOGGING_LEVELS
|
||||||
from akkudoktoreos.core.logging import track_logging_config
|
from akkudoktoreos.core.logging import logging_track_config
|
||||||
from akkudoktoreos.core.version import __version__
|
from akkudoktoreos.core.version import __version__
|
||||||
from akkudoktoreos.server.dash.about import About
|
|
||||||
|
|
||||||
# Pages
|
# Pages
|
||||||
|
from akkudoktoreos.server.dash.about import About
|
||||||
from akkudoktoreos.server.dash.admin import Admin
|
from akkudoktoreos.server.dash.admin import Admin
|
||||||
|
|
||||||
|
# helpers
|
||||||
from akkudoktoreos.server.dash.bokeh import BokehJS
|
from akkudoktoreos.server.dash.bokeh import BokehJS
|
||||||
from akkudoktoreos.server.dash.components import Page
|
from akkudoktoreos.server.dash.components import Page
|
||||||
from akkudoktoreos.server.dash.configuration import ConfigKeyUpdate, Configuration
|
from akkudoktoreos.server.dash.configuration import Configuration
|
||||||
|
from akkudoktoreos.server.dash.context import (
|
||||||
|
IngressMiddleware,
|
||||||
|
safe_asset_path,
|
||||||
|
)
|
||||||
from akkudoktoreos.server.dash.footer import Footer
|
from akkudoktoreos.server.dash.footer import Footer
|
||||||
from akkudoktoreos.server.dash.plan import Plan
|
from akkudoktoreos.server.dash.plan import Plan
|
||||||
from akkudoktoreos.server.dash.prediction import Prediction
|
from akkudoktoreos.server.dash.prediction import Prediction
|
||||||
from akkudoktoreos.server.server import get_default_host, wait_for_port_free
|
from akkudoktoreos.server.server import (
|
||||||
|
drop_root_privileges,
|
||||||
|
get_default_host,
|
||||||
|
wait_for_port_free,
|
||||||
|
)
|
||||||
from akkudoktoreos.utils.stringutil import str2bool
|
from akkudoktoreos.utils.stringutil import str2bool
|
||||||
|
|
||||||
config_eos = get_config()
|
config_eos = get_config()
|
||||||
@@ -35,8 +47,8 @@ config_eos = get_config()
|
|||||||
# ------------------------------------
|
# ------------------------------------
|
||||||
|
|
||||||
logger.remove()
|
logger.remove()
|
||||||
track_logging_config(config_eos, "logging", None, None)
|
logging_track_config(config_eos, "logging", None, None)
|
||||||
config_eos.track_nested_value("/logging", track_logging_config)
|
config_eos.track_nested_value("/logging", logging_track_config)
|
||||||
|
|
||||||
|
|
||||||
# ----------------------------
|
# ----------------------------
|
||||||
@@ -83,6 +95,12 @@ parser.add_argument(
|
|||||||
default=False,
|
default=False,
|
||||||
help="Enable or disable auto-reload. Useful for development. Options: True or False (default: False)",
|
help="Enable or disable auto-reload. Useful for development. Options: True or False (default: False)",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--run_as_user",
|
||||||
|
type=str,
|
||||||
|
help="The unprivileged user account the EOSdash server shall run if started in root-level.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# Command line arguments
|
# Command line arguments
|
||||||
args: argparse.Namespace
|
args: argparse.Namespace
|
||||||
@@ -110,7 +128,7 @@ else:
|
|||||||
# Ensure log_level from command line is in config settings
|
# Ensure log_level from command line is in config settings
|
||||||
if config_eosdash["log_level"] in LOGGING_LEVELS:
|
if config_eosdash["log_level"] in LOGGING_LEVELS:
|
||||||
# Setup console logging level using nested value
|
# Setup console logging level using nested value
|
||||||
# - triggers logging configuration by track_logging_config
|
# - triggers logging configuration by logging_track_config
|
||||||
config_eos.set_nested_value("logging/console_level", config_eosdash["log_level"])
|
config_eos.set_nested_value("logging/console_level", config_eosdash["log_level"])
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f"logging/console_level configuration set by argument to {config_eosdash['log_level']}"
|
f"logging/console_level configuration set by argument to {config_eosdash['log_level']}"
|
||||||
@@ -180,9 +198,11 @@ hdrs = (
|
|||||||
|
|
||||||
# The EOSdash application
|
# The EOSdash application
|
||||||
app: FastHTML = FastHTML(
|
app: FastHTML = FastHTML(
|
||||||
title="EOSdash",
|
title="EOSdash", # Default page title
|
||||||
hdrs=hdrs,
|
hdrs=hdrs, # Additional FT elements to add to <HEAD>
|
||||||
secret_key=os.getenv("EOS_SERVER__EOSDASH_SESSKEY"),
|
# htmx=True, # Include HTMX header?
|
||||||
|
middleware=[Middleware(IngressMiddleware)],
|
||||||
|
secret_key=os.getenv("EOS_SERVER__EOSDASH_SESSKEY"), # Signing key for sessions
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -199,37 +219,60 @@ def eos_server() -> tuple[str, int]:
|
|||||||
return config_eosdash["eos_host"], config_eosdash["eos_port"]
|
return config_eosdash["eos_host"], config_eosdash["eos_port"]
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------
|
||||||
|
# Routes
|
||||||
|
# -------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
@app.get("/favicon.ico")
|
@app.get("/favicon.ico")
|
||||||
def get_eosdash_favicon(): # type: ignore
|
def get_eosdash_favicon(request: Request): # type: ignore
|
||||||
"""Get favicon."""
|
"""Get the EOSdash favicon.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
FileResponse: The favicon file.
|
||||||
|
"""
|
||||||
return FileResponse(path=favicon_filepath)
|
return FileResponse(path=favicon_filepath)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/")
|
@app.get("/")
|
||||||
def get_eosdash(): # type: ignore
|
def get_eosdash(request: Request): # type: ignore
|
||||||
"""Serves the main EOSdash page.
|
"""Serve the main EOSdash page with navigation links.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Page: The main dashboard page with navigation links and footer.
|
Page: The main dashboard page with navigation links and footer.
|
||||||
"""
|
"""
|
||||||
return Page(
|
root_path: str = request.scope.get("root_path", "")
|
||||||
None,
|
|
||||||
{
|
return (
|
||||||
"Plan": "/eosdash/plan",
|
Base(href=f"{root_path}/") if root_path else None,
|
||||||
"Prediction": "/eosdash/prediction",
|
Page(
|
||||||
"Config": "/eosdash/configuration",
|
None,
|
||||||
"Admin": "/eosdash/admin",
|
{
|
||||||
"About": "/eosdash/about",
|
"Plan": "/eosdash/plan",
|
||||||
},
|
"Prediction": "/eosdash/prediction",
|
||||||
About(),
|
"Config": "/eosdash/configuration",
|
||||||
Footer(*eos_server()),
|
"Admin": "/eosdash/admin",
|
||||||
"/eosdash/footer",
|
"About": "/eosdash/about",
|
||||||
|
},
|
||||||
|
About(),
|
||||||
|
Footer(*eos_server()),
|
||||||
|
"/eosdash/footer",
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/footer")
|
@app.get("/eosdash/footer")
|
||||||
def get_eosdash_footer(): # type: ignore
|
def get_eosdash_footer(request: Request): # type: ignore
|
||||||
"""Serves the EOSdash Foooter information.
|
"""Serve the EOSdash Footer information.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Footer: The Footer component.
|
Footer: The Footer component.
|
||||||
@@ -238,8 +281,11 @@ def get_eosdash_footer(): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/about")
|
@app.get("/eosdash/about")
|
||||||
def get_eosdash_about(): # type: ignore
|
def get_eosdash_about(request: Request): # type: ignore
|
||||||
"""Serves the EOSdash About page.
|
"""Serve the EOSdash About page.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
About: The About page component.
|
About: The About page component.
|
||||||
@@ -248,8 +294,11 @@ def get_eosdash_about(): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/admin")
|
@app.get("/eosdash/admin")
|
||||||
def get_eosdash_admin(): # type: ignore
|
def get_eosdash_admin(request: Request): # type: ignore
|
||||||
"""Serves the EOSdash Admin page.
|
"""Serve the EOSdash Admin page.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Admin: The Admin page component.
|
Admin: The Admin page component.
|
||||||
@@ -258,10 +307,12 @@ def get_eosdash_admin(): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.post("/eosdash/admin")
|
@app.post("/eosdash/admin")
|
||||||
def post_eosdash_admin(data: dict): # type: ignore
|
def post_eosdash_admin(request: Request, data: dict): # type: ignore
|
||||||
"""Provide control data to the Admin page.
|
"""Provide control data to the Admin page.
|
||||||
|
|
||||||
This endpoint is called from within the Admin page on user actions.
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
data (dict): User-submitted data from the Admin page.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Admin: The Admin page component.
|
Admin: The Admin page component.
|
||||||
@@ -270,8 +321,11 @@ def post_eosdash_admin(data: dict): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/configuration")
|
@app.get("/eosdash/configuration")
|
||||||
def get_eosdash_configuration(): # type: ignore
|
def get_eosdash_configuration(request: Request): # type: ignore
|
||||||
"""Serves the EOSdash Configuration page.
|
"""Serve the EOSdash Configuration page.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Configuration: The Configuration page component.
|
Configuration: The Configuration page component.
|
||||||
@@ -280,13 +334,40 @@ def get_eosdash_configuration(): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.put("/eosdash/configuration")
|
@app.put("/eosdash/configuration")
|
||||||
def put_eosdash_configuration(data: dict): # type: ignore
|
def put_eosdash_configuration(request: Request, data: dict): # type: ignore
|
||||||
return ConfigKeyUpdate(*eos_server(), data["key"], data["value"])
|
"""Update a configuration key/value pair.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
data (dict): Dictionary containing 'key' and 'value' to trigger configuration update.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Configuration: The Configuration page component with updated configuration.
|
||||||
|
"""
|
||||||
|
return Configuration(*eos_server(), data)
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/eosdash/configuration")
|
||||||
|
def post_eosdash_configuration(request: Request, data: dict): # type: ignore
|
||||||
|
"""Provide control data to the configuration page.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
data (dict): User-submitted data from the configuration page.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Configuration: The Configuration page component with updated configuration.
|
||||||
|
"""
|
||||||
|
return Configuration(*eos_server(), data)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/plan")
|
@app.get("/eosdash/plan")
|
||||||
def get_eosdash_plan(data: dict): # type: ignore
|
def get_eosdash_plan(request: Request, data: dict): # type: ignore
|
||||||
"""Serves the EOSdash Plan page.
|
"""Serve the EOSdash Plan page.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
data (dict): Optional query data.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Plan: The Plan page component.
|
Plan: The Plan page component.
|
||||||
@@ -295,10 +376,12 @@ def get_eosdash_plan(data: dict): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.post("/eosdash/plan")
|
@app.post("/eosdash/plan")
|
||||||
def post_eosdash_plan(data: dict): # type: ignore
|
def post_eosdash_plan(request: Request, data: dict): # type: ignore
|
||||||
"""Provide control data to the Plan page.
|
"""Provide control data to the Plan page.
|
||||||
|
|
||||||
This endpoint is called from within the Plan page on user actions.
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
data (dict): User-submitted data from the Plan page.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Plan: The Plan page component.
|
Plan: The Plan page component.
|
||||||
@@ -307,8 +390,12 @@ def post_eosdash_plan(data: dict): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/prediction")
|
@app.get("/eosdash/prediction")
|
||||||
def get_eosdash_prediction(data: dict): # type: ignore
|
def get_eosdash_prediction(request: Request, data: dict): # type: ignore
|
||||||
"""Serves the EOSdash Prediction page.
|
"""Serve the EOSdash Prediction page.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
data (dict): Optional query data.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Prediction: The Prediction page component.
|
Prediction: The Prediction page component.
|
||||||
@@ -317,8 +404,15 @@ def get_eosdash_prediction(data: dict): # type: ignore
|
|||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/health")
|
@app.get("/eosdash/health")
|
||||||
def get_eosdash_health(): # type: ignore
|
def get_eosdash_health(request: Request): # type: ignore
|
||||||
"""Health check endpoint to verify that the EOSdash server is alive."""
|
"""Health check endpoint to verify the EOSdash server is alive.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
JSONResponse: Server status including PID and version.
|
||||||
|
"""
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
{
|
{
|
||||||
"status": "alive",
|
"status": "alive",
|
||||||
@@ -328,13 +422,37 @@ def get_eosdash_health(): # type: ignore
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/eosdash/assets/{fname:path}.{ext:static}")
|
@app.get("/eosdash/assets/{filepath:path}")
|
||||||
def get_eosdash_assets(fname: str, ext: str): # type: ignore
|
def get_eosdash_assets(request: Request, filepath: str): # type: ignore
|
||||||
"""Get assets."""
|
"""Serve static assets for EOSdash safely.
|
||||||
asset_filepath = Path(__file__).parent.joinpath(f"dash/assets/{fname}.{ext}")
|
|
||||||
|
Args:
|
||||||
|
request (Request): The incoming FastHTML request.
|
||||||
|
filepath (str): Relative path of the asset under dash/assets/.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
FileResponse: The requested asset file if it exists.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
404: If the file does not exist.
|
||||||
|
403: If the file path is forbidden (directory traversal attempt).
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
asset_filepath = safe_asset_path(filepath)
|
||||||
|
except ValueError:
|
||||||
|
return {"error": "Forbidden"}, 403
|
||||||
|
|
||||||
|
if not asset_filepath.exists() or not asset_filepath.is_file():
|
||||||
|
return {"error": "File not found"}, 404
|
||||||
|
|
||||||
return FileResponse(path=asset_filepath)
|
return FileResponse(path=asset_filepath)
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------------
|
||||||
|
# Run the EOSdash server
|
||||||
|
# ----------------------
|
||||||
|
|
||||||
|
|
||||||
def run_eosdash() -> None:
|
def run_eosdash() -> None:
|
||||||
"""Run the EOSdash server with the specified configurations.
|
"""Run the EOSdash server with the specified configurations.
|
||||||
|
|
||||||
@@ -348,6 +466,14 @@ def run_eosdash() -> None:
|
|||||||
Returns:
|
Returns:
|
||||||
None
|
None
|
||||||
"""
|
"""
|
||||||
|
if args:
|
||||||
|
run_as_user = args.run_as_user
|
||||||
|
else:
|
||||||
|
run_as_user = None
|
||||||
|
|
||||||
|
# Drop root privileges if running as root
|
||||||
|
drop_root_privileges(run_as_user=run_as_user)
|
||||||
|
|
||||||
# Wait for EOSdash port to be free - e.g. in case of restart
|
# Wait for EOSdash port to be free - e.g. in case of restart
|
||||||
wait_for_port_free(config_eosdash["eosdash_port"], timeout=120, waiting_app_name="EOSdash")
|
wait_for_port_free(config_eosdash["eosdash_port"], timeout=120, waiting_app_name="EOSdash")
|
||||||
|
|
||||||
@@ -359,6 +485,8 @@ def run_eosdash() -> None:
|
|||||||
log_level=config_eosdash["log_level"].lower(),
|
log_level=config_eosdash["log_level"].lower(),
|
||||||
access_log=config_eosdash["access_log"],
|
access_log=config_eosdash["access_log"],
|
||||||
reload=config_eosdash["reload"],
|
reload=config_eosdash["reload"],
|
||||||
|
proxy_headers=True,
|
||||||
|
forwarded_allow_ips="*",
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(
|
logger.error(
|
||||||
|
|||||||
269
src/akkudoktoreos/server/rest/starteosdash.py
Normal file
269
src/akkudoktoreos/server/rest/starteosdash.py
Normal file
@@ -0,0 +1,269 @@
|
|||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from akkudoktoreos.config.config import get_config
|
||||||
|
from akkudoktoreos.server.server import (
|
||||||
|
validate_ip_or_hostname,
|
||||||
|
wait_for_port_free,
|
||||||
|
)
|
||||||
|
|
||||||
|
config_eos = get_config()
|
||||||
|
|
||||||
|
|
||||||
|
# Loguru to HA stdout
|
||||||
|
logger.add(sys.stdout, format="{time} | {level} | {message}", enqueue=True)
|
||||||
|
|
||||||
|
|
||||||
|
LOG_PATTERN = re.compile(
|
||||||
|
r"""
|
||||||
|
(?:(?P<timestamp>^\S+\s+\S+)\s*\|\s*)? # Optional timestamp
|
||||||
|
(?P<level>TRACE|DEBUG|INFO|WARNING|ERROR|CRITICAL)\s*\|\s* # Log level
|
||||||
|
(?:
|
||||||
|
(?P<file_path>[A-Za-z0-9_\-./]+) # Full file path or filename
|
||||||
|
:
|
||||||
|
(?P<line>\d+) # Line number
|
||||||
|
\s*\|\s*
|
||||||
|
)?
|
||||||
|
(?:(?P<function>[A-Za-z0-9_<>-]+)\s*\|\s*)? # Optional function name
|
||||||
|
(?P<msg>.*) # Message
|
||||||
|
""",
|
||||||
|
re.VERBOSE,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def forward_stream(stream: asyncio.StreamReader, prefix: str = "") -> None:
|
||||||
|
"""Continuously read log lines from a subprocess and re-log them via Loguru.
|
||||||
|
|
||||||
|
The function reads lines from an ``asyncio.StreamReader`` originating from a
|
||||||
|
subprocess (typically the subprocess's stdout or stderr), parses the log
|
||||||
|
metadata if present (log level, file path, line number, function), and
|
||||||
|
forwards the log entry to Loguru. If the line cannot be parsed, it is logged
|
||||||
|
as an ``INFO`` message with generic metadata.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
stream (asyncio.StreamReader):
|
||||||
|
An asynchronous stream to read from, usually ``proc.stdout`` or
|
||||||
|
``proc.stderr`` from ``asyncio.create_subprocess_exec``.
|
||||||
|
prefix (str, optional):
|
||||||
|
A string prefix added to each forwarded log line. Useful for
|
||||||
|
distinguishing between multiple subprocess sources.
|
||||||
|
Defaults to an empty string.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- If the subprocess log line includes a file path (e.g.,
|
||||||
|
``/app/server/main.py:42``), both ``file.name`` and ``file.path`` will
|
||||||
|
be set accordingly in the forwarded Loguru log entry.
|
||||||
|
- If metadata cannot be extracted, fallback values
|
||||||
|
(``subprocess.py`` and ``/subprocess/subprocess.py``) are used.
|
||||||
|
- The function runs until ``stream`` reaches EOF.
|
||||||
|
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
line = await stream.readline()
|
||||||
|
if not line:
|
||||||
|
break # End of stream
|
||||||
|
|
||||||
|
raw = line.decode(errors="replace").rstrip()
|
||||||
|
match = LOG_PATTERN.search(raw)
|
||||||
|
|
||||||
|
if match:
|
||||||
|
data = match.groupdict()
|
||||||
|
|
||||||
|
level = data["level"] or "INFO"
|
||||||
|
message = data["msg"]
|
||||||
|
|
||||||
|
# ---- Extract file path and name ----
|
||||||
|
file_path = data["file_path"]
|
||||||
|
if file_path:
|
||||||
|
if "/" in file_path:
|
||||||
|
file_name = file_path.rsplit("/", 1)[1]
|
||||||
|
else:
|
||||||
|
file_name = file_path
|
||||||
|
else:
|
||||||
|
file_name = "subprocess.py"
|
||||||
|
file_path = f"/subprocess/{file_name}"
|
||||||
|
|
||||||
|
# ---- Extract function and line ----
|
||||||
|
func_name = data["function"] or "<subprocess>"
|
||||||
|
line_no = int(data["line"]) if data["line"] else 1
|
||||||
|
|
||||||
|
# ---- Patch logger with realistic metadata ----
|
||||||
|
patched = logger.patch(
|
||||||
|
lambda r: r.update(
|
||||||
|
{
|
||||||
|
"file": {
|
||||||
|
"name": file_name,
|
||||||
|
"path": file_path,
|
||||||
|
},
|
||||||
|
"line": line_no,
|
||||||
|
"function": func_name,
|
||||||
|
"name": "EOSdash",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
patched.log(level, f"{prefix}{message}")
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Fallback: unstructured log line
|
||||||
|
file_name = "subprocess.py"
|
||||||
|
file_path = f"/subprocess/{file_name}"
|
||||||
|
|
||||||
|
logger.patch(
|
||||||
|
lambda r: r.update(
|
||||||
|
{
|
||||||
|
"file": {
|
||||||
|
"name": file_name,
|
||||||
|
"path": file_path,
|
||||||
|
},
|
||||||
|
"line": 1,
|
||||||
|
"function": "<subprocess>",
|
||||||
|
"name": "EOSdash",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
).info(f"{prefix}{raw}")
|
||||||
|
|
||||||
|
|
||||||
|
async def run_eosdash_supervisor() -> None:
|
||||||
|
"""Starts EOSdash, pipes its logs, restarts it if it crashes.
|
||||||
|
|
||||||
|
Runs forever.
|
||||||
|
"""
|
||||||
|
eosdash_path = Path(__file__).parent.resolve().joinpath("eosdash.py")
|
||||||
|
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(5)
|
||||||
|
|
||||||
|
if not config_eos.server.startup_eosdash:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if (
|
||||||
|
config_eos.server.eosdash_host is None
|
||||||
|
or config_eos.server.eosdash_port is None
|
||||||
|
or config_eos.server.host is None
|
||||||
|
or config_eos.server.port is None
|
||||||
|
):
|
||||||
|
error_msg = (
|
||||||
|
f"Invalid configuration for EOSdash server startup.\n"
|
||||||
|
f"- server/eosdash_host: {config_eos.server.eosdash_host}\n"
|
||||||
|
f"- server/eosdash_port: {config_eos.server.eosdash_port}\n"
|
||||||
|
f"- server/host: {config_eos.server.host}\n"
|
||||||
|
f"- server/port: {config_eos.server.port}"
|
||||||
|
)
|
||||||
|
logger.error(error_msg)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Get all the parameters
|
||||||
|
host = str(config_eos.server.eosdash_host)
|
||||||
|
port = config_eos.server.eosdash_port
|
||||||
|
eos_host = str(config_eos.server.host)
|
||||||
|
eos_port = config_eos.server.port
|
||||||
|
access_log = True
|
||||||
|
reload = False
|
||||||
|
log_level = config_eos.logging.console_level if config_eos.logging.console_level else "info"
|
||||||
|
|
||||||
|
try:
|
||||||
|
validate_ip_or_hostname(host)
|
||||||
|
validate_ip_or_hostname(eos_host)
|
||||||
|
except Exception as ex:
|
||||||
|
error_msg = f"Could not start EOSdash: {ex}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if eos_host != host:
|
||||||
|
# EOSdash runs on a different server - we can not start.
|
||||||
|
error_msg = (
|
||||||
|
f"EOSdash server startup not possible on different hosts.\n"
|
||||||
|
f"- server/eosdash_host: {config_eos.server.eosdash_host}\n"
|
||||||
|
f"- server/host: {config_eos.server.host}"
|
||||||
|
)
|
||||||
|
logger.error(error_msg)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Do a one time check for port free to generate warnings if not so
|
||||||
|
wait_for_port_free(port, timeout=0, waiting_app_name="EOSdash")
|
||||||
|
|
||||||
|
cmd = [
|
||||||
|
sys.executable,
|
||||||
|
"-m",
|
||||||
|
"akkudoktoreos.server.eosdash",
|
||||||
|
"--host",
|
||||||
|
str(host),
|
||||||
|
"--port",
|
||||||
|
str(port),
|
||||||
|
"--eos-host",
|
||||||
|
str(eos_host),
|
||||||
|
"--eos-port",
|
||||||
|
str(eos_port),
|
||||||
|
"--log_level",
|
||||||
|
log_level,
|
||||||
|
"--access_log",
|
||||||
|
str(access_log),
|
||||||
|
"--reload",
|
||||||
|
str(reload),
|
||||||
|
]
|
||||||
|
# Set environment before any subprocess run, to keep custom config dir
|
||||||
|
eos_dir = str(config_eos.package_root_path)
|
||||||
|
eos_data_dir = str(config_eos.general.data_folder_path)
|
||||||
|
eos_config_dir = str(config_eos.general.config_folder_path)
|
||||||
|
env = os.environ.copy()
|
||||||
|
env["EOS_DIR"] = eos_dir
|
||||||
|
env["EOS_DATA_DIR"] = eos_data_dir
|
||||||
|
env["EOS_CONFIG_DIR"] = eos_config_dir
|
||||||
|
|
||||||
|
logger.info("Starting EOSdash subprocess...")
|
||||||
|
|
||||||
|
# Start EOSdash server
|
||||||
|
try:
|
||||||
|
proc = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd, env=env, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
|
||||||
|
)
|
||||||
|
except FileNotFoundError:
|
||||||
|
logger.error(
|
||||||
|
"Failed to start EOSdash: 'python' executable '{sys.executable}' not found."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
except PermissionError:
|
||||||
|
logger.error("Failed to start EOSdash: permission denied on 'eosdash.py'.")
|
||||||
|
continue
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.warning("EOSdash startup cancelled (shutdown?).")
|
||||||
|
return
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(f"Unexpected error launching EOSdash: {e}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
if proc.stdout is None:
|
||||||
|
logger.error("Failed to forward EOSdash output to EOS pipe.")
|
||||||
|
else:
|
||||||
|
# Forward log
|
||||||
|
asyncio.create_task(forward_stream(proc.stdout, prefix="[EOSdash] "))
|
||||||
|
|
||||||
|
if proc.stderr is None:
|
||||||
|
logger.error("Failed to forward EOSdash error output to EOS pipe.")
|
||||||
|
else:
|
||||||
|
# Forward log
|
||||||
|
asyncio.create_task(forward_stream(proc.stderr, prefix="[EOSdash-ERR] "))
|
||||||
|
|
||||||
|
# If we reach here, the subprocess started successfully
|
||||||
|
logger.info("EOSdash subprocess started successfully.")
|
||||||
|
|
||||||
|
# Wait for exit
|
||||||
|
try:
|
||||||
|
exit_code = await proc.wait()
|
||||||
|
logger.error(f"EOSdash exited with code {exit_code}")
|
||||||
|
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.warning("EOSdash wait cancelled (shutdown?).")
|
||||||
|
return
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(f"Error while waiting for EOSdash to terminate: {e}")
|
||||||
|
|
||||||
|
# Restart after a delay
|
||||||
|
logger.info("Restarting EOSdash...")
|
||||||
@@ -3,10 +3,10 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from typing import Any, Callable, Coroutine, Union
|
from typing import Any, Callable, Coroutine, Union
|
||||||
|
|
||||||
|
import loguru
|
||||||
from starlette.concurrency import run_in_threadpool
|
from starlette.concurrency import run_in_threadpool
|
||||||
|
|
||||||
NoArgsNoReturnFuncT = Callable[[], None]
|
NoArgsNoReturnFuncT = Callable[[], None]
|
||||||
@@ -37,7 +37,7 @@ def repeat_every(
|
|||||||
*,
|
*,
|
||||||
seconds: float,
|
seconds: float,
|
||||||
wait_first: float | None = None,
|
wait_first: float | None = None,
|
||||||
logger: logging.Logger | None = None,
|
logger: loguru.logger | None = None,
|
||||||
raise_exceptions: bool = False,
|
raise_exceptions: bool = False,
|
||||||
max_repetitions: int | None = None,
|
max_repetitions: int | None = None,
|
||||||
on_complete: NoArgsNoReturnAnyFuncT | None = None,
|
on_complete: NoArgsNoReturnAnyFuncT | None = None,
|
||||||
|
|||||||
@@ -1,6 +1,9 @@
|
|||||||
"""Server Module."""
|
"""Server Module."""
|
||||||
|
|
||||||
|
import grp
|
||||||
import ipaddress
|
import ipaddress
|
||||||
|
import os
|
||||||
|
import pwd
|
||||||
import re
|
import re
|
||||||
import socket
|
import socket
|
||||||
import time
|
import time
|
||||||
@@ -148,6 +151,179 @@ def wait_for_port_free(port: int, timeout: int = 0, waiting_app_name: str = "App
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def drop_root_privileges(run_as_user: Optional[str] = None) -> bool:
|
||||||
|
"""Drop root privileges and switch execution to a less privileged user.
|
||||||
|
|
||||||
|
This function transitions the running process from root (UID 0) to the
|
||||||
|
specified unprivileged user. It sets UID, GID, supplementary groups, and
|
||||||
|
updates environment variables to reflect the new user context.
|
||||||
|
|
||||||
|
If the process is not running as root, no privilege changes are made.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
run_as_user (str | None):
|
||||||
|
The name of the target user to switch to.
|
||||||
|
If ``None`` (default), the current effective user is used and
|
||||||
|
no privilege change is attempted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool:
|
||||||
|
``True`` if privileges were successfully dropped OR the process is
|
||||||
|
already running as the target user.
|
||||||
|
``False`` if privilege dropping failed.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- This must be called very early during startup, before opening files,
|
||||||
|
creating sockets, or starting threads.
|
||||||
|
- Dropping privileges is irreversible within the same process.
|
||||||
|
- The target user must exist inside the container (valid entry in
|
||||||
|
``/etc/passwd`` and ``/etc/group``).
|
||||||
|
"""
|
||||||
|
# Determine current user
|
||||||
|
current_user = pwd.getpwuid(os.geteuid()).pw_name
|
||||||
|
|
||||||
|
# No action needed if already running as the desired user
|
||||||
|
if run_as_user is None or run_as_user == current_user:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Cannot switch users unless running as root
|
||||||
|
if os.geteuid() != 0:
|
||||||
|
logger.error(
|
||||||
|
f"Privilege switch requested to '{run_as_user}' "
|
||||||
|
f"but process is not root (running as '{current_user}')."
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Resolve target user info
|
||||||
|
try:
|
||||||
|
pw_record = pwd.getpwnam(run_as_user)
|
||||||
|
except KeyError:
|
||||||
|
logger.error(f"Privilege switch failed: user '{run_as_user}' does not exist.")
|
||||||
|
return False
|
||||||
|
|
||||||
|
user_uid: int = pw_record.pw_uid
|
||||||
|
user_gid: int = pw_record.pw_gid
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get all groups where the user is listed as a member
|
||||||
|
supplementary_groups: list[int] = [
|
||||||
|
g.gr_gid for g in grp.getgrall() if run_as_user in g.gr_mem
|
||||||
|
]
|
||||||
|
|
||||||
|
# Ensure the primary group is included (it usually is NOT in gr_mem)
|
||||||
|
if user_gid not in supplementary_groups:
|
||||||
|
supplementary_groups.append(user_gid)
|
||||||
|
|
||||||
|
# Apply groups, gid, uid (in that order)
|
||||||
|
os.setgroups(supplementary_groups)
|
||||||
|
os.setgid(user_gid)
|
||||||
|
os.setuid(user_uid)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Privilege switch failed: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Update environment variables to reflect the new user identity
|
||||||
|
os.environ["HOME"] = pw_record.pw_dir
|
||||||
|
os.environ["LOGNAME"] = run_as_user
|
||||||
|
os.environ["USER"] = run_as_user
|
||||||
|
|
||||||
|
# Restrictive umask
|
||||||
|
os.umask(0o077)
|
||||||
|
|
||||||
|
# Verify that privilege drop was successful
|
||||||
|
if os.geteuid() != user_uid or os.getegid() != user_gid:
|
||||||
|
logger.error(
|
||||||
|
f"Privilege drop sanity check failed: now uid={os.geteuid()}, gid={os.getegid()}, "
|
||||||
|
f"expected uid={user_uid}, gid={user_gid}"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Switched privileges to user '{run_as_user}' "
|
||||||
|
f"(uid={user_uid}, gid={user_gid}, groups={supplementary_groups})"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def fix_data_directories_permissions(run_as_user: Optional[str] = None) -> None:
|
||||||
|
"""Ensure correct ownership for data directories.
|
||||||
|
|
||||||
|
This function recursively updates the owner and group of the data directories and all of its
|
||||||
|
subdirectories and files so that they belong to the given user.
|
||||||
|
|
||||||
|
The function may require root privileges to change file ownership. It logs an error message
|
||||||
|
if a path ownership can not be updated.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
run_as_user (Optional[str]): The user who should own the data directories and files.
|
||||||
|
Defaults to current one.
|
||||||
|
"""
|
||||||
|
from akkudoktoreos.config.config import get_config
|
||||||
|
|
||||||
|
config_eos = get_config()
|
||||||
|
|
||||||
|
base_dirs = [
|
||||||
|
config_eos.general.data_folder_path,
|
||||||
|
config_eos.general.data_output_path,
|
||||||
|
config_eos.general.config_folder_path,
|
||||||
|
config_eos.cache.path(),
|
||||||
|
]
|
||||||
|
|
||||||
|
error_msg: Optional[str] = None
|
||||||
|
|
||||||
|
if run_as_user is None:
|
||||||
|
# Get current user - try to ensure current user can access the data directories
|
||||||
|
run_as_user = pwd.getpwuid(os.geteuid()).pw_name
|
||||||
|
|
||||||
|
try:
|
||||||
|
pw_record = pwd.getpwnam(run_as_user)
|
||||||
|
except KeyError as e:
|
||||||
|
error_msg = f"Data directories '{base_dirs}' permission fix failed: user '{run_as_user}' does not exist."
|
||||||
|
logger.error(error_msg)
|
||||||
|
return
|
||||||
|
|
||||||
|
uid = pw_record.pw_uid
|
||||||
|
gid = pw_record.pw_gid
|
||||||
|
|
||||||
|
# Walk directory tree and fix permissions
|
||||||
|
for base_dir in base_dirs:
|
||||||
|
if base_dir is None:
|
||||||
|
continue
|
||||||
|
# ensure base dir exists
|
||||||
|
try:
|
||||||
|
base_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Could not setup data dir '{base_dir}': {e}")
|
||||||
|
continue
|
||||||
|
for root, dirs, files in os.walk(base_dir):
|
||||||
|
for name in dirs + files:
|
||||||
|
path = os.path.join(root, name)
|
||||||
|
try:
|
||||||
|
os.chown(path, uid, gid)
|
||||||
|
except PermissionError as e:
|
||||||
|
error_msg = f"Permission denied while updating ownership of '{path}' to user '{run_as_user}'"
|
||||||
|
logger.error(error_msg)
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = (
|
||||||
|
f"Updating ownership failed of '{path}' to user '{run_as_user}': {e}"
|
||||||
|
)
|
||||||
|
logger.error(error_msg)
|
||||||
|
# Also fix the base directory itself
|
||||||
|
try:
|
||||||
|
os.chown(base_dir, uid, gid)
|
||||||
|
except PermissionError as e:
|
||||||
|
error_msg = (
|
||||||
|
f"Permission denied while updating ownership of '{path}' to user '{run_as_user}'"
|
||||||
|
)
|
||||||
|
logger.error(error_msg)
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Updating ownership failed of '{path}' to user '{run_as_user}': {e}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
|
||||||
|
if error_msg is None:
|
||||||
|
logger.info(f"Updated ownership of '{base_dirs}' recursively to user '{run_as_user}'.")
|
||||||
|
|
||||||
|
|
||||||
class ServerCommonSettings(SettingsBaseModel):
|
class ServerCommonSettings(SettingsBaseModel):
|
||||||
"""Server Configuration."""
|
"""Server Configuration."""
|
||||||
|
|
||||||
|
|||||||
@@ -665,11 +665,14 @@ def to_time(
|
|||||||
- int (e.g. 14 → 14:00)
|
- int (e.g. 14 → 14:00)
|
||||||
- float (e.g. 14.5 → 14:30)
|
- float (e.g. 14.5 → 14:30)
|
||||||
- tuple like (14,), (14, 30), (14, 30, 15)
|
- tuple like (14,), (14, 30), (14, 30, 15)
|
||||||
|
|
||||||
in_timezone: Optional timezone name or object (e.g., "Europe/Berlin").
|
in_timezone: Optional timezone name or object (e.g., "Europe/Berlin").
|
||||||
Defaults to the local timezone.
|
Defaults to the local timezone.
|
||||||
|
|
||||||
to_naive: If True, return a timezone-naive Time object.
|
to_naive: If True, return a timezone-naive Time object.
|
||||||
|
|
||||||
as_string: If True, return time as "HH:mm:ss ZZ".
|
as_string: If True, return time as "HH:mm:ss ZZ".
|
||||||
If a format string is provided, it's passed to `pendulum.Time.format()`.
|
If a format string is provided, it's passed to `pendulum.Time.format()`.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Time or str: A time object or its formatted string.
|
Time or str: A time object or its formatted string.
|
||||||
@@ -1637,106 +1640,233 @@ def to_datetime(
|
|||||||
return dt
|
return dt
|
||||||
|
|
||||||
|
|
||||||
|
# to duration helper
|
||||||
|
def duration_to_iso8601(duration: pendulum.Duration) -> str:
|
||||||
|
"""Convert pendulum.Duration to ISO-8601 duration string."""
|
||||||
|
total_seconds = int(duration.total_seconds())
|
||||||
|
|
||||||
|
days, rem = divmod(total_seconds, 86400)
|
||||||
|
hours, rem = divmod(rem, 3600)
|
||||||
|
minutes, seconds = divmod(rem, 60)
|
||||||
|
|
||||||
|
parts = ["P"]
|
||||||
|
if days:
|
||||||
|
parts.append(f"{days}D")
|
||||||
|
|
||||||
|
time_parts = []
|
||||||
|
if hours:
|
||||||
|
time_parts.append(f"{hours}H")
|
||||||
|
if minutes:
|
||||||
|
time_parts.append(f"{minutes}M")
|
||||||
|
if seconds:
|
||||||
|
time_parts.append(f"{seconds}S")
|
||||||
|
|
||||||
|
if time_parts:
|
||||||
|
parts.append("T")
|
||||||
|
parts.extend(time_parts)
|
||||||
|
elif len(parts) == 1: # zero duration
|
||||||
|
parts.append("T0S")
|
||||||
|
|
||||||
|
return "".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
def to_duration(
|
def to_duration(
|
||||||
input_value: Union[
|
input_value: Union[
|
||||||
Duration, datetime.timedelta, str, int, float, Tuple[int, int, int, int], List[int]
|
Duration, datetime.timedelta, str, int, float, Tuple[int, int, int, int], List[int]
|
||||||
],
|
],
|
||||||
) -> Duration:
|
as_string: Literal[False] | None = None,
|
||||||
"""Converts various input types into a Duration object using pendulum.
|
) -> Duration: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def to_duration(
|
||||||
|
input_value: Union[
|
||||||
|
Duration, datetime.timedelta, str, int, float, Tuple[int, int, int, int], List[int]
|
||||||
|
],
|
||||||
|
as_string: str | Literal[True] = True,
|
||||||
|
) -> str: ...
|
||||||
|
|
||||||
|
|
||||||
|
def to_duration(
|
||||||
|
input_value: Union[
|
||||||
|
Duration, datetime.timedelta, str, int, float, Tuple[int, int, int, int], List[int]
|
||||||
|
],
|
||||||
|
as_string: Optional[Union[str, bool]] = None,
|
||||||
|
) -> Union[Duration, str]:
|
||||||
|
"""Converts various input types into a `pendulum.Duration` or a formatted duration string.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
input_value (Union[Duration, timedelta, str, int, float, tuple, list]): Input to be converted
|
input_value (Union[Duration, timedelta, str, int, float, tuple, list]):
|
||||||
into a timedelta:
|
The input value to convert into a duration.
|
||||||
- str: A duration string like "2 days", "5 hours", "30 minutes", or a combination.
|
Supported types include:
|
||||||
- int/float: Number representing seconds.
|
|
||||||
- tuple/list: A tuple or list in the format (days, hours, minutes, seconds).
|
- `pendulum.Duration`: Returned unchanged unless formatting is requested.
|
||||||
|
- `datetime.timedelta`: Converted based on total seconds.
|
||||||
|
- `str`: A duration expression (e.g., `"15 minutes"`, `"2 hours"`),
|
||||||
|
or a string parsed by Pendulum.
|
||||||
|
- `int` or `float`: Interpreted as a number of seconds.
|
||||||
|
- `tuple` or `list`: Must be `(days, hours, minutes, seconds)`.
|
||||||
|
|
||||||
|
as_string (Optional[Union[str, bool]]):
|
||||||
|
Controls the output format of the returned duration:
|
||||||
|
|
||||||
|
- `None` or `False` (default):
|
||||||
|
Returns a `pendulum.Duration` object.
|
||||||
|
- `True`:
|
||||||
|
Returns an ISO-8601 duration string (e.g., `"PT15M"`).
|
||||||
|
- `"human"`:
|
||||||
|
Returns a human-readable form (e.g., `"15 minutes"`).
|
||||||
|
- `"pandas"`:
|
||||||
|
Returns a Pandas frequency string such as:
|
||||||
|
- `"1h"` for 1 hour
|
||||||
|
- `"15min"` for 15 minutes
|
||||||
|
- `"900s"` for 900 seconds
|
||||||
|
- `str`:
|
||||||
|
A custom format pattern. The following format tokens are supported:
|
||||||
|
- `{S}` → total seconds
|
||||||
|
- `{M}` → total minutes (integer)
|
||||||
|
- `{H}` → total hours (integer)
|
||||||
|
- `{f}` → human-friendly representation (Pendulum `in_words()`)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
`"Duration: {M} minutes"` → `"Duration: 15 minutes"`
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
duration: A Duration object corresponding to the input value.
|
Union[Duration, str]:
|
||||||
|
- A `pendulum.Duration` if no formatting is requested.
|
||||||
|
- A formatted string depending on the `as_string` option.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
ValueError: If the input format is not supported.
|
ValueError:
|
||||||
|
- If the input type is unsupported.
|
||||||
|
- If a duration string cannot be parsed.
|
||||||
|
- If `as_string` contains an unsupported format option.
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
>>> to_duration("2 days 5 hours")
|
>>> to_duration("15 minutes")
|
||||||
timedelta(days=2, seconds=18000)
|
<Duration [900 seconds]>
|
||||||
|
|
||||||
>>> to_duration(3600)
|
>>> to_duration("15 minutes", as_string=True)
|
||||||
timedelta(seconds=3600)
|
'PT15M'
|
||||||
|
|
||||||
>>> to_duration((1, 2, 30, 15))
|
>>> to_duration("15 minutes", as_string="human")
|
||||||
timedelta(days=1, seconds=90315)
|
'15 minutes'
|
||||||
|
|
||||||
|
>>> to_duration("90 seconds", as_string="pandas")
|
||||||
|
'90S'
|
||||||
|
|
||||||
|
>>> to_duration("15 minutes", as_string="{M}m")
|
||||||
|
'15m'
|
||||||
"""
|
"""
|
||||||
|
# ---- normalize to pendulum.Duration ----
|
||||||
|
duration = None
|
||||||
|
|
||||||
if isinstance(input_value, Duration):
|
if isinstance(input_value, Duration):
|
||||||
return input_value
|
duration = input_value
|
||||||
|
|
||||||
if isinstance(input_value, datetime.timedelta):
|
elif isinstance(input_value, datetime.timedelta):
|
||||||
return pendulum.duration(seconds=input_value.total_seconds())
|
duration = pendulum.duration(seconds=input_value.total_seconds())
|
||||||
|
|
||||||
if isinstance(input_value, (int, float)):
|
elif isinstance(input_value, (int, float)):
|
||||||
# Handle integers or floats as seconds
|
duration = pendulum.duration(seconds=input_value)
|
||||||
return pendulum.duration(seconds=input_value)
|
|
||||||
|
|
||||||
elif isinstance(input_value, (tuple, list)):
|
elif isinstance(input_value, (tuple, list)):
|
||||||
# Handle tuple or list: (days, hours, minutes, seconds)
|
if len(input_value) != 4:
|
||||||
if len(input_value) == 4:
|
error_msg = f"Expected tuple/list length 4, got {len(input_value)}"
|
||||||
days, hours, minutes, seconds = input_value
|
|
||||||
return pendulum.duration(days=days, hours=hours, minutes=minutes, seconds=seconds)
|
|
||||||
else:
|
|
||||||
error_msg = f"Expected a tuple or list of length 4, got {len(input_value)}"
|
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
|
days, hours, minutes, seconds = input_value
|
||||||
|
duration = pendulum.duration(days=days, hours=hours, minutes=minutes, seconds=seconds)
|
||||||
|
|
||||||
elif isinstance(input_value, str):
|
elif isinstance(input_value, str):
|
||||||
# Use pendulum's parsing for human-readable duration strings
|
# first try pendulum.parse
|
||||||
try:
|
try:
|
||||||
parsed = pendulum.parse(input_value)
|
parsed = pendulum.parse(input_value)
|
||||||
if isinstance(parsed, pendulum.Duration):
|
if isinstance(parsed, pendulum.Duration):
|
||||||
return parsed # Already a duration
|
duration = parsed # Already a duration
|
||||||
else:
|
else:
|
||||||
# It's a DateTime, calculate duration from start of day
|
# It's a DateTime, calculate duration from start of day
|
||||||
return parsed - parsed.start_of("day")
|
duration = parsed - parsed.start_of("day")
|
||||||
except pendulum.parsing.exceptions.ParserError as e:
|
except pendulum.parsing.exceptions.ParserError as e:
|
||||||
logger.trace(f"Invalid Pendulum time string format '{input_value}': {e}")
|
logger.trace(f"Invalid Pendulum time string format '{input_value}': {e}")
|
||||||
|
|
||||||
# Handle strings like "2 days 5 hours 30 minutes"
|
# Mitigate ReDoS vulnerability (#494) by checking input string length.
|
||||||
total_seconds = 0
|
if len(input_value) > MAX_DURATION_STRING_LENGTH:
|
||||||
time_units = {
|
error_msg = (
|
||||||
"day": 86400, # 24 * 60 * 60
|
f"Input string exceeds maximum allowed length ({MAX_DURATION_STRING_LENGTH})."
|
||||||
"hour": 3600,
|
)
|
||||||
"minute": 60,
|
|
||||||
"second": 1,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Mitigate ReDoS vulnerability (#494) by checking input string length.
|
|
||||||
if len(input_value) > MAX_DURATION_STRING_LENGTH:
|
|
||||||
raise ValueError(
|
|
||||||
f"Input string exceeds maximum allowed length ({MAX_DURATION_STRING_LENGTH})."
|
|
||||||
)
|
|
||||||
# Regular expression to match time components like '2 days', '5 hours', etc.
|
|
||||||
matches = re.findall(r"(\d+)\s*(days?|hours?|minutes?|seconds?)", input_value)
|
|
||||||
|
|
||||||
if not matches:
|
|
||||||
error_msg = f"Invalid time string format '{input_value}'"
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise ValueError(error_msg)
|
|
||||||
|
|
||||||
for value, unit in matches:
|
|
||||||
unit = unit.lower().rstrip("s") # Normalize unit
|
|
||||||
if unit in time_units:
|
|
||||||
total_seconds += int(value) * time_units[unit]
|
|
||||||
else:
|
|
||||||
error_msg = f"Unsupported time unit: {unit}"
|
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
return pendulum.duration(seconds=total_seconds)
|
# Handle strings like "2 days 5 hours 30 minutes"
|
||||||
|
matches = re.findall(r"(\d+)\s*(days?|hours?|minutes?|seconds?)", input_value)
|
||||||
|
if not matches:
|
||||||
|
error_msg = f"Invalid time string format '{input_value}'"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
total_seconds = 0
|
||||||
|
time_units = {
|
||||||
|
"day": 86400,
|
||||||
|
"hour": 3600,
|
||||||
|
"minute": 60,
|
||||||
|
"second": 1,
|
||||||
|
}
|
||||||
|
for value, unit in matches:
|
||||||
|
unit = unit.lower().rstrip("s") # Normalize unit
|
||||||
|
if unit in time_units:
|
||||||
|
total_seconds += int(value) * time_units[unit]
|
||||||
|
else:
|
||||||
|
error_msg = f"Unsupported time unit: {unit}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
duration = pendulum.duration(seconds=total_seconds)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
error_msg = f"Unsupported input type: {type(input_value)}"
|
error_msg = f"Unsupported input type: {type(input_value)}"
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
# ---- now apply as_string rules ----
|
||||||
|
if not as_string:
|
||||||
|
return duration
|
||||||
|
|
||||||
|
total_seconds = int(duration.total_seconds())
|
||||||
|
|
||||||
|
# Boolean True → ISO-8601
|
||||||
|
if as_string is True:
|
||||||
|
return duration_to_iso8601(duration)
|
||||||
|
|
||||||
|
# Human-readable
|
||||||
|
if as_string == "human":
|
||||||
|
return duration.in_words()
|
||||||
|
|
||||||
|
# Pandas frequency
|
||||||
|
if as_string == "pandas":
|
||||||
|
# hours?
|
||||||
|
if total_seconds % 3600 == 0:
|
||||||
|
return f"{total_seconds // 3600}h"
|
||||||
|
# minutes?
|
||||||
|
if total_seconds % 60 == 0:
|
||||||
|
return f"{total_seconds // 60}min"
|
||||||
|
# else seconds (fallback)
|
||||||
|
return f"{total_seconds}s"
|
||||||
|
|
||||||
|
# Custom format string
|
||||||
|
if isinstance(as_string, str):
|
||||||
|
return as_string.format(
|
||||||
|
S=total_seconds,
|
||||||
|
M=total_seconds // 60,
|
||||||
|
H=total_seconds // 3600,
|
||||||
|
f=duration.in_words(),
|
||||||
|
)
|
||||||
|
|
||||||
|
error_msg = f"Unsupported as_string value: {as_string}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def to_timezone(
|
def to_timezone(
|
||||||
|
|||||||
@@ -30,7 +30,6 @@ from akkudoktoreos.server.server import get_default_host
|
|||||||
# Adapt pytest logging handling to Loguru logging
|
# Adapt pytest logging handling to Loguru logging
|
||||||
# -----------------------------------------------
|
# -----------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def caplog(caplog: LogCaptureFixture):
|
def caplog(caplog: LogCaptureFixture):
|
||||||
"""Propagate Loguru logs to the pytest caplog handler."""
|
"""Propagate Loguru logs to the pytest caplog handler."""
|
||||||
@@ -430,13 +429,20 @@ def server_base(
|
|||||||
eos_dir = str(eos_tmp_dir.name)
|
eos_dir = str(eos_tmp_dir.name)
|
||||||
|
|
||||||
class Starter(ProcessStarter):
|
class Starter(ProcessStarter):
|
||||||
|
# Set environment for server run
|
||||||
|
env = os.environ.copy()
|
||||||
|
env["EOS_DIR"] = eos_dir
|
||||||
|
env["EOS_CONFIG_DIR"] = eos_dir
|
||||||
|
if extra_env:
|
||||||
|
env.update(extra_env)
|
||||||
|
|
||||||
# assure server to be installed
|
# assure server to be installed
|
||||||
try:
|
try:
|
||||||
project_dir = Path(__file__).parent.parent
|
project_dir = Path(__file__).parent.parent
|
||||||
subprocess.run(
|
subprocess.run(
|
||||||
[sys.executable, "-c", "import", "akkudoktoreos.server.eos"],
|
[sys.executable, "-c", "import", "akkudoktoreos.server.eos"],
|
||||||
check=True,
|
check=True,
|
||||||
env=os.environ,
|
env=env,
|
||||||
stdout=subprocess.PIPE,
|
stdout=subprocess.PIPE,
|
||||||
stderr=subprocess.PIPE,
|
stderr=subprocess.PIPE,
|
||||||
cwd=project_dir,
|
cwd=project_dir,
|
||||||
@@ -444,20 +450,13 @@ def server_base(
|
|||||||
except subprocess.CalledProcessError:
|
except subprocess.CalledProcessError:
|
||||||
subprocess.run(
|
subprocess.run(
|
||||||
[sys.executable, "-m", "pip", "install", "-e", str(project_dir)],
|
[sys.executable, "-m", "pip", "install", "-e", str(project_dir)],
|
||||||
env=os.environ,
|
env=env,
|
||||||
check=True,
|
check=True,
|
||||||
stdout=subprocess.PIPE,
|
stdout=subprocess.PIPE,
|
||||||
stderr=subprocess.PIPE,
|
stderr=subprocess.PIPE,
|
||||||
cwd=project_dir,
|
cwd=project_dir,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Set environment for server run
|
|
||||||
env = os.environ.copy()
|
|
||||||
env["EOS_DIR"] = eos_dir
|
|
||||||
env["EOS_CONFIG_DIR"] = eos_dir
|
|
||||||
if extra_env:
|
|
||||||
env.update(extra_env)
|
|
||||||
|
|
||||||
# Set command to start server process
|
# Set command to start server process
|
||||||
args = [
|
args = [
|
||||||
sys.executable,
|
sys.executable,
|
||||||
@@ -487,6 +486,25 @@ def server_base(
|
|||||||
logger.debug(f"[xprocess] Exception during health check: {e}")
|
logger.debug(f"[xprocess] Exception during health check: {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def wait_callback(self):
|
||||||
|
"""Assert that process is ready to answer queries using provided
|
||||||
|
callback funtion. Will raise TimeoutError if self.callback does not
|
||||||
|
return True before self.timeout seconds"""
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
while True:
|
||||||
|
time.sleep(1.0)
|
||||||
|
if self.startup_check():
|
||||||
|
return True
|
||||||
|
if datetime.now() > self._max_time:
|
||||||
|
info = self.process.getinfo("eos")
|
||||||
|
error_msg = (
|
||||||
|
f"The provided startup check could not assert process responsiveness\n"
|
||||||
|
f"within the specified time interval of {self.timeout} seconds.\n"
|
||||||
|
f"Server log is in '{info.logpath}'.\n"
|
||||||
|
)
|
||||||
|
raise TimeoutError(error_msg)
|
||||||
|
|
||||||
# Kill all running eos and eosdash process - just to be sure
|
# Kill all running eos and eosdash process - just to be sure
|
||||||
cleanup_eos_eosdash(host, port, eosdash_host, eosdash_port, server_timeout)
|
cleanup_eos_eosdash(host, port, eosdash_host, eosdash_port, server_timeout)
|
||||||
|
|
||||||
@@ -494,10 +512,12 @@ def server_base(
|
|||||||
config_file_path = Path(eos_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME)
|
config_file_path = Path(eos_dir).joinpath(ConfigEOS.CONFIG_FILE_NAME)
|
||||||
with config_file_path.open(mode="w", encoding="utf-8", newline="\n") as fd:
|
with config_file_path.open(mode="w", encoding="utf-8", newline="\n") as fd:
|
||||||
json.dump({}, fd)
|
json.dump({}, fd)
|
||||||
|
logger.info(f"Created empty config file in {config_file_path}.")
|
||||||
|
|
||||||
# ensure process is running and return its logfile
|
# ensure process is running and return its logfile
|
||||||
pid, logfile = xprocess.ensure("eos", Starter)
|
pid, logfile = xprocess.ensure("eos", Starter)
|
||||||
logger.info(f"Started EOS ({pid}). This may take very long (up to {server_timeout} seconds).")
|
logger.info(f"Started EOS ({pid}). This may take very long (up to {server_timeout} seconds).")
|
||||||
|
logger.info(f"EOS_DIR: {Starter.env["EOS_DIR"]}, EOS_CONFIG_DIR: {Starter.env["EOS_CONFIG_DIR"]}")
|
||||||
logger.info(f"View xprocess logfile at: {logfile}")
|
logger.info(f"View xprocess logfile at: {logfile}")
|
||||||
|
|
||||||
yield {
|
yield {
|
||||||
@@ -509,7 +529,7 @@ def server_base(
|
|||||||
"timeout": server_timeout,
|
"timeout": server_timeout,
|
||||||
}
|
}
|
||||||
|
|
||||||
# clean up whole process tree afterwards
|
# clean up whole process tree afterwards
|
||||||
xprocess.getinfo("eos").terminate()
|
xprocess.getinfo("eos").terminate()
|
||||||
|
|
||||||
# Cleanup any EOS process left.
|
# Cleanup any EOS process left.
|
||||||
|
|||||||
79
tests/test_adapter.py
Normal file
79
tests/test_adapter.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
"""
|
||||||
|
Tests for Adapter and AdapterContainer integration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TypeAlias
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from akkudoktoreos.adapter.adapter import (
|
||||||
|
Adapter,
|
||||||
|
AdapterCommonSettings,
|
||||||
|
get_adapter,
|
||||||
|
)
|
||||||
|
from akkudoktoreos.adapter.adapterabc import AdapterContainer
|
||||||
|
from akkudoktoreos.adapter.homeassistant import HomeAssistantAdapter
|
||||||
|
from akkudoktoreos.adapter.nodered import NodeREDAdapter
|
||||||
|
|
||||||
|
# ---------- Typed aliases for fixtures ----------
|
||||||
|
AdapterFixture: TypeAlias = Adapter
|
||||||
|
SettingsFixture: TypeAlias = AdapterCommonSettings
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- Fixtures ----------
|
||||||
|
@pytest.fixture
|
||||||
|
def adapter() -> AdapterFixture:
|
||||||
|
"""Fixture returning a fully initialized Adapter instance."""
|
||||||
|
return get_adapter()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def settings() -> SettingsFixture:
|
||||||
|
"""Fixture providing default adapter common settings."""
|
||||||
|
return AdapterCommonSettings()
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- Test Class ----------
|
||||||
|
class TestAdapter:
|
||||||
|
def test_is_adapter_container(self, adapter: AdapterFixture) -> None:
|
||||||
|
"""Adapter should be an AdapterContainer and an Adapter."""
|
||||||
|
assert isinstance(adapter, AdapterContainer)
|
||||||
|
assert isinstance(adapter, Adapter)
|
||||||
|
|
||||||
|
def test_providers_present(self, adapter: AdapterFixture) -> None:
|
||||||
|
"""Adapter must contain HA and NodeRED providers."""
|
||||||
|
assert len(adapter.providers) == 2
|
||||||
|
assert any(isinstance(p, HomeAssistantAdapter) for p in adapter.providers)
|
||||||
|
assert any(isinstance(p, NodeREDAdapter) for p in adapter.providers)
|
||||||
|
|
||||||
|
def test_adapter_order(self, adapter: AdapterFixture) -> None:
|
||||||
|
"""Provider order should match HomeAssistantAdapter -> NodeREDAdapter."""
|
||||||
|
assert isinstance(adapter.providers[0], HomeAssistantAdapter)
|
||||||
|
assert isinstance(adapter.providers[1], NodeREDAdapter)
|
||||||
|
|
||||||
|
# ----- AdapterCommonSettings -----
|
||||||
|
|
||||||
|
def test_settings_default_provider(self, settings: SettingsFixture) -> None:
|
||||||
|
"""Default provider should be None."""
|
||||||
|
assert settings.provider is None
|
||||||
|
|
||||||
|
def test_settings_accepts_single_provider(self, settings: SettingsFixture) -> None:
|
||||||
|
"""Settings should accept a single provider literal."""
|
||||||
|
settings.provider = ["HomeAssistant"]
|
||||||
|
assert settings.provider == ["HomeAssistant"]
|
||||||
|
|
||||||
|
def test_settings_accepts_multiple_providers(self, settings: SettingsFixture) -> None:
|
||||||
|
"""Settings should accept multiple provider literals."""
|
||||||
|
settings.provider = ["HomeAssistant", "NodeRED"]
|
||||||
|
assert isinstance(settings.provider, list)
|
||||||
|
assert settings.provider == ["HomeAssistant", "NodeRED"]
|
||||||
|
|
||||||
|
def test_provider_sub_settings(self, settings: SettingsFixture) -> None:
|
||||||
|
"""sub-settings (homeassistant & nodered) must be initialized."""
|
||||||
|
assert hasattr(settings, "homeassistant")
|
||||||
|
assert hasattr(settings, "nodered")
|
||||||
|
assert settings.homeassistant is not None
|
||||||
|
assert settings.nodered is not None
|
||||||
127
tests/test_adapternodered.py
Normal file
127
tests/test_adapternodered.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from akkudoktoreos.adapter.adapter import AdapterCommonSettings
|
||||||
|
from akkudoktoreos.adapter.nodered import NodeREDAdapter, NodeREDAdapterCommonSettings
|
||||||
|
from akkudoktoreos.core.emplan import DDBCInstruction, FRBCInstruction
|
||||||
|
from akkudoktoreos.core.ems import EnergyManagementStage
|
||||||
|
from akkudoktoreos.utils.datetimeutil import DateTime, compare_datetimes, to_datetime
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_ems() -> MagicMock:
|
||||||
|
m = MagicMock()
|
||||||
|
m.stage.return_value = EnergyManagementStage.DATA_ACQUISITION
|
||||||
|
m.plan.return_value.get_active_instructions.return_value = []
|
||||||
|
return m
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def adapter(config_eos, mock_ems: MagicMock) -> NodeREDAdapter:
|
||||||
|
"""Fully Pydantic-safe NodeREDAdapter fixture."""
|
||||||
|
# Set nested value - also fills None values
|
||||||
|
config_eos.set_nested_value("adapter/provider", ["NodeRED"])
|
||||||
|
|
||||||
|
ad = NodeREDAdapter()
|
||||||
|
|
||||||
|
# Mark update datetime invalid
|
||||||
|
ad.update_datetime = None
|
||||||
|
|
||||||
|
# Assign EMS
|
||||||
|
object.__setattr__(ad, "ems", mock_ems)
|
||||||
|
|
||||||
|
return ad
|
||||||
|
|
||||||
|
|
||||||
|
class TestNodeREDAdapter:
|
||||||
|
|
||||||
|
def test_provider_id(self, adapter: NodeREDAdapter):
|
||||||
|
assert adapter.provider_id() == "NodeRED"
|
||||||
|
|
||||||
|
def test_enabled_detection_single(self, adapter: NodeREDAdapter):
|
||||||
|
adapter.config.adapter.provider = ["NodeRED"]
|
||||||
|
assert adapter.enabled() is True
|
||||||
|
adapter.config.adapter.provider = ["HomeAssistant"]
|
||||||
|
assert adapter.enabled() is False
|
||||||
|
adapter.config.adapter.provider = ["HomeAssistant", "NodeRED"]
|
||||||
|
assert adapter.enabled() is True
|
||||||
|
|
||||||
|
@patch("requests.get")
|
||||||
|
def test_update_datetime(self, mock_get, adapter: NodeREDAdapter):
|
||||||
|
adapter.ems.stage.return_value = EnergyManagementStage.DATA_ACQUISITION
|
||||||
|
mock_get.return_value.status_code = 200
|
||||||
|
mock_get.return_value.json.return_value = {"foo": "bar"}
|
||||||
|
now = to_datetime()
|
||||||
|
|
||||||
|
adapter.update_data(force_enable=True)
|
||||||
|
|
||||||
|
mock_get.assert_called_once()
|
||||||
|
assert compare_datetimes(adapter.update_datetime, now).approximately_equal
|
||||||
|
|
||||||
|
@patch("requests.get")
|
||||||
|
def test_update_data_data_acquisition_success(self, mock_get , adapter: NodeREDAdapter):
|
||||||
|
adapter.ems.stage.return_value = EnergyManagementStage.DATA_ACQUISITION
|
||||||
|
mock_get.return_value.status_code = 200
|
||||||
|
mock_get.return_value.json.return_value = {"foo": "bar"}
|
||||||
|
|
||||||
|
adapter.update_data(force_enable=True)
|
||||||
|
|
||||||
|
mock_get.assert_called_once()
|
||||||
|
url, = mock_get.call_args[0]
|
||||||
|
assert "/eos/data_aquisition" in url
|
||||||
|
|
||||||
|
@patch("requests.get", side_effect=Exception("boom"))
|
||||||
|
def test_update_data_data_acquisition_failure(self, mock_get, adapter: NodeREDAdapter):
|
||||||
|
adapter.ems.stage.return_value = EnergyManagementStage.DATA_ACQUISITION
|
||||||
|
with pytest.raises(RuntimeError):
|
||||||
|
adapter.update_data(force_enable=True)
|
||||||
|
|
||||||
|
@patch("requests.post")
|
||||||
|
def test_update_data_control_dispatch_instructions(self, mock_post, adapter: NodeREDAdapter):
|
||||||
|
adapter.ems.stage.return_value = EnergyManagementStage.CONTROL_DISPATCH
|
||||||
|
|
||||||
|
instr1 = DDBCInstruction(
|
||||||
|
id="res1@extra", operation_mode_id="X", operation_mode_factor=0.5,
|
||||||
|
actuator_id="dummy", execution_time=to_datetime()
|
||||||
|
)
|
||||||
|
instr2 = FRBCInstruction(
|
||||||
|
id="resA", operation_mode_id="Y", operation_mode_factor=0.25,
|
||||||
|
actuator_id="dummy", execution_time=to_datetime()
|
||||||
|
)
|
||||||
|
adapter.ems.plan.return_value.get_active_instructions.return_value = [instr1, instr2]
|
||||||
|
|
||||||
|
mock_post.return_value.status_code = 200
|
||||||
|
mock_post.return_value.json.return_value = {}
|
||||||
|
|
||||||
|
adapter.update_data(force_enable=True)
|
||||||
|
|
||||||
|
_, kwargs = mock_post.call_args
|
||||||
|
payload = kwargs["json"]
|
||||||
|
assert payload["res1_op_mode"] == "X"
|
||||||
|
assert payload["res1_op_factor"] == 0.5
|
||||||
|
assert payload["resA_op_mode"] == "Y"
|
||||||
|
assert payload["resA_op_factor"] == 0.25
|
||||||
|
url, = mock_post.call_args[0]
|
||||||
|
assert "/eos/control_dispatch" in url
|
||||||
|
|
||||||
|
@patch("requests.post")
|
||||||
|
def test_update_data_disabled_provider(self, mock_post, adapter: NodeREDAdapter):
|
||||||
|
adapter.config.adapter.provider = ["HomeAssistant"] # NodeRED disabled
|
||||||
|
adapter.update_data(force_enable=False)
|
||||||
|
mock_post.assert_not_called()
|
||||||
|
|
||||||
|
@patch("requests.post")
|
||||||
|
def test_update_data_force_enable_overrides_disabled(self, mock_post, adapter: NodeREDAdapter):
|
||||||
|
adapter.config.adapter.provider = ["HomeAssistant"]
|
||||||
|
adapter.ems.stage.return_value = EnergyManagementStage.CONTROL_DISPATCH
|
||||||
|
mock_post.return_value.status_code = 200
|
||||||
|
mock_post.return_value.json.return_value = {}
|
||||||
|
|
||||||
|
adapter.update_data(force_enable=True)
|
||||||
|
|
||||||
|
mock_post.assert_called_once()
|
||||||
@@ -52,7 +52,9 @@ def test_config_constants(config_eos):
|
|||||||
def test_computed_paths(config_eos):
|
def test_computed_paths(config_eos):
|
||||||
"""Test computed paths for output and cache."""
|
"""Test computed paths for output and cache."""
|
||||||
# Don't actually try to create the data folder
|
# Don't actually try to create the data folder
|
||||||
with patch("pathlib.Path.mkdir"):
|
with patch("pathlib.Path.mkdir"), \
|
||||||
|
patch("pathlib.Path.is_dir", return_value=True), \
|
||||||
|
patch("pathlib.Path.exists", return_value=True):
|
||||||
config_eos.merge_settings_from_dict(
|
config_eos.merge_settings_from_dict(
|
||||||
{
|
{
|
||||||
"general": {
|
"general": {
|
||||||
@@ -371,7 +373,7 @@ def test_config_common_settings_timezone_none_when_coordinates_missing():
|
|||||||
BATTERY_DEFAULT_CHARGE_RATES,
|
BATTERY_DEFAULT_CHARGE_RATES,
|
||||||
)
|
)
|
||||||
],
|
],
|
||||||
KeyError,
|
TypeError,
|
||||||
),
|
),
|
||||||
# Invalid index (no number)
|
# Invalid index (no number)
|
||||||
(
|
(
|
||||||
@@ -383,7 +385,7 @@ def test_config_common_settings_timezone_none_when_coordinates_missing():
|
|||||||
BATTERY_DEFAULT_CHARGE_RATES,
|
BATTERY_DEFAULT_CHARGE_RATES,
|
||||||
)
|
)
|
||||||
],
|
],
|
||||||
KeyError,
|
IndexError,
|
||||||
),
|
),
|
||||||
# Unset value (set None)
|
# Unset value (set None)
|
||||||
(
|
(
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user