mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-10-11 20:06:18 +00:00
Compare commits
18 Commits
NormannK-p
...
dl_dev-arc
Author | SHA1 | Date | |
---|---|---|---|
|
87ac127817 | ||
|
480adf8100 | ||
|
90688a36f2 | ||
|
6516455071 | ||
|
84683cd195 | ||
|
26762e5e93 | ||
|
56403fe053 | ||
|
5bd8321e95 | ||
|
c1dd31528b | ||
|
1658b491d2 | ||
|
af5e4a753a | ||
|
e0b1ece524 | ||
|
437d38f508 | ||
|
95be7b914f | ||
|
3257dac92b | ||
|
1e1bac9fdb | ||
|
d74a56b75a | ||
|
be26457563 |
@@ -1,8 +1,8 @@
|
|||||||
.git/
|
.git/
|
||||||
.github/
|
.github/
|
||||||
eos-data/
|
**/__pycache__/
|
||||||
mariadb-data/
|
**/*.pyc
|
||||||
test_data/
|
**/*.egg-info/
|
||||||
.dockerignore
|
.dockerignore
|
||||||
.env
|
.env
|
||||||
.gitignore
|
.gitignore
|
||||||
@@ -12,4 +12,4 @@ LICENSE
|
|||||||
Makefile
|
Makefile
|
||||||
NOTICE
|
NOTICE
|
||||||
README.md
|
README.md
|
||||||
.venv
|
.venv/
|
||||||
|
6
.env
6
.env
@@ -1,5 +1,7 @@
|
|||||||
EOS_VERSION=main
|
EOS_VERSION=main
|
||||||
EOS_PORT=8503
|
EOS_SERVER__PORT=8503
|
||||||
EOSDASH_PORT=8504
|
EOS_SERVER__EOSDASH_PORT=8504
|
||||||
|
|
||||||
PYTHON_VERSION=3.12.6
|
PYTHON_VERSION=3.12.6
|
||||||
|
BASE_IMAGE=python
|
||||||
|
IMAGE_SUFFIX=-slim
|
||||||
|
9
.github/dependabot.yml
vendored
9
.github/dependabot.yml
vendored
@@ -5,16 +5,7 @@
|
|||||||
|
|
||||||
version: 2
|
version: 2
|
||||||
updates:
|
updates:
|
||||||
# Update dependencies on the main branch
|
|
||||||
- package-ecosystem: "pip" # See documentation for possible values
|
- package-ecosystem: "pip" # See documentation for possible values
|
||||||
directory: "/" # Location of package manifests
|
directory: "/" # Location of package manifests
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
target-branch: "main" # Target the main branch
|
|
||||||
|
|
||||||
# Update dependencies on the feature/config-nested branch
|
|
||||||
- package-ecosystem: "pip"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
target-branch: "feature/config-nested" # Target the specific feature branch
|
|
||||||
|
89
.github/workflows/docker-build.yml
vendored
89
.github/workflows/docker-build.yml
vendored
@@ -7,13 +7,11 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- 'main'
|
- 'main'
|
||||||
- 'feature/config-overhaul'
|
|
||||||
tags:
|
tags:
|
||||||
- 'v*'
|
- 'v*'
|
||||||
pull_request:
|
pull_request:
|
||||||
branches:
|
branches:
|
||||||
- 'main'
|
- '**'
|
||||||
- 'feature/config-overhaul'
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
DOCKERHUB_REPO: akkudoktor/eos
|
DOCKERHUB_REPO: akkudoktor/eos
|
||||||
@@ -40,7 +38,9 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
if ${{ github.event_name == 'pull_request' }}; then
|
if ${{ github.event_name == 'pull_request' }}; then
|
||||||
echo 'matrix=[
|
echo 'matrix=[
|
||||||
{"platform": "linux/arm64"}
|
{"platform": {"name": "linux/amd64"}},
|
||||||
|
{"platform": {"name": "linux/arm64"}},
|
||||||
|
{"platform": {"name": "linux/386"}},
|
||||||
]' | tr -d '[:space:]' >> $GITHUB_OUTPUT
|
]' | tr -d '[:space:]' >> $GITHUB_OUTPUT
|
||||||
else
|
else
|
||||||
echo 'matrix=[]' >> $GITHUB_OUTPUT
|
echo 'matrix=[]' >> $GITHUB_OUTPUT
|
||||||
@@ -58,13 +58,69 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
platform:
|
platform:
|
||||||
- linux/amd64
|
- name: linux/amd64
|
||||||
- linux/arm64
|
base: python
|
||||||
|
python: 3.12 # pendulum not yet on pypi for 3.13
|
||||||
|
rustup_install: ""
|
||||||
|
apt_packages: ""
|
||||||
|
apt_build_packages: ""
|
||||||
|
pip_extra_url: ""
|
||||||
|
- name: linux/arm64
|
||||||
|
base: python
|
||||||
|
python: 3.12 # pendulum not yet on pypi for 3.13
|
||||||
|
rustup_install: ""
|
||||||
|
apt_packages: ""
|
||||||
|
apt_build_packages: ""
|
||||||
|
pip_extra_url: ""
|
||||||
|
- name: linux/arm/v6
|
||||||
|
base: python
|
||||||
|
python: 3.11 # highest version on piwheels
|
||||||
|
rustup_install: true
|
||||||
|
# numpy: libopenblas0
|
||||||
|
# h5py: libhdf5-hl-310
|
||||||
|
#apt_packages: "libopenblas0 libhdf5-hl-310"
|
||||||
|
apt_packages: "" #TODO verify
|
||||||
|
# pendulum: git (apply patch)
|
||||||
|
# matplotlib (countourpy): g++
|
||||||
|
# fastapi (MarkupSafe): gcc
|
||||||
|
# rustup installer: curl
|
||||||
|
apt_build_packages: "curl git g++"
|
||||||
|
pip_extra_url: "https://www.piwheels.org/simple" # armv6/v7 packages
|
||||||
|
- name: linux/arm/v7
|
||||||
|
base: python
|
||||||
|
python: 3.11 # highest version on piwheels
|
||||||
|
rustup_install: true
|
||||||
|
# numpy: libopenblas0
|
||||||
|
# h5py: libhdf5-hl-310
|
||||||
|
#apt_packages: "libopenblas0 libhdf5-hl-310"
|
||||||
|
apt_packages: "" #TODO verify
|
||||||
|
# pendulum: git (apply patch)
|
||||||
|
# matplotlib (countourpy): g++
|
||||||
|
# fastapi (MarkupSafe): gcc
|
||||||
|
# rustup installer: curl
|
||||||
|
apt_build_packages: "curl git g++"
|
||||||
|
pip_extra_url: "https://www.piwheels.org/simple" # armv6/v7 packages
|
||||||
|
- name: linux/386
|
||||||
|
# Get 32bit distributor fix for pendulum, not yet officially released.
|
||||||
|
# Needs Debian testing instead of python:xyz which is based on Debian stable.
|
||||||
|
base: debian
|
||||||
|
python: trixie
|
||||||
|
rustup_install: ""
|
||||||
|
# numpy: libopenblas0
|
||||||
|
# h5py: libhdf5-hl-310
|
||||||
|
apt_packages: "python3-pendulum python3-pip libopenblas0 libhdf5-hl-310"
|
||||||
|
# numpy: g++, libc-dev
|
||||||
|
# skikit: pkgconf python3-dev, libopenblas-dev
|
||||||
|
# uvloop: make
|
||||||
|
# h5py: libhdf5-dev
|
||||||
|
# many others g++/gcc
|
||||||
|
apt_build_packages: "g++ pkgconf libc-dev python3-dev make libopenblas-dev libhdf5-dev"
|
||||||
|
pip_extra_url: ""
|
||||||
exclude: ${{ fromJSON(needs.platform-excludes.outputs.excludes) }}
|
exclude: ${{ fromJSON(needs.platform-excludes.outputs.excludes) }}
|
||||||
steps:
|
steps:
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
platform=${{ matrix.platform }}
|
platform=${{ matrix.platform.name }}
|
||||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
@@ -98,7 +154,8 @@ jobs:
|
|||||||
- name: Login to GHCR
|
- name: Login to GHCR
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
# skip for pull requests
|
# skip for pull requests
|
||||||
if: ${{ github.event_name != 'pull_request' }}
|
#TODO: uncomment again
|
||||||
|
#if: ${{ github.event_name != 'pull_request' }}
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
@@ -106,8 +163,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v3
|
||||||
# skip for pull requests
|
#if: ${{ github.event_name != 'pull_request' }}
|
||||||
if: ${{ github.event_name != 'pull_request' }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
@@ -116,10 +172,19 @@ jobs:
|
|||||||
id: build
|
id: build
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
platforms: ${{ matrix.platform }}
|
platforms: ${{ matrix.platform.name }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
annotations: ${{ steps.meta.outputs.annotations }}
|
annotations: ${{ steps.meta.outputs.annotations }}
|
||||||
outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,"push=${{ github.event_name != 'pull_request' }}","annotation-index.org.opencontainers.image.description=${{ env.EOS_REPO_DESCRIPTION }}"
|
#TODO: uncomment again
|
||||||
|
#outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,"push=${{ github.event_name != 'pull_request' }}","annotation-index.org.opencontainers.image.description=${{ env.EOS_REPO_DESCRIPTION }}"
|
||||||
|
outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=true,"annotation-index.org.opencontainers.image.description=${{ env.EOS_REPO_DESCRIPTION }}"
|
||||||
|
build-args: |
|
||||||
|
BASE_IMAGE=${{ matrix.platform.base }}
|
||||||
|
PYTHON_VERSION=${{ matrix.platform.python }}
|
||||||
|
PIP_EXTRA_INDEX_URL=${{ matrix.platform.pip_extra_url }}
|
||||||
|
APT_PACKAGES=${{ matrix.platform.apt_packages }}
|
||||||
|
APT_BUILD_PACKAGES=${{ matrix.platform.apt_build_packages }}
|
||||||
|
RUSTUP_INSTALL=${{ matrix.platform.rustup_install }}
|
||||||
|
|
||||||
- name: Generate artifact attestation DockerHub
|
- name: Generate artifact attestation DockerHub
|
||||||
uses: actions/attest-build-provenance@v2
|
uses: actions/attest-build-provenance@v2
|
||||||
|
3
.gitignore
vendored
3
.gitignore
vendored
@@ -260,3 +260,6 @@ tests/testdata/new_optimize_result*
|
|||||||
tests/testdata/openapi-new.json
|
tests/testdata/openapi-new.json
|
||||||
tests/testdata/openapi-new.md
|
tests/testdata/openapi-new.md
|
||||||
tests/testdata/config-new.md
|
tests/testdata/config-new.md
|
||||||
|
|
||||||
|
# FastHTML session key
|
||||||
|
.sesskey
|
||||||
|
@@ -33,12 +33,3 @@ repos:
|
|||||||
- "pandas-stubs==2.2.3.241009"
|
- "pandas-stubs==2.2.3.241009"
|
||||||
- "numpy==2.1.3"
|
- "numpy==2.1.3"
|
||||||
pass_filenames: false
|
pass_filenames: false
|
||||||
- repo: https://github.com/jackdewinter/pymarkdown
|
|
||||||
rev: main
|
|
||||||
hooks:
|
|
||||||
- id: pymarkdown
|
|
||||||
files: ^docs/
|
|
||||||
exclude: ^docs/_generated
|
|
||||||
args:
|
|
||||||
- --config=docs/pymarkdown.json
|
|
||||||
- scan
|
|
||||||
|
@@ -6,7 +6,7 @@ The `EOS` project is in early development, therefore we encourage contribution i
|
|||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
Latest development documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedocs.io/en/latest/).
|
Latest development documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedocs.io/en/main/).
|
||||||
|
|
||||||
## Bug Reports
|
## Bug Reports
|
||||||
|
|
||||||
@@ -33,7 +33,6 @@ See also [README.md](README.md).
|
|||||||
python -m venv .venv
|
python -m venv .venv
|
||||||
source .venv/bin/activate
|
source .venv/bin/activate
|
||||||
pip install -r requirements-dev.txt
|
pip install -r requirements-dev.txt
|
||||||
pip install -e .
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Install make to get access to helpful shortcuts (documentation generation, manual formatting, etc.).
|
Install make to get access to helpful shortcuts (documentation generation, manual formatting, etc.).
|
||||||
|
94
Dockerfile
94
Dockerfile
@@ -1,22 +1,20 @@
|
|||||||
ARG PYTHON_VERSION=3.12.7
|
ARG PYTHON_VERSION=3.12.8
|
||||||
FROM python:${PYTHON_VERSION}-slim
|
ARG BASE_IMAGE=python
|
||||||
|
ARG IMAGE_SUFFIX=-slim
|
||||||
|
FROM ${BASE_IMAGE}:${PYTHON_VERSION}${IMAGE_SUFFIX} AS base
|
||||||
|
|
||||||
LABEL source="https://github.com/Akkudoktor-EOS/EOS"
|
LABEL source="https://github.com/Akkudoktor-EOS/EOS"
|
||||||
|
|
||||||
ENV VIRTUAL_ENV="/opt/venv"
|
|
||||||
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
|
|
||||||
ENV MPLCONFIGDIR="/tmp/mplconfigdir"
|
ENV MPLCONFIGDIR="/tmp/mplconfigdir"
|
||||||
ENV EOS_DIR="/opt/eos"
|
ENV EOS_DIR="/opt/eos"
|
||||||
ENV EOS_CACHE_DIR="${EOS_DIR}/cache"
|
ENV EOS_CACHE_DIR="${EOS_DIR}/cache"
|
||||||
ENV EOS_OUTPUT_DIR="${EOS_DIR}/output"
|
ENV EOS_OUTPUT_DIR="${EOS_DIR}/output"
|
||||||
ENV EOS_CONFIG_DIR="${EOS_DIR}/config"
|
ENV EOS_CONFIG_DIR="${EOS_DIR}/config"
|
||||||
|
|
||||||
# Overwrite when starting the container in a production environment
|
|
||||||
ENV EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
|
||||||
|
|
||||||
WORKDIR ${EOS_DIR}
|
WORKDIR ${EOS_DIR}
|
||||||
|
|
||||||
RUN adduser --system --group --no-create-home eos \
|
# Use useradd over adduser to support both debian:x-slim and python:x-slim base images
|
||||||
|
RUN useradd --system --no-create-home --shell /usr/sbin/nologin eos \
|
||||||
&& mkdir -p "${MPLCONFIGDIR}" \
|
&& mkdir -p "${MPLCONFIGDIR}" \
|
||||||
&& chown eos "${MPLCONFIGDIR}" \
|
&& chown eos "${MPLCONFIGDIR}" \
|
||||||
&& mkdir -p "${EOS_CACHE_DIR}" \
|
&& mkdir -p "${EOS_CACHE_DIR}" \
|
||||||
@@ -26,13 +24,85 @@ RUN adduser --system --group --no-create-home eos \
|
|||||||
&& mkdir -p "${EOS_CONFIG_DIR}" \
|
&& mkdir -p "${EOS_CONFIG_DIR}" \
|
||||||
&& chown eos "${EOS_CONFIG_DIR}"
|
&& chown eos "${EOS_CONFIG_DIR}"
|
||||||
|
|
||||||
|
ARG APT_PACKAGES
|
||||||
|
ENV APT_PACKAGES="${APT_PACKAGES}"
|
||||||
|
RUN --mount=type=cache,sharing=locked,target=/var/lib/apt/lists \
|
||||||
|
--mount=type=cache,sharing=locked,target=/var/cache/apt \
|
||||||
|
rm /etc/apt/apt.conf.d/docker-clean; \
|
||||||
|
if [ -n "${APT_PACKAGES}" ]; then \
|
||||||
|
apt-get update \
|
||||||
|
&& apt-get install -y --no-install-recommends ${APT_PACKAGES}; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
FROM base AS build
|
||||||
|
ARG APT_BUILD_PACKAGES
|
||||||
|
ENV APT_BUILD_PACKAGES="${APT_BUILD_PACKAGES}"
|
||||||
|
RUN --mount=type=cache,sharing=locked,target=/var/lib/apt/lists \
|
||||||
|
--mount=type=cache,sharing=locked,target=/var/cache/apt \
|
||||||
|
rm /etc/apt/apt.conf.d/docker-clean; \
|
||||||
|
if [ -n "${APT_BUILD_PACKAGES}" ]; then \
|
||||||
|
apt-get update \
|
||||||
|
&& apt-get install -y --no-install-recommends ${APT_BUILD_PACKAGES}; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
ARG RUSTUP_INSTALL
|
||||||
|
ENV RUSTUP_INSTALL="${RUSTUP_INSTALL}"
|
||||||
|
ENV RUSTUP_HOME=/opt/rust
|
||||||
|
ENV CARGO_HOME=/opt/rust
|
||||||
|
ENV PATH="$RUSTUP_HOME/bin:$PATH"
|
||||||
|
ARG PIP_EXTRA_INDEX_URL
|
||||||
|
ENV PIP_EXTRA_INDEX_URL="${PIP_EXTRA_INDEX_URL}"
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
--mount=type=tmpfs,target=/root/.cargo \
|
||||||
|
dpkgArch=$(dpkg --print-architecture) \
|
||||||
|
&& if [ -n "${RUSTUP_INSTALL}" ]; then \
|
||||||
|
case "$dpkgArch" in \
|
||||||
|
# armv6
|
||||||
|
armel) \
|
||||||
|
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --target arm-unknown-linux-gnueabi --no-modify-path \
|
||||||
|
;; \
|
||||||
|
*) \
|
||||||
|
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --no-modify-path \
|
||||||
|
;; \
|
||||||
|
esac \
|
||||||
|
&& rustc --version \
|
||||||
|
&& cargo --version; \
|
||||||
|
fi \
|
||||||
|
# Install 32bit fix for pendulum, can be removed after next pendulum release (> 3.0.0)
|
||||||
|
&& case "$dpkgArch" in \
|
||||||
|
# armv7/armv6
|
||||||
|
armhf|armel) \
|
||||||
|
git clone https://github.com/python-pendulum/pendulum.git \
|
||||||
|
&& git -C pendulum checkout -b 3.0.0 3.0.0 \
|
||||||
|
# Apply 32bit patch
|
||||||
|
&& git -C pendulum -c user.name=ci -c user.email=ci@github.com cherry-pick b84b97625cdea00f8ab150b8b35aa5ccaaf36948 \
|
||||||
|
&& cd pendulum \
|
||||||
|
# Use pip3 over pip to support both debian:x and python:x base images
|
||||||
|
&& pip3 install maturin \
|
||||||
|
&& maturin build --release --out dist \
|
||||||
|
&& pip3 install dist/*.whl --break-system-packages \
|
||||||
|
&& cd - \
|
||||||
|
;; \
|
||||||
|
esac
|
||||||
|
|
||||||
|
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
|
|
||||||
|
# Use tmpfs for cargo due to qemu (multiarch) limitations
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
pip install -r requirements.txt
|
--mount=type=tmpfs,target=/root/.cargo \
|
||||||
|
# Use pip3 over pip to support both debian:x and python:x base images
|
||||||
|
pip3 install -r requirements.txt --break-system-packages
|
||||||
|
|
||||||
|
FROM base AS final
|
||||||
|
# Copy all python dependencies previously installed or built to the final stage.
|
||||||
|
COPY --from=build /usr/local/ /usr/local/
|
||||||
|
COPY --from=build /opt/eos/requirements.txt .
|
||||||
|
|
||||||
COPY pyproject.toml .
|
COPY pyproject.toml .
|
||||||
RUN mkdir -p src && pip install -e .
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
# Use pip3 over pip to support both debian:x and python:x base images
|
||||||
|
mkdir -p src && pip3 install -e . --break-system-packages
|
||||||
|
|
||||||
COPY src src
|
COPY src src
|
||||||
|
|
||||||
@@ -42,7 +112,7 @@ ENTRYPOINT []
|
|||||||
EXPOSE 8503
|
EXPOSE 8503
|
||||||
EXPOSE 8504
|
EXPOSE 8504
|
||||||
|
|
||||||
ENV server_eosdash_host=0.0.0.0
|
# Use python3 over python to support both debian:x and python:x base images
|
||||||
CMD ["python", "src/akkudoktoreos/server/eos.py", "--host", "0.0.0.0"]
|
CMD ["python3", "src/akkudoktoreos/server/eos.py", "--host", "0.0.0.0"]
|
||||||
|
|
||||||
VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"]
|
VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"]
|
||||||
|
4
Makefile
4
Makefile
@@ -17,8 +17,8 @@ help:
|
|||||||
@echo " docker-build - Rebuild docker image"
|
@echo " docker-build - Rebuild docker image"
|
||||||
@echo " docs - Generate HTML documentation (in build/docs/html/)."
|
@echo " docs - Generate HTML documentation (in build/docs/html/)."
|
||||||
@echo " read-docs - Read HTML documentation in your browser."
|
@echo " read-docs - Read HTML documentation in your browser."
|
||||||
@echo " gen-docs - Generate openapi.json and docs/_generated/*."
|
@echo " gen-docs - Generate openapi.json and docs/_generated/*.""
|
||||||
@echo " clean-docs - Remove generated documentation."
|
@echo " clean-docs - Remove generated documentation.""
|
||||||
@echo " run - Run EOS production server in the virtual environment."
|
@echo " run - Run EOS production server in the virtual environment."
|
||||||
@echo " run-dev - Run EOS development server in the virtual environment (automatically reloads)."
|
@echo " run-dev - Run EOS development server in the virtual environment (automatically reloads)."
|
||||||
@echo " dist - Create distribution (in dist/)."
|
@echo " dist - Create distribution (in dist/)."
|
||||||
|
20
README.md
20
README.md
@@ -8,19 +8,9 @@ Documentation can be found at [Akkudoktor-EOS](https://akkudoktor-eos.readthedoc
|
|||||||
|
|
||||||
See [CONTRIBUTING.md](CONTRIBUTING.md).
|
See [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||||
|
|
||||||
## System requirements
|
|
||||||
|
|
||||||
- Python >= 3.11, < 3.13
|
|
||||||
- Architecture: amd64, aarch64 (armv8)
|
|
||||||
- OS: Linux, Windows, macOS
|
|
||||||
|
|
||||||
Note: For Python 3.13 some dependencies (e.g. [Pendulum](https://github.com/python-pendulum/Pendulum)) are not yet available on https://pypi.org and have to be manually compiled (a recent [Rust](https://www.rust-lang.org/tools/install) installation is required).
|
|
||||||
|
|
||||||
Other architectures (e.g. armv6, armv7) are unsupported for now, because a multitude of dependencies are not available on https://piwheels.org and have to be built manually (a recent Rust installation and [GCC](https://gcc.gnu.org/) are required, Python 3.11 is recommended).
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
Docker images (amd64/aarch64) can be found at [akkudoktor/eos](https://hub.docker.com/r/akkudoktor/eos).
|
The project requires Python 3.10 or newer. Official docker images can be found at [akkudoktor/eos](https://hub.docker.com/r/akkudoktor/eos).
|
||||||
|
|
||||||
Following sections describe how to locally start the EOS server on `http://localhost:8503`.
|
Following sections describe how to locally start the EOS server on `http://localhost:8503`.
|
||||||
|
|
||||||
@@ -33,7 +23,6 @@ Linux:
|
|||||||
```bash
|
```bash
|
||||||
python -m venv .venv
|
python -m venv .venv
|
||||||
.venv/bin/pip install -r requirements.txt
|
.venv/bin/pip install -r requirements.txt
|
||||||
.venv/bin/pip install -e .
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Windows:
|
Windows:
|
||||||
@@ -41,10 +30,9 @@ Windows:
|
|||||||
```cmd
|
```cmd
|
||||||
python -m venv .venv
|
python -m venv .venv
|
||||||
.venv\Scripts\pip install -r requirements.txt
|
.venv\Scripts\pip install -r requirements.txt
|
||||||
.venv\Scripts\pip install -e .
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Finally, start the EOS server to access it at `http://localhost:8503` (API docs at `http://localhost:8503/docs`):
|
Finally, start the EOS server:
|
||||||
|
|
||||||
Linux:
|
Linux:
|
||||||
|
|
||||||
@@ -60,12 +48,12 @@ Windows:
|
|||||||
|
|
||||||
### Docker
|
### Docker
|
||||||
|
|
||||||
Start EOS with following command to access it at `http://localhost:8503` (API docs at `http://localhost:8503/docs`):
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker compose up
|
docker compose up
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you are running the EOS container on a system hosting multiple services, such as a Synology NAS, and want to allow external network access to EOS, please ensure that the default exported ports (8503, 8504) are available on the host. On Synology systems, these ports might already be in use (refer to [this guide](https://kb.synology.com/en-me/DSM/tutorial/What_network_ports_are_used_by_Synology_services)). If the ports are occupied, you will need to reconfigure the exported ports accordingly.
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
This project uses the `EOS.config.json` file to manage configuration settings.
|
This project uses the `EOS.config.json` file to manage configuration settings.
|
||||||
|
@@ -11,14 +11,21 @@ services:
|
|||||||
dockerfile: "Dockerfile"
|
dockerfile: "Dockerfile"
|
||||||
args:
|
args:
|
||||||
PYTHON_VERSION: "${PYTHON_VERSION}"
|
PYTHON_VERSION: "${PYTHON_VERSION}"
|
||||||
|
BASE_IMAGE: "${BASE_IMAGE}"
|
||||||
|
IMAGE_SUFFIX: "${IMAGE_SUFFIX}"
|
||||||
|
APT_PACKAGES: "${APT_PACKAGES:-}"
|
||||||
|
APT_BUILD_PACKAGES: "${APT_BUILD_PACKAGES:-}"
|
||||||
|
PIP_EXTRA_INDEX_URL: "${PIP_EXTRA_INDEX_URL:-}"
|
||||||
|
RUSTUP_INSTALL: "${RUSTUP_INSTALL:-}"
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
environment:
|
environment:
|
||||||
- EOS_CONFIG_DIR=config
|
- EOS_CONFIG_DIR=config
|
||||||
- latitude=52.2
|
|
||||||
- longitude=13.4
|
|
||||||
- elecprice_provider=ElecPriceAkkudoktor
|
|
||||||
- elecprice_charges_kwh=0.21
|
|
||||||
- EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
- EOS_SERVER__EOSDASH_SESSKEY=s3cr3t
|
||||||
|
- EOS_PREDICTION__LATITUDE=52.2
|
||||||
|
- EOS_PREDICTION__LONGITUDE=13.4
|
||||||
|
- EOS_ELECPRICE__PROVIDER=ElecPriceAkkudoktor
|
||||||
|
- EOS_ELECPRICE__CHARGES_KWH=0.21
|
||||||
ports:
|
ports:
|
||||||
# Configure what ports to expose on host
|
- "${EOS_SERVER__PORT}:${EOS_SERVER__PORT}"
|
||||||
- "${EOS_PORT}:8503"
|
- "${EOS_SERVER__EOSDASH_PORT}:${EOS_SERVER__EOSDASH_PORT}"
|
||||||
- "${EOSDASH_PORT}:8504"
|
|
||||||
|
File diff suppressed because it is too large
Load Diff
@@ -63,7 +63,7 @@ Args:
|
|||||||
year_energy (float): Yearly energy consumption in Wh.
|
year_energy (float): Yearly energy consumption in Wh.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set LoadAkkudoktor as load_provider, then update data with
|
Set LoadAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=load_mean' instead.
|
'/v1/prediction/list?key=load_mean' instead.
|
||||||
@@ -91,6 +91,8 @@ Fastapi Optimize
|
|||||||
|
|
||||||
- `start_hour` (query, optional): Defaults to current hour of the day.
|
- `start_hour` (query, optional): Defaults to current hour of the day.
|
||||||
|
|
||||||
|
- `ngen` (query, optional): No description provided.
|
||||||
|
|
||||||
**Request Body**:
|
**Request Body**:
|
||||||
|
|
||||||
- `application/json`: {
|
- `application/json`: {
|
||||||
@@ -121,7 +123,7 @@ If no forecast values are available the missing ones at the start of the series
|
|||||||
filled with the first available forecast value.
|
filled with the first available forecast value.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set PVForecastAkkudoktor as pvforecast_provider, then update data with
|
Set PVForecastAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=pvforecast_ac_power' and
|
'/v1/prediction/list?key=pvforecast_ac_power' and
|
||||||
@@ -151,7 +153,7 @@ Note:
|
|||||||
Electricity price charges are added.
|
Electricity price charges are added.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set ElecPriceAkkudoktor as elecprice_provider, then update data with
|
Set ElecPriceAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=elecprice_marketprice_wh' or
|
'/v1/prediction/list?key=elecprice_marketprice_wh' or
|
||||||
@@ -190,11 +192,11 @@ Returns:
|
|||||||
Fastapi Config Put
|
Fastapi Config Put
|
||||||
|
|
||||||
```
|
```
|
||||||
Write the provided settings into the current settings.
|
Update the current config with the provided settings.
|
||||||
|
|
||||||
The existing settings are completely overwritten. Note that for any setting
|
Note that for any setting value that is None or unset, the configuration will fall back to
|
||||||
value that is None, the configuration will fall back to values from other sources such as
|
values from other sources such as environment variables, the EOS configuration file, or default
|
||||||
environment variables, the EOS configuration file, or default values.
|
values.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
settings (SettingsEOS): The settings to write into the current settings.
|
settings (SettingsEOS): The settings to write into the current settings.
|
||||||
@@ -203,311 +205,11 @@ Returns:
|
|||||||
configuration (ConfigEOS): The current configuration after the write.
|
configuration (ConfigEOS): The current configuration after the write.
|
||||||
```
|
```
|
||||||
|
|
||||||
**Parameters**:
|
**Request Body**:
|
||||||
|
|
||||||
- `server_eos_host` (query, optional): EOS server IP address.
|
- `application/json`: {
|
||||||
|
"$ref": "#/components/schemas/SettingsEOS"
|
||||||
- `server_eos_port` (query, optional): EOS server IP port number.
|
}
|
||||||
|
|
||||||
- `server_eos_verbose` (query, optional): Enable debug output
|
|
||||||
|
|
||||||
- `server_eos_startup_eosdash` (query, optional): EOS server to start EOSdash server.
|
|
||||||
|
|
||||||
- `server_eosdash_host` (query, optional): EOSdash server IP address.
|
|
||||||
|
|
||||||
- `server_eosdash_port` (query, optional): EOSdash server IP port number.
|
|
||||||
|
|
||||||
- `weatherimport_file_path` (query, optional): Path to the file to import weather data from.
|
|
||||||
|
|
||||||
- `weatherimport_json` (query, optional): JSON string, dictionary of weather forecast value lists.
|
|
||||||
|
|
||||||
- `weather_provider` (query, optional): Weather provider id of provider to be used.
|
|
||||||
|
|
||||||
- `pvforecastimport_file_path` (query, optional): Path to the file to import PV forecast data from.
|
|
||||||
|
|
||||||
- `pvforecastimport_json` (query, optional): JSON string, dictionary of PV forecast value lists.
|
|
||||||
|
|
||||||
- `pvforecast_provider` (query, optional): PVForecast provider id of provider to be used.
|
|
||||||
|
|
||||||
- `pvforecast0_surface_tilt` (query, optional): Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast0_surface_azimuth` (query, optional): Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
|
|
||||||
- `pvforecast0_userhorizon` (query, optional): Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
|
|
||||||
- `pvforecast0_peakpower` (query, optional): Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
- `pvforecast0_pvtechchoice` (query, optional): PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
|
|
||||||
- `pvforecast0_mountingplace` (query, optional): Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
|
||||||
|
|
||||||
- `pvforecast0_loss` (query, optional): Sum of PV system losses in percent
|
|
||||||
|
|
||||||
- `pvforecast0_trackingtype` (query, optional): Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
|
||||||
|
|
||||||
- `pvforecast0_optimal_surface_tilt` (query, optional): Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast0_optimalangles` (query, optional): Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast0_albedo` (query, optional): Proportion of the light hitting the ground that it reflects back.
|
|
||||||
|
|
||||||
- `pvforecast0_module_model` (query, optional): Model of the PV modules of this plane.
|
|
||||||
|
|
||||||
- `pvforecast0_inverter_model` (query, optional): Model of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast0_inverter_paco` (query, optional): AC power rating of the inverter. [W]
|
|
||||||
|
|
||||||
- `pvforecast0_modules_per_string` (query, optional): Number of the PV modules of the strings of this plane.
|
|
||||||
|
|
||||||
- `pvforecast0_strings_per_inverter` (query, optional): Number of the strings of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast1_surface_tilt` (query, optional): Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast1_surface_azimuth` (query, optional): Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
|
|
||||||
- `pvforecast1_userhorizon` (query, optional): Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
|
|
||||||
- `pvforecast1_peakpower` (query, optional): Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
- `pvforecast1_pvtechchoice` (query, optional): PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
|
|
||||||
- `pvforecast1_mountingplace` (query, optional): Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
|
||||||
|
|
||||||
- `pvforecast1_loss` (query, optional): Sum of PV system losses in percent
|
|
||||||
|
|
||||||
- `pvforecast1_trackingtype` (query, optional): Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
|
||||||
|
|
||||||
- `pvforecast1_optimal_surface_tilt` (query, optional): Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast1_optimalangles` (query, optional): Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast1_albedo` (query, optional): Proportion of the light hitting the ground that it reflects back.
|
|
||||||
|
|
||||||
- `pvforecast1_module_model` (query, optional): Model of the PV modules of this plane.
|
|
||||||
|
|
||||||
- `pvforecast1_inverter_model` (query, optional): Model of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast1_inverter_paco` (query, optional): AC power rating of the inverter. [W]
|
|
||||||
|
|
||||||
- `pvforecast1_modules_per_string` (query, optional): Number of the PV modules of the strings of this plane.
|
|
||||||
|
|
||||||
- `pvforecast1_strings_per_inverter` (query, optional): Number of the strings of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast2_surface_tilt` (query, optional): Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast2_surface_azimuth` (query, optional): Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
|
|
||||||
- `pvforecast2_userhorizon` (query, optional): Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
|
|
||||||
- `pvforecast2_peakpower` (query, optional): Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
- `pvforecast2_pvtechchoice` (query, optional): PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
|
|
||||||
- `pvforecast2_mountingplace` (query, optional): Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
|
||||||
|
|
||||||
- `pvforecast2_loss` (query, optional): Sum of PV system losses in percent
|
|
||||||
|
|
||||||
- `pvforecast2_trackingtype` (query, optional): Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
|
||||||
|
|
||||||
- `pvforecast2_optimal_surface_tilt` (query, optional): Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast2_optimalangles` (query, optional): Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast2_albedo` (query, optional): Proportion of the light hitting the ground that it reflects back.
|
|
||||||
|
|
||||||
- `pvforecast2_module_model` (query, optional): Model of the PV modules of this plane.
|
|
||||||
|
|
||||||
- `pvforecast2_inverter_model` (query, optional): Model of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast2_inverter_paco` (query, optional): AC power rating of the inverter. [W]
|
|
||||||
|
|
||||||
- `pvforecast2_modules_per_string` (query, optional): Number of the PV modules of the strings of this plane.
|
|
||||||
|
|
||||||
- `pvforecast2_strings_per_inverter` (query, optional): Number of the strings of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast3_surface_tilt` (query, optional): Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast3_surface_azimuth` (query, optional): Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
|
|
||||||
- `pvforecast3_userhorizon` (query, optional): Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
|
|
||||||
- `pvforecast3_peakpower` (query, optional): Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
- `pvforecast3_pvtechchoice` (query, optional): PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
|
|
||||||
- `pvforecast3_mountingplace` (query, optional): Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
|
||||||
|
|
||||||
- `pvforecast3_loss` (query, optional): Sum of PV system losses in percent
|
|
||||||
|
|
||||||
- `pvforecast3_trackingtype` (query, optional): Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
|
||||||
|
|
||||||
- `pvforecast3_optimal_surface_tilt` (query, optional): Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast3_optimalangles` (query, optional): Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast3_albedo` (query, optional): Proportion of the light hitting the ground that it reflects back.
|
|
||||||
|
|
||||||
- `pvforecast3_module_model` (query, optional): Model of the PV modules of this plane.
|
|
||||||
|
|
||||||
- `pvforecast3_inverter_model` (query, optional): Model of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast3_inverter_paco` (query, optional): AC power rating of the inverter. [W]
|
|
||||||
|
|
||||||
- `pvforecast3_modules_per_string` (query, optional): Number of the PV modules of the strings of this plane.
|
|
||||||
|
|
||||||
- `pvforecast3_strings_per_inverter` (query, optional): Number of the strings of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast4_surface_tilt` (query, optional): Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast4_surface_azimuth` (query, optional): Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
|
|
||||||
- `pvforecast4_userhorizon` (query, optional): Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
|
|
||||||
- `pvforecast4_peakpower` (query, optional): Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
- `pvforecast4_pvtechchoice` (query, optional): PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
|
|
||||||
- `pvforecast4_mountingplace` (query, optional): Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
|
||||||
|
|
||||||
- `pvforecast4_loss` (query, optional): Sum of PV system losses in percent
|
|
||||||
|
|
||||||
- `pvforecast4_trackingtype` (query, optional): Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
|
||||||
|
|
||||||
- `pvforecast4_optimal_surface_tilt` (query, optional): Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast4_optimalangles` (query, optional): Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast4_albedo` (query, optional): Proportion of the light hitting the ground that it reflects back.
|
|
||||||
|
|
||||||
- `pvforecast4_module_model` (query, optional): Model of the PV modules of this plane.
|
|
||||||
|
|
||||||
- `pvforecast4_inverter_model` (query, optional): Model of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast4_inverter_paco` (query, optional): AC power rating of the inverter. [W]
|
|
||||||
|
|
||||||
- `pvforecast4_modules_per_string` (query, optional): Number of the PV modules of the strings of this plane.
|
|
||||||
|
|
||||||
- `pvforecast4_strings_per_inverter` (query, optional): Number of the strings of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast5_surface_tilt` (query, optional): Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast5_surface_azimuth` (query, optional): Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
|
|
||||||
- `pvforecast5_userhorizon` (query, optional): Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
|
|
||||||
- `pvforecast5_peakpower` (query, optional): Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
- `pvforecast5_pvtechchoice` (query, optional): PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
|
|
||||||
- `pvforecast5_mountingplace` (query, optional): Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
|
||||||
|
|
||||||
- `pvforecast5_loss` (query, optional): Sum of PV system losses in percent
|
|
||||||
|
|
||||||
- `pvforecast5_trackingtype` (query, optional): Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
|
||||||
|
|
||||||
- `pvforecast5_optimal_surface_tilt` (query, optional): Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast5_optimalangles` (query, optional): Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
|
|
||||||
- `pvforecast5_albedo` (query, optional): Proportion of the light hitting the ground that it reflects back.
|
|
||||||
|
|
||||||
- `pvforecast5_module_model` (query, optional): Model of the PV modules of this plane.
|
|
||||||
|
|
||||||
- `pvforecast5_inverter_model` (query, optional): Model of the inverter of this plane.
|
|
||||||
|
|
||||||
- `pvforecast5_inverter_paco` (query, optional): AC power rating of the inverter. [W]
|
|
||||||
|
|
||||||
- `pvforecast5_modules_per_string` (query, optional): Number of the PV modules of the strings of this plane.
|
|
||||||
|
|
||||||
- `pvforecast5_strings_per_inverter` (query, optional): Number of the strings of the inverter of this plane.
|
|
||||||
|
|
||||||
- `load_import_file_path` (query, optional): Path to the file to import load data from.
|
|
||||||
|
|
||||||
- `load_import_json` (query, optional): JSON string, dictionary of load forecast value lists.
|
|
||||||
|
|
||||||
- `loadakkudoktor_year_energy` (query, optional): Yearly energy consumption (kWh).
|
|
||||||
|
|
||||||
- `load_provider` (query, optional): Load provider id of provider to be used.
|
|
||||||
|
|
||||||
- `elecpriceimport_file_path` (query, optional): Path to the file to import elecprice data from.
|
|
||||||
|
|
||||||
- `elecpriceimport_json` (query, optional): JSON string, dictionary of electricity price forecast value lists.
|
|
||||||
|
|
||||||
- `elecprice_provider` (query, optional): Electricity price provider id of provider to be used.
|
|
||||||
|
|
||||||
- `elecprice_charges_kwh` (query, optional): Electricity price charges (€/kWh).
|
|
||||||
|
|
||||||
- `prediction_hours` (query, optional): Number of hours into the future for predictions
|
|
||||||
|
|
||||||
- `prediction_historic_hours` (query, optional): Number of hours into the past for historical predictions data
|
|
||||||
|
|
||||||
- `latitude` (query, optional): Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)
|
|
||||||
|
|
||||||
- `longitude` (query, optional): Longitude in decimal degrees, within -180 to 180 (°)
|
|
||||||
|
|
||||||
- `optimization_hours` (query, optional): Number of hours into the future for optimizations.
|
|
||||||
|
|
||||||
- `optimization_penalty` (query, optional): Penalty factor used in optimization.
|
|
||||||
|
|
||||||
- `optimization_ev_available_charge_rates_percent` (query, optional): Charge rates available for the EV in percent of maximum charge.
|
|
||||||
|
|
||||||
- `measurement_load0_name` (query, optional): Name of the load0 source (e.g. 'Household', 'Heat Pump')
|
|
||||||
|
|
||||||
- `measurement_load1_name` (query, optional): Name of the load1 source (e.g. 'Household', 'Heat Pump')
|
|
||||||
|
|
||||||
- `measurement_load2_name` (query, optional): Name of the load2 source (e.g. 'Household', 'Heat Pump')
|
|
||||||
|
|
||||||
- `measurement_load3_name` (query, optional): Name of the load3 source (e.g. 'Household', 'Heat Pump')
|
|
||||||
|
|
||||||
- `measurement_load4_name` (query, optional): Name of the load4 source (e.g. 'Household', 'Heat Pump')
|
|
||||||
|
|
||||||
- `battery_provider` (query, optional): Id of Battery simulation provider.
|
|
||||||
|
|
||||||
- `battery_capacity` (query, optional): Battery capacity [Wh].
|
|
||||||
|
|
||||||
- `battery_initial_soc` (query, optional): Battery initial state of charge [%].
|
|
||||||
|
|
||||||
- `battery_soc_min` (query, optional): Battery minimum state of charge [%].
|
|
||||||
|
|
||||||
- `battery_soc_max` (query, optional): Battery maximum state of charge [%].
|
|
||||||
|
|
||||||
- `battery_charging_efficiency` (query, optional): Battery charging efficiency [%].
|
|
||||||
|
|
||||||
- `battery_discharging_efficiency` (query, optional): Battery discharging efficiency [%].
|
|
||||||
|
|
||||||
- `battery_max_charging_power` (query, optional): Battery maximum charge power [W].
|
|
||||||
|
|
||||||
- `bev_provider` (query, optional): Id of Battery Electric Vehicle simulation provider.
|
|
||||||
|
|
||||||
- `bev_capacity` (query, optional): Battery Electric Vehicle capacity [Wh].
|
|
||||||
|
|
||||||
- `bev_initial_soc` (query, optional): Battery Electric Vehicle initial state of charge [%].
|
|
||||||
|
|
||||||
- `bev_soc_max` (query, optional): Battery Electric Vehicle maximum state of charge [%].
|
|
||||||
|
|
||||||
- `bev_charging_efficiency` (query, optional): Battery Electric Vehicle charging efficiency [%].
|
|
||||||
|
|
||||||
- `bev_discharging_efficiency` (query, optional): Battery Electric Vehicle discharging efficiency [%].
|
|
||||||
|
|
||||||
- `bev_max_charging_power` (query, optional): Battery Electric Vehicle maximum charge power [W].
|
|
||||||
|
|
||||||
- `dishwasher_provider` (query, optional): Id of Dish Washer simulation provider.
|
|
||||||
|
|
||||||
- `dishwasher_consumption` (query, optional): Dish Washer energy consumption [Wh].
|
|
||||||
|
|
||||||
- `dishwasher_duration` (query, optional): Dish Washer usage duration [h].
|
|
||||||
|
|
||||||
- `inverter_provider` (query, optional): Id of PV Inverter simulation provider.
|
|
||||||
|
|
||||||
- `inverter_power_max` (query, optional): Inverter maximum power [W].
|
|
||||||
|
|
||||||
- `logging_level_default` (query, optional): EOS default logging level.
|
|
||||||
|
|
||||||
- `data_folder_path` (query, optional): Path to EOS data directory.
|
|
||||||
|
|
||||||
- `data_output_subpath` (query, optional): Sub-path for the EOS output data directory.
|
|
||||||
|
|
||||||
- `data_cache_subpath` (query, optional): Sub-path for the EOS cache data directory.
|
|
||||||
|
|
||||||
**Responses**:
|
**Responses**:
|
||||||
|
|
||||||
@@ -517,25 +219,6 @@ Returns:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## GET /v1/config/file
|
|
||||||
|
|
||||||
**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_file_get_v1_config_file_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_file_get_v1_config_file_get)
|
|
||||||
|
|
||||||
Fastapi Config File Get
|
|
||||||
|
|
||||||
```
|
|
||||||
Get the settings as defined by the EOS configuration file.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
settings (SettingsEOS): The settings defined by the EOS configuration file.
|
|
||||||
```
|
|
||||||
|
|
||||||
**Responses**:
|
|
||||||
|
|
||||||
- **200**: Successful Response
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## PUT /v1/config/file
|
## PUT /v1/config/file
|
||||||
|
|
||||||
**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_file_put_v1_config_file_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_file_put_v1_config_file_put)
|
**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_file_put_v1_config_file_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_file_put_v1_config_file_put)
|
||||||
@@ -555,14 +238,14 @@ Returns:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## POST /v1/config/update
|
## PUT /v1/config/reset
|
||||||
|
|
||||||
**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_update_post_v1_config_update_post), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_update_post_v1_config_update_post)
|
**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_update_post_v1_config_reset_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_update_post_v1_config_reset_put)
|
||||||
|
|
||||||
Fastapi Config Update Post
|
Fastapi Config Update Post
|
||||||
|
|
||||||
```
|
```
|
||||||
Update the configuration from the EOS configuration file.
|
Reset the configuration to the EOS configuration file.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
configuration (ConfigEOS): The current configuration after update.
|
configuration (ConfigEOS): The current configuration after update.
|
||||||
@@ -574,37 +257,6 @@ Returns:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## PUT /v1/config/value
|
|
||||||
|
|
||||||
**Links**: [local](http://localhost:8503/docs#/default/fastapi_config_value_put_v1_config_value_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_config_value_put_v1_config_value_put)
|
|
||||||
|
|
||||||
Fastapi Config Value Put
|
|
||||||
|
|
||||||
```
|
|
||||||
Set the configuration option in the settings.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
key (str): configuration key
|
|
||||||
value (Any): configuration value
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
configuration (ConfigEOS): The current configuration after the write.
|
|
||||||
```
|
|
||||||
|
|
||||||
**Parameters**:
|
|
||||||
|
|
||||||
- `key` (query, required): configuration key
|
|
||||||
|
|
||||||
- `value` (query, required): configuration value
|
|
||||||
|
|
||||||
**Responses**:
|
|
||||||
|
|
||||||
- **200**: Successful Response
|
|
||||||
|
|
||||||
- **422**: Validation Error
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## PUT /v1/measurement/data
|
## PUT /v1/measurement/data
|
||||||
|
|
||||||
**Links**: [local](http://localhost:8503/docs#/default/fastapi_measurement_data_put_v1_measurement_data_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_measurement_data_put_v1_measurement_data_put)
|
**Links**: [local](http://localhost:8503/docs#/default/fastapi_measurement_data_put_v1_measurement_data_put), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_measurement_data_put_v1_measurement_data_put)
|
||||||
@@ -874,6 +526,31 @@ Args:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## GET /v1/prediction/providers
|
||||||
|
|
||||||
|
**Links**: [local](http://localhost:8503/docs#/default/fastapi_prediction_providers_get_v1_prediction_providers_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_prediction_providers_get_v1_prediction_providers_get)
|
||||||
|
|
||||||
|
Fastapi Prediction Providers Get
|
||||||
|
|
||||||
|
```
|
||||||
|
Get a list of available prediction providers.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
enabled (bool): Return enabled/disabled providers. If unset, return all providers.
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**:
|
||||||
|
|
||||||
|
- `enabled` (query, optional): No description provided.
|
||||||
|
|
||||||
|
**Responses**:
|
||||||
|
|
||||||
|
- **200**: Successful Response
|
||||||
|
|
||||||
|
- **422**: Validation Error
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## GET /v1/prediction/series
|
## GET /v1/prediction/series
|
||||||
|
|
||||||
**Links**: [local](http://localhost:8503/docs#/default/fastapi_prediction_series_get_v1_prediction_series_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_prediction_series_get_v1_prediction_series_get)
|
**Links**: [local](http://localhost:8503/docs#/default/fastapi_prediction_series_get_v1_prediction_series_get), [eos](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/Akkudoktor-EOS/EOS/refs/heads/main/openapi.json#/default/fastapi_prediction_series_get_v1_prediction_series_get)
|
||||||
|
3
docs/_static/eos.css
vendored
Normal file
3
docs/_static/eos.css
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
.wy-nav-content {
|
||||||
|
max-width: 90% !important;
|
||||||
|
}
|
BIN
docs/_static/introduction/integration.png
vendored
BIN
docs/_static/introduction/integration.png
vendored
Binary file not shown.
Before Width: | Height: | Size: 58 KiB |
BIN
docs/_static/introduction/introduction.png
vendored
BIN
docs/_static/introduction/introduction.png
vendored
Binary file not shown.
Before Width: | Height: | Size: 22 KiB |
BIN
docs/_static/introduction/overview.png
vendored
BIN
docs/_static/introduction/overview.png
vendored
Binary file not shown.
Before Width: | Height: | Size: 60 KiB |
File diff suppressed because one or more lines are too long
BIN
docs/_static/optimization_timeframes.png
vendored
BIN
docs/_static/optimization_timeframes.png
vendored
Binary file not shown.
Before Width: | Height: | Size: 664 KiB |
9
docs/akkudoktoreos/about.md
Normal file
9
docs/akkudoktoreos/about.md
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
# About Akkudoktor EOS
|
||||||
|
|
||||||
|
The Energy System Simulation and Optimization System (EOS) provides a comprehensive solution for
|
||||||
|
simulating and optimizing an energy system based on renewable energy sources. With a focus on
|
||||||
|
photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements),
|
||||||
|
heat pumps, electric vehicles, and consideration of electricity price data, this system enables
|
||||||
|
forecasting and optimization of energy flow and costs over a specified period.
|
@@ -20,22 +20,17 @@ EOS Architecture
|
|||||||
|
|
||||||
### Configuration
|
### Configuration
|
||||||
|
|
||||||
The configuration controls all aspects of EOS: optimization, prediction, measurement, and energy
|
The configuration controls all aspects of EOS: optimization, prediction, measurement, and energy management.
|
||||||
management.
|
|
||||||
|
|
||||||
### Energy Management
|
### Energy Management
|
||||||
|
|
||||||
Energy management is the overall process to provide planning data for scheduling the different
|
Energy management is the overall process to provide planning data for scheduling the different devices in your system in an optimal way. Energy management cares for the update of predictions and the optimization of the planning based on the simulated behavior of the devices. The planning is on the hour. Sub-hour energy management is left
|
||||||
devices in your system in an optimal way. Energy management cares for the update of predictions and
|
|
||||||
the optimization of the planning based on the simulated behavior of the devices. The planning is on
|
|
||||||
the hour. Sub-hour energy management is left
|
|
||||||
|
|
||||||
### Optimization
|
### Optimization
|
||||||
|
|
||||||
### Device Simulations
|
### Device Simulations
|
||||||
|
|
||||||
Device simulations simulate devices' behavior based on internal logic and predicted data. They
|
Device simulations simulate devices' behavior based on internal logic and predicted data. They provide the data needed for optimization.
|
||||||
provide the data needed for optimization.
|
|
||||||
|
|
||||||
### Predictions
|
### Predictions
|
||||||
|
|
||||||
@@ -43,8 +38,7 @@ Predictions provide predicted future data to be used by the optimization.
|
|||||||
|
|
||||||
### Measurements
|
### Measurements
|
||||||
|
|
||||||
Measurements are utilized to refine predictions using real data from your system, thereby enhancing
|
Measurements are utilized to refine predictions using real data from your system, thereby enhancing accuracy.
|
||||||
accuracy.
|
|
||||||
|
|
||||||
### EOS Server
|
### EOS Server
|
||||||
|
|
||||||
|
@@ -7,10 +7,9 @@ management.
|
|||||||
|
|
||||||
## Storing Configuration
|
## Storing Configuration
|
||||||
|
|
||||||
EOS stores configuration data in a **key-value store**, where a `configuration key` refers to the
|
EOS stores configuration data in a `nested structure`. Note that configuration changes inside EOS
|
||||||
unique identifier used to store and retrieve specific configuration data. Note that the key-value
|
are updated in memory, meaning all changes will be lost upon restarting the EOS REST server if not
|
||||||
store is memory-based, meaning all stored data will be lost upon restarting the EOS REST server if
|
saved to the `EOS configuration file`.
|
||||||
not saved to the `EOS configuration file`.
|
|
||||||
|
|
||||||
Some `configuration keys` are read-only and cannot be altered. These keys are either set up by other
|
Some `configuration keys` are read-only and cannot be altered. These keys are either set up by other
|
||||||
means, such as environment variables, or determined from other information.
|
means, such as environment variables, or determined from other information.
|
||||||
@@ -25,37 +24,37 @@ Use endpoint `PUT /v1/config/file` to save the current configuration to the
|
|||||||
|
|
||||||
### Load Configuration File
|
### Load Configuration File
|
||||||
|
|
||||||
Use endpoint `POST /v1/config/update` to update the configuration from the `EOS configuration file`.
|
Use endpoint `POST /v1/config/reset` to reset the configuration to the values in the
|
||||||
|
`EOS configuration file`.
|
||||||
|
|
||||||
## Configuration Sources and Priorities
|
## Configuration Sources and Priorities
|
||||||
|
|
||||||
The configuration sources and their priorities are as follows:
|
The configuration sources and their priorities are as follows:
|
||||||
|
|
||||||
1. `Settings`: Provided during runtime by the REST interface
|
1. **Runtime Config Updates**: Provided during runtime by the REST interface
|
||||||
2. `Environment Variables`: Defined at startup of the REST server and during runtime
|
2. **Environment Variables**: Defined at startup of the REST server and during runtime
|
||||||
3. `EOS Configuration File`: Read at startup of the REST server and on request
|
3. **EOS Configuration File**: Read at startup of the REST server and on request
|
||||||
4. `Default Values`
|
4. **Default Values**
|
||||||
|
|
||||||
### Settings
|
### Runtime Config Updates
|
||||||
|
|
||||||
Settings are sets of configuration data that take precedence over all other configuration data from
|
The EOS configuration can be updated at runtime. Note that those updates are not persistent
|
||||||
different sources. Note that settings are not persistent. To make the current configuration with the
|
automatically. However it is possible to save the configuration to the `EOS configuration file`.
|
||||||
current settings persistent, save the configuration to the `EOS configuration file`.
|
|
||||||
|
|
||||||
Use the following endpoints to change the current configuration settings:
|
Use the following endpoints to change the current runtime configuration:
|
||||||
|
|
||||||
- `PUT /v1/config`: Replaces the entire configuration settings.
|
- `PUT /v1/config`: Update the entire or parts of the configuration.
|
||||||
- `PUT /v1/config/value`: Sets a specific configuration option.
|
|
||||||
|
|
||||||
### Environment Variables
|
### Environment Variables
|
||||||
|
|
||||||
All `configuration keys` can be set by environment variables with the same name. EOS recognizes the
|
All `configuration keys` can be set by environment variables prefixed with `EOS_` and separated by
|
||||||
following special environment variables:
|
`__` for nested structures. Environment variables are case insensitive.
|
||||||
|
|
||||||
|
EOS recognizes the following special environment variables (case sensitive):
|
||||||
|
|
||||||
- `EOS_CONFIG_DIR`: The directory to search for an EOS configuration file.
|
- `EOS_CONFIG_DIR`: The directory to search for an EOS configuration file.
|
||||||
- `EOS_DIR`: The directory used by EOS for data, which will also be searched for an EOS
|
- `EOS_DIR`: The directory used by EOS for data, which will also be searched for an EOS
|
||||||
configuration file.
|
configuration file.
|
||||||
- `EOS_LOGGING_LEVEL`: The logging level to use in EOS.
|
|
||||||
|
|
||||||
### EOS Configuration File
|
### EOS Configuration File
|
||||||
|
|
||||||
@@ -66,7 +65,7 @@ If you do not have a configuration file, it will be automatically created on the
|
|||||||
the REST server in a system-dependent location.
|
the REST server in a system-dependent location.
|
||||||
|
|
||||||
To determine the location of the configuration file used by EOS, ask the REST server. The endpoint
|
To determine the location of the configuration file used by EOS, ask the REST server. The endpoint
|
||||||
`GET /v1/config` provides the `config_file_path` configuration key.
|
`GET /v1/config` provides the `general.config_file_path` configuration key.
|
||||||
|
|
||||||
EOS searches for the configuration file in the following order:
|
EOS searches for the configuration file in the following order:
|
||||||
|
|
||||||
@@ -75,9 +74,15 @@ EOS searches for the configuration file in the following order:
|
|||||||
3. A platform-specific default directory for EOS
|
3. A platform-specific default directory for EOS
|
||||||
4. The current working directory
|
4. The current working directory
|
||||||
|
|
||||||
The first available configuration file found in these directories is loaded. If no configuration
|
The first configuration file available in these directories is loaded. If no configuration file is
|
||||||
file is found, a default configuration file is created in the platform-specific default directory,
|
found, a default configuration file is created, and the default settings are written to it. The
|
||||||
and default settings are loaded into it.
|
location of the created configuration file follows the same order in which EOS searches for
|
||||||
|
configuration files, and it depends on whether the relevant environment variables are set.
|
||||||
|
|
||||||
|
Use the following endpoints to interact with the configuration file:
|
||||||
|
|
||||||
|
- `PUT /v1/config/file`: Save the current configuration to the configuration file.
|
||||||
|
- `PUT /v1/config/reset`: Reload the configuration file, all unsaved runtime configuration is reset.
|
||||||
|
|
||||||
### Default Values
|
### Default Values
|
||||||
|
|
||||||
|
@@ -1,5 +1,4 @@
|
|||||||
% SPDX-License-Identifier: Apache-2.0
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
(integration-page)=
|
|
||||||
|
|
||||||
# Integration
|
# Integration
|
||||||
|
|
||||||
@@ -18,19 +17,18 @@ APIs, and online services in creative and practical ways.
|
|||||||
|
|
||||||
Andreas Schmitz uses [Node-RED](https://nodered.org/) as part of his home automation setup.
|
Andreas Schmitz uses [Node-RED](https://nodered.org/) as part of his home automation setup.
|
||||||
|
|
||||||
### Node-Red Resources
|
### Resources
|
||||||
|
|
||||||
- [Installation Guide (German)](https://meintechblog.de/2024/09/05/andreas-schmitz-joerg-installiert-mein-energieoptimierungssystem/)
|
- [Installation Guide (German)](https://meintechblog.de/2024/09/05/andreas-schmitz-joerg-installiert-mein-energieoptimierungssystem/) — A detailed guide on integrating an early version of EOS with
|
||||||
\— A detailed guide on integrating an early version of EOS with `Node-RED`.
|
`Node-RED`.
|
||||||
|
|
||||||
## Home Assistant
|
## Home Assistant
|
||||||
|
|
||||||
[Home Assistant](https://www.home-assistant.io/) is an open-source home automation platform that
|
[Home Assistant](https://www.home-assistant.io/) is an open-source home automation platform that
|
||||||
emphasizes local control and user privacy.
|
emphasizes local control and user privacy.
|
||||||
|
|
||||||
(duetting-solution)=
|
### Resources
|
||||||
|
|
||||||
### Home Assistant Resources
|
|
||||||
|
|
||||||
- Duetting's [EOS Home Assistant Addon](https://github.com/Duetting/ha_eos_addon) — Additional
|
- Duetting's [EOS Home Assistant Addon](https://github.com/Duetting/ha_eos_addon) — Additional
|
||||||
details can be found in this [discussion thread](https://github.com/Akkudoktor-EOS/EOS/discussions/294).
|
details can be found in this
|
||||||
|
[discussion thread](https://github.com/Akkudoktor-EOS/EOS/discussions/294).
|
||||||
|
@@ -1,180 +0,0 @@
|
|||||||
% SPDX-License-Identifier: Apache-2.0
|
|
||||||
|
|
||||||
# Introduction
|
|
||||||
|
|
||||||
The Energy System Simulation and Optimization System (EOS) provides a comprehensive
|
|
||||||
solution for simulating and optimizing an energy system based on renewable energy
|
|
||||||
sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load
|
|
||||||
management (consumer requirements), heat pumps, electric vehicles, and consideration of
|
|
||||||
electricity price data, this system enables forecasting and optimization of energy flow
|
|
||||||
and costs over a specified period.
|
|
||||||
|
|
||||||
After successfully installing a PV system with or without battery storage, most owners'
|
|
||||||
first priority is often to charge the electric car with surplus energy in order to use
|
|
||||||
the electricity generated by the PV system cost-effectively for electromobility.
|
|
||||||
|
|
||||||
After initial experiences, the desire to include battery storage and dynamic electricity
|
|
||||||
prices in the solution soon arises. The market already offers various commercial and
|
|
||||||
non-commercial solutions for this, such as the popular open source hardware and software
|
|
||||||
solutions evcc or openWB.
|
|
||||||
|
|
||||||
Some solutions take into account the current values of the system such as PV power
|
|
||||||
output, battery storage charge level or the current electricity price to decide whether
|
|
||||||
to charge the electric car with PV surplus or from the grid (e.g. openWB), some use
|
|
||||||
historical consumption values and PV forecast data for their calculations, but leave out
|
|
||||||
the current electricity prices and charging the battery storage from the power grid
|
|
||||||
(Predbat). Others are specialized in working in combination with a specific smart home
|
|
||||||
solution (e.g. emhass). Still others focus on certain consumers, such as the electric car,
|
|
||||||
or are currently working on integrating the forecast values (evcc). And some are commercial
|
|
||||||
devices that require an electrician to install them and expect a certain ecosystem
|
|
||||||
(e.g. Sunny Home Manager).
|
|
||||||
|
|
||||||
The Akkudoktor EOS
|
|
||||||
|
|
||||||
- takes into account historical, current and forecast data such as consumption values, PV
|
|
||||||
forecast data, electricity price forecast, battery storage and electric car charge levels
|
|
||||||
- the simulation also takes into account the possibility of charging the battery storage
|
|
||||||
from the grid at low electricity prices
|
|
||||||
- is not limited to certain consumers, but includes electric cars, heat pumps or more
|
|
||||||
powerful consumers such as tumble dryers
|
|
||||||
- is independent of a specific smart home solution and can also be integrated into
|
|
||||||
self-developed solutions if desired
|
|
||||||
- is a free and independent open source software solution
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
The challenge is to charge (electric car) or start the consumers (washing machine, dryer)
|
|
||||||
at the right time and to do so as cost-efficiently as possible. If PV yield forecast,
|
|
||||||
battery storage and dynamic electricity price forecasts are included in the calculation,
|
|
||||||
the possibilities increase, but unfortunately so does the complexity.
|
|
||||||
|
|
||||||
The Akkudoktor EOS addresses this challenge by simulating energy flows in the household
|
|
||||||
based on target values, forecast data and current operating data over a 48-hour
|
|
||||||
observation period, running through a large number of different scenarios and finally
|
|
||||||
providing a cost-optimized plan for the current day controlling the relevant consumers.
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
- Technical requirements
|
|
||||||
- Input data
|
|
||||||
|
|
||||||
### Technical requirements
|
|
||||||
|
|
||||||
- reasonably fast computer on which EOS is installed
|
|
||||||
- controllable energy system consisting of photovoltaic system, solar battery storage,
|
|
||||||
energy intensive consumers that must provide the appropriate interfaces
|
|
||||||
- integration solution for integrating the energy system and EOS
|
|
||||||
|
|
||||||
### Input Data
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
The EOS requires various types of data for the simulation:
|
|
||||||
|
|
||||||
Forecast data
|
|
||||||
|
|
||||||
- PV yield forecast
|
|
||||||
- Expected household consumption
|
|
||||||
- Electricity price forecast
|
|
||||||
- Forecast temperature trend (if heatpump is used)
|
|
||||||
|
|
||||||
Basic data and current operating data
|
|
||||||
|
|
||||||
- Current charge level of the battery storage
|
|
||||||
- Value of electricity in the battery storage
|
|
||||||
- Current charge level of the electric car
|
|
||||||
- Energy consumption and running time of dishwasher, washing machine and tumble dryer
|
|
||||||
|
|
||||||
Target values
|
|
||||||
|
|
||||||
- Charge level the electric car should reach in the next few hours
|
|
||||||
- Consumers to run in the next few hours
|
|
||||||
|
|
||||||
There are various service providers available for PV forecasting that calculate forecast
|
|
||||||
data for a PV system based on the various influencing factors, such as system size,
|
|
||||||
orientation, location, time of year and weather conditions. EOS also offers a
|
|
||||||
[PV forecasting service](#prediction-page) which can be used. This service uses
|
|
||||||
public data in the background.
|
|
||||||
|
|
||||||
For the forecast of household consumption EOS provides a standard load curve for an
|
|
||||||
average day based on annual household consumption that you can fetch via API. This data
|
|
||||||
was compiled based on data from several households and provides an initial usable basis.
|
|
||||||
Alternatively your own collected historical data could be used to reflect your personal
|
|
||||||
consumption behaviour.
|
|
||||||
|
|
||||||
## Simulation Results
|
|
||||||
|
|
||||||
Based on the input data, the EOS uses a genetic algorithm to create a cost-optimized
|
|
||||||
schedule for the coming hours from numerous simulations of the overall system.
|
|
||||||
|
|
||||||
The plan created contains for each of the coming hours
|
|
||||||
|
|
||||||
- Control information
|
|
||||||
- whether and with what power the battery storage should be charged from the grid
|
|
||||||
- when the battery storage should be charged via the PV system
|
|
||||||
- whether discharging the battery storage is permitted or not
|
|
||||||
- when and with what power the electric car should be charged
|
|
||||||
- when a household appliance should be activated
|
|
||||||
- Energy history information
|
|
||||||
- Total load of the house
|
|
||||||
- Grid consumption
|
|
||||||
- Feed-in
|
|
||||||
- Load of the planned household appliances
|
|
||||||
- Charge level of the battery storage
|
|
||||||
- Charge level of the electric car
|
|
||||||
- Active losses
|
|
||||||
- Cost information
|
|
||||||
- Revenue per hour (when fed into the grid)
|
|
||||||
- Total costs per hour (when drawn from the grid)
|
|
||||||
- Overall balance (revenue-costs)
|
|
||||||
- Cost development
|
|
||||||
|
|
||||||
If required, the simulation result can also be created and downloaded in graphical
|
|
||||||
form as a PDF from EOS.
|
|
||||||
|
|
||||||
## Integration
|
|
||||||
|
|
||||||
The Akkudoktor EOS can be integrated into a wide variety of systems with a variety
|
|
||||||
of components.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
However, the components are not integrated by the EOS itself, but must be integrated by
|
|
||||||
the user using an integration solution and currently requires some effort and technical
|
|
||||||
know-how.
|
|
||||||
|
|
||||||
Any [integration](#integration-page) solution that can act as an intermediary between the
|
|
||||||
components and the REST API of EOS can be used. One possible solution that enables the
|
|
||||||
integration of components and EOS is Node-RED. Another solution could be Home Assistant
|
|
||||||
using its built-in features.
|
|
||||||
|
|
||||||
Access to the data and functions of the components can be done in a variety of ways.
|
|
||||||
Node-RED offers a large number of types of nodes that allow access via the protocols
|
|
||||||
commonly used in this area, such as Modbus or MQTT. Access to any existing databases,
|
|
||||||
such as InfluxDB or PostgreSQL, is also possible via nodes provided by Node-RED.
|
|
||||||
|
|
||||||
It becomes easier if a smart home solution like Home Assistant, openHAB or ioBroker or
|
|
||||||
solutions such as evcc or openWB are already in use. In this case, these smart home
|
|
||||||
solutions already take over the technical integration and communication with the components
|
|
||||||
at a technical level and Node-RED offers nodes for accessing these solutions, so that the
|
|
||||||
corresponding sources can be easily integrated into a flow.
|
|
||||||
|
|
||||||
In Home Assistant you could use an automation to prepare the input payload for EOS and
|
|
||||||
then use the RESTful integration to call EOS. Based on this concept there is already a
|
|
||||||
Home Assistant add-on created by [Duetting](#duetting-solution).
|
|
||||||
|
|
||||||
The plan created by EOS must also be executed via the chosen integration solution,
|
|
||||||
with the respective devices receiving their instructions according to the plan.
|
|
||||||
|
|
||||||
## Limitations
|
|
||||||
|
|
||||||
The plan calculated by EOS is cost-optimized due to the genetic algorithm used, but not
|
|
||||||
necessarily cost-optimal, since genetic algorithms do not always find the global optimum,
|
|
||||||
but usually find good local optima very quickly in a large solution space.
|
|
||||||
|
|
||||||
## Links
|
|
||||||
|
|
||||||
- [German Video explaining the basic concept and installation process for the early version of EOS (YouTube)](https://www.youtube.com/live/ftQULW4-1ts?si=oDdBBifCpUmiCXaY)
|
|
||||||
- [German Forum of Akkudoktor EOS](https://akkudoktor.net/c/der-akkudoktor/eos)
|
|
||||||
- [Akkudoktor-EOS GitHub Repository](https://github.com/Akkudoktor-EOS/EOS)
|
|
||||||
- [Latest EOS Documentation](https://akkudoktor-eos.readthedocs.io/en/latest/)
|
|
@@ -5,9 +5,9 @@
|
|||||||
Measurements are utilized to refine predictions using real data from your system, thereby enhancing
|
Measurements are utilized to refine predictions using real data from your system, thereby enhancing
|
||||||
accuracy.
|
accuracy.
|
||||||
|
|
||||||
- Household Load Measurement
|
- **Household Load Measurement**
|
||||||
- Grid Export Measurement
|
- **Grid Export Measurement**
|
||||||
- Grid Import Measurement
|
- **Grid Import Measurement**
|
||||||
|
|
||||||
## Storing Measurements
|
## Storing Measurements
|
||||||
|
|
||||||
@@ -56,21 +56,21 @@ A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html)
|
|||||||
The EOS measurement store provides for storing meter readings of loads. There are currently five loads
|
The EOS measurement store provides for storing meter readings of loads. There are currently five loads
|
||||||
foreseen. The associated `measurement key`s are:
|
foreseen. The associated `measurement key`s are:
|
||||||
|
|
||||||
- `measurement_load0_mr`: Load0 meter reading [kWh]
|
- `load0_mr`: Load0 meter reading [kWh]
|
||||||
- `measurement_load1_mr`: Load1 meter reading [kWh]
|
- `load1_mr`: Load1 meter reading [kWh]
|
||||||
- `measurement_load2_mr`: Load2 meter reading [kWh]
|
- `load2_mr`: Load2 meter reading [kWh]
|
||||||
- `measurement_load3_mr`: Load3 meter reading [kWh]
|
- `load3_mr`: Load3 meter reading [kWh]
|
||||||
- `measurement_load4_mr`: Load4 meter reading [kWh]
|
- `load4_mr`: Load4 meter reading [kWh]
|
||||||
|
|
||||||
For ease of use, you can assign descriptive names to the `measurement key`s to represent your
|
For ease of use, you can assign descriptive names to the `measurement key`s to represent your
|
||||||
system's load sources. Use the following `configuration options` to set these names
|
system's load sources. Use the following `configuration options` to set these names
|
||||||
(e.g., 'Dish Washer', 'Heat Pump'):
|
(e.g., 'Dish Washer', 'Heat Pump'):
|
||||||
|
|
||||||
- `measurement_load0_name`: Name of the load0 source
|
- `load0_name`: Name of the load0 source
|
||||||
- `measurement_load1_name`: Name of the load1 source
|
- `load1_name`: Name of the load1 source
|
||||||
- `measurement_load2_name`: Name of the load2 source
|
- `load2_name`: Name of the load2 source
|
||||||
- `measurement_load3_name`: Name of the load3 source
|
- `load3_name`: Name of the load3 source
|
||||||
- `measurement_load4_name`: Name of the load4 source
|
- `load4_name`: Name of the load4 source
|
||||||
|
|
||||||
Load measurements can be stored for any datetime. The values between different meter readings are
|
Load measurements can be stored for any datetime. The values between different meter readings are
|
||||||
linearly approximated. Since optimization occurs on the hour, storing values between hours is
|
linearly approximated. Since optimization occurs on the hour, storing values between hours is
|
||||||
@@ -84,8 +84,8 @@ for specified intervals, usually one hour. This aggregated data can be used for
|
|||||||
The EOS measurement store also allows for the storage of meter readings for grid import and export.
|
The EOS measurement store also allows for the storage of meter readings for grid import and export.
|
||||||
The associated `measurement key`s are:
|
The associated `measurement key`s are:
|
||||||
|
|
||||||
- `measurement_grid_export_mr`: Export to grid meter reading [kWh]
|
- `grid_export_mr`: Export to grid meter reading [kWh]
|
||||||
- `measurement_grid_import_mr`: Import from grid meter reading [kWh]
|
- `grid_import_mr`: Import from grid meter reading [kWh]
|
||||||
|
|
||||||
:::{admonition} Todo
|
:::{admonition} Todo
|
||||||
:class: note
|
:class: note
|
||||||
|
@@ -2,199 +2,7 @@
|
|||||||
|
|
||||||
# Optimization
|
# Optimization
|
||||||
|
|
||||||
## Introduction
|
:::{admonition} Todo
|
||||||
|
:class: note
|
||||||
The `POST /optimize` API endpoint optimizes your energy management system based on various inputs
|
Describe optimization.
|
||||||
including electricity prices, battery storage capacity, PV forecast, and temperature data.
|
:::
|
||||||
|
|
||||||
## Input Payload
|
|
||||||
|
|
||||||
### Sample Request
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"ems": {
|
|
||||||
"preis_euro_pro_wh_akku": 0.0007,
|
|
||||||
"einspeiseverguetung_euro_pro_wh": 0.00007,
|
|
||||||
"gesamtlast": [500, 500, ..., 500, 500],
|
|
||||||
"pv_prognose_wh": [300, 0, 0, ..., 2160, 1840],
|
|
||||||
"strompreis_euro_pro_wh": [0.0003784, 0.0003868, ..., 0.00034102, 0.00033709]
|
|
||||||
},
|
|
||||||
"pv_akku": {
|
|
||||||
"capacity_wh": 12000,
|
|
||||||
"charging_efficiency": 0.92,
|
|
||||||
"discharging_efficiency": 0.92,
|
|
||||||
"max_charge_power_w": 5700,
|
|
||||||
"initial_soc_percentage": 66,
|
|
||||||
"min_soc_percentage": 5,
|
|
||||||
"max_soc_percentage": 100
|
|
||||||
},
|
|
||||||
"inverter": {
|
|
||||||
"max_power_wh": 15500
|
|
||||||
},
|
|
||||||
"eauto": {
|
|
||||||
"capacity_wh": 64000,
|
|
||||||
"charging_efficiency": 0.88,
|
|
||||||
"discharging_efficiency": 0.88,
|
|
||||||
"max_charge_power_w": 11040,
|
|
||||||
"initial_soc_percentage": 98,
|
|
||||||
"min_soc_percentage": 60,
|
|
||||||
"max_soc_percentage": 100
|
|
||||||
},
|
|
||||||
"temperature_forecast": [18.3, 18, ..., 20.16, 19.84],
|
|
||||||
"start_solution": null
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Input Parameters
|
|
||||||
|
|
||||||
### Energy Management System (EMS)
|
|
||||||
|
|
||||||
#### Battery Cost (`preis_euro_pro_wh_akku`)
|
|
||||||
|
|
||||||
- Unit: €/Wh
|
|
||||||
- Purpose: Represents the residual value of energy stored in the battery
|
|
||||||
- Impact: Lower values encourage battery depletion, higher values preserve charge at the end of the simulation.
|
|
||||||
|
|
||||||
#### Feed-in Tariff (`einspeiseverguetung_euro_pro_wh`)
|
|
||||||
|
|
||||||
- Unit: €/Wh
|
|
||||||
- Purpose: Compensation received for feeding excess energy back to the grid
|
|
||||||
|
|
||||||
#### Total Load Forecast (`gesamtlast`)
|
|
||||||
|
|
||||||
- Unit: W
|
|
||||||
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
|
|
||||||
- Format: Array of hourly values
|
|
||||||
- Note: Exclude optimizable loads (EV charging, battery charging, etc.)
|
|
||||||
|
|
||||||
##### Data Sources
|
|
||||||
|
|
||||||
1. Standard Load Profile: `GET /v1/prediction/list?key=load_mean` for a standard load profile based
|
|
||||||
on your yearly consumption.
|
|
||||||
2. Adjusted Load Profile: `GET /v1/prediction/list?key=load_mean_adjusted` for a combination of a
|
|
||||||
standard load profile based on your yearly consumption incl. data from last 48h.
|
|
||||||
|
|
||||||
#### PV Generation Forecast (`pv_prognose_wh`)
|
|
||||||
|
|
||||||
- Unit: W
|
|
||||||
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
|
|
||||||
- Format: Array of hourly values
|
|
||||||
- Data Source: `GET /v1/prediction/series?key=pvforecast_ac_power`
|
|
||||||
|
|
||||||
#### Electricity Price Forecast (`strompreis_euro_pro_wh`)
|
|
||||||
|
|
||||||
- Unit: €/Wh
|
|
||||||
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
|
|
||||||
- Format: Array of hourly values
|
|
||||||
- Data Source: `GET /v1/prediction/list?key=elecprice_marketprice_wh`
|
|
||||||
|
|
||||||
Verify prices against your local tariffs.
|
|
||||||
|
|
||||||
### Battery Storage System
|
|
||||||
|
|
||||||
#### Configuration
|
|
||||||
|
|
||||||
- `capacity_wh`: Total battery capacity in Wh
|
|
||||||
- `charging_efficiency`: Charging efficiency (0-1)
|
|
||||||
- `discharging_efficiency`: Discharging efficiency (0-1)
|
|
||||||
- `max_charge_power_w`: Maximum charging power in W
|
|
||||||
|
|
||||||
#### State of Charge (SoC)
|
|
||||||
|
|
||||||
- `initial_soc_percentage`: Current battery level (%)
|
|
||||||
- `min_soc_percentage`: Minimum allowed SoC (%)
|
|
||||||
- `max_soc_percentage`: Maximum allowed SoC (%)
|
|
||||||
|
|
||||||
### Inverter
|
|
||||||
|
|
||||||
- `max_power_wh`: Maximum inverter power in Wh
|
|
||||||
|
|
||||||
### Electric Vehicle (EV)
|
|
||||||
|
|
||||||
- `capacity_wh`: Battery capacity in Wh
|
|
||||||
- `charging_efficiency`: Charging efficiency (0-1)
|
|
||||||
- `discharging_efficiency`: Discharging efficiency (0-1)
|
|
||||||
- `max_charge_power_w`: Maximum charging power in W
|
|
||||||
- `initial_soc_percentage`: Current charge level (%)
|
|
||||||
- `min_soc_percentage`: Minimum allowed SoC (%)
|
|
||||||
- `max_soc_percentage`: Maximum allowed SoC (%)
|
|
||||||
|
|
||||||
### Temperature Forecast
|
|
||||||
|
|
||||||
- Unit: °C
|
|
||||||
- Time Range: 48 hours (00:00 today to 23:00 tomorrow)
|
|
||||||
- Format: Array of hourly values
|
|
||||||
- Data Source: `GET /v1/prediction/list?key=weather_temp_air`
|
|
||||||
|
|
||||||
## Output Format
|
|
||||||
|
|
||||||
### Sample Response
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"ac_charge": [0.625, 0, ..., 0.75, 0],
|
|
||||||
"dc_charge": [1, 1, ..., 1, 1],
|
|
||||||
"discharge_allowed": [0, 0, 1, ..., 0, 0],
|
|
||||||
"eautocharge_hours_float": [0.625, 0, ..., 0.75, 0],
|
|
||||||
"result": {
|
|
||||||
"Last_Wh_pro_Stunde": [...],
|
|
||||||
"EAuto_SoC_pro_Stunde": [...],
|
|
||||||
"Einnahmen_Euro_pro_Stunde": [...],
|
|
||||||
"Gesamt_Verluste": 1514.96,
|
|
||||||
"Gesamtbilanz_Euro": 2.51,
|
|
||||||
"Gesamteinnahmen_Euro": 2.88,
|
|
||||||
"Gesamtkosten_Euro": 5.39,
|
|
||||||
"akku_soc_pro_stunde": [...]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Output Parameters
|
|
||||||
|
|
||||||
#### Battery Control
|
|
||||||
|
|
||||||
- `ac_charge`: Grid charging schedule (0-1)
|
|
||||||
- `dc_charge`: DC charging schedule (0-1)
|
|
||||||
- `discharge_allowed`: Discharge permission (0 or 1)
|
|
||||||
|
|
||||||
0 (no charge)
|
|
||||||
1 (charge with full load)
|
|
||||||
|
|
||||||
`ac_charge` multiplied by the maximum charge power of the battery results in the planned charging power.
|
|
||||||
|
|
||||||
#### EV Charging
|
|
||||||
|
|
||||||
- `eautocharge_hours_float`: EV charging schedule (0-1)
|
|
||||||
|
|
||||||
#### Results
|
|
||||||
|
|
||||||
The `result` object contains detailed information about the optimization outcome.
|
|
||||||
The length of the array is between 25 and 48 and starts at the current hour and ends at 23:00 tomorrow.
|
|
||||||
|
|
||||||
- `Last_Wh_pro_Stunde`: Array of hourly load values in Wh
|
|
||||||
- Shows the total energy consumption per hour
|
|
||||||
- Includes household load, battery charging/discharging, and EV charging
|
|
||||||
|
|
||||||
- `EAuto_SoC_pro_Stunde`: Array of hourly EV state of charge values (%)
|
|
||||||
- Shows the projected EV battery level throughout the optimization period
|
|
||||||
|
|
||||||
- `Einnahmen_Euro_pro_Stunde`: Array of hourly revenue values in Euro
|
|
||||||
|
|
||||||
- `Gesamt_Verluste`: Total energy losses in Wh
|
|
||||||
|
|
||||||
- `Gesamtbilanz_Euro`: Overall financial balance in Euro
|
|
||||||
|
|
||||||
- `Gesamteinnahmen_Euro`: Total revenue in Euro
|
|
||||||
|
|
||||||
- `Gesamtkosten_Euro`: Total costs in Euro
|
|
||||||
|
|
||||||
- `akku_soc_pro_stunde`: Array of hourly battery state of charge values (%)
|
|
||||||
|
|
||||||
## Timeframe overview
|
|
||||||
|
|
||||||
```{figure} ../_static/optimization_timeframes.png
|
|
||||||
:alt: Timeframe Overview
|
|
||||||
|
|
||||||
Timeframe Overview
|
|
||||||
```
|
|
||||||
|
@@ -1,15 +1,14 @@
|
|||||||
% SPDX-License-Identifier: Apache-2.0
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
(prediction-page)=
|
|
||||||
|
|
||||||
# Predictions
|
# Predictions
|
||||||
|
|
||||||
Predictions, along with simulations and measurements, form the foundation upon which energy
|
Predictions, along with simulations and measurements, form the foundation upon which energy
|
||||||
optimization is executed. In EOS, a standard set of predictions is managed, including:
|
optimization is executed. In EOS, a standard set of predictions is managed, including:
|
||||||
|
|
||||||
- Household Load Prediction
|
- **Household Load Prediction**
|
||||||
- Electricity Price Prediction
|
- **Electricity Price Prediction**
|
||||||
- PV Power Prediction
|
- **PV Power Prediction**
|
||||||
- Weather Prediction
|
- **Weather Prediction**
|
||||||
|
|
||||||
## Storing Predictions
|
## Storing Predictions
|
||||||
|
|
||||||
@@ -20,10 +19,14 @@ data is lost on re-start of the EOS REST server.
|
|||||||
## Prediction Providers
|
## Prediction Providers
|
||||||
|
|
||||||
Most predictions can be sourced from various providers. The specific provider to use is configured
|
Most predictions can be sourced from various providers. The specific provider to use is configured
|
||||||
in the EOS configuration. For example:
|
in the EOS configuration and can be set by prediction type. For example:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
weather_provider = "ClearOutside"
|
{
|
||||||
|
"weather": {
|
||||||
|
"provider": "ClearOutside"
|
||||||
|
}
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Some providers offer multiple prediction keys. For instance, a weather provider might provide data
|
Some providers offer multiple prediction keys. For instance, a weather provider might provide data
|
||||||
@@ -57,15 +60,13 @@ A dictionary with the following structure:
|
|||||||
#### 2. DateTimeDataFrame
|
#### 2. DateTimeDataFrame
|
||||||
|
|
||||||
A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) dataframe with a
|
A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) dataframe with a
|
||||||
`DatetimeIndex`. Use
|
`DatetimeIndex`. Use [pandas.DataFrame.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_json.html#pandas.DataFrame.to_json).
|
||||||
[pandas.DataFrame.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_json.html#pandas.DataFrame.to_json).
|
|
||||||
The column name of the data must be the same as the names of the `prediction key`s.
|
The column name of the data must be the same as the names of the `prediction key`s.
|
||||||
|
|
||||||
#### 3. DateTimeSeries
|
#### 3. DateTimeSeries
|
||||||
|
|
||||||
A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) series with a
|
A JSON string created from a [pandas](https://pandas.pydata.org/docs/index.html) series with a
|
||||||
`DatetimeIndex`. Use
|
`DatetimeIndex`. Use [pandas.Series.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.Series.to_json.html#pandas.Series.to_json).
|
||||||
[pandas.Series.to_json(orient="index")](https://pandas.pydata.org/docs/reference/api/pandas.Series.to_json.html#pandas.Series.to_json).
|
|
||||||
|
|
||||||
## Adjusted Predictions
|
## Adjusted Predictions
|
||||||
|
|
||||||
@@ -74,7 +75,7 @@ predictions are adjusted by real data from your system's measurements if given t
|
|||||||
|
|
||||||
For example, the load prediction provider `LoadAkkudoktor` takes generic load data assembled by
|
For example, the load prediction provider `LoadAkkudoktor` takes generic load data assembled by
|
||||||
Akkudoktor.net, maps that to the yearly energy consumption given in the configuration option
|
Akkudoktor.net, maps that to the yearly energy consumption given in the configuration option
|
||||||
`loadakkudoktor_year_energy`, and finally adjusts the predicted load by the `measurement_loads`
|
`loadakkudoktor_year_energy`, and finally adjusts the predicted load by the `loads`
|
||||||
of your system.
|
of your system.
|
||||||
|
|
||||||
## Prediction Updates
|
## Prediction Updates
|
||||||
@@ -110,21 +111,23 @@ Prediction keys:
|
|||||||
|
|
||||||
Configuration options:
|
Configuration options:
|
||||||
|
|
||||||
- `elecprice_provider`: Electricity price provider id of provider to be used.
|
- `elecprice`: Electricity price configuration.
|
||||||
|
|
||||||
- `ElecPriceAkkudoktor`: Retrieves from Akkudoktor.net.
|
- `provider`: Electricity price provider id of provider to be used.
|
||||||
- `ElecPriceImport`: Imports from a file or JSON string.
|
|
||||||
|
|
||||||
- `elecprice_charges_kwh`: Electricity price charges (€/kWh).
|
- `ElecPriceAkkudoktor`: Retrieves from Akkudoktor.net.
|
||||||
- `elecpriceimport_file_path`: Path to the file to import electricity price forecast data from.
|
- `ElecPriceImport`: Imports from a file or JSON string.
|
||||||
- `elecpriceimport_json`: JSON string, dictionary of electricity price forecast value lists.
|
|
||||||
|
- `charges_kwh`: Electricity price charges (€/kWh).
|
||||||
|
- `provider_settings.import_file_path`: Path to the file to import electricity price forecast data from.
|
||||||
|
- `provider_settings.import_json`: JSON string, dictionary of electricity price forecast value lists.
|
||||||
|
|
||||||
### ElecPriceAkkudoktor Provider
|
### ElecPriceAkkudoktor Provider
|
||||||
|
|
||||||
The `ElecPriceAkkudoktor` provider retrieves electricity prices directly from **Akkudoktor.net**,
|
The `ElecPriceAkkudoktor` provider retrieves electricity prices directly from **Akkudoktor.net**,
|
||||||
which supplies price data for the next 24 hours. For periods beyond 24 hours, the provider generates
|
which supplies price data for the next 24 hours. For periods beyond 24 hours, the provider generates
|
||||||
prices by extrapolating historical price data combined with the most recent actual prices obtained
|
prices by extrapolating historical price data combined with the most recent actual prices obtained
|
||||||
from Akkudoktor.net. Electricity price charges given in the `elecprice_charges_kwh` configuration
|
from Akkudoktor.net. Electricity price charges given in the `charges_kwh` configuration
|
||||||
option are added.
|
option are added.
|
||||||
|
|
||||||
### ElecPriceImport Provider
|
### ElecPriceImport Provider
|
||||||
@@ -139,7 +142,7 @@ The prediction key for the electricity price forecast data is:
|
|||||||
|
|
||||||
The electricity proce forecast data must be provided in one of the formats described in
|
The electricity proce forecast data must be provided in one of the formats described in
|
||||||
<project:#prediction-import-providers>. The data source must be given in the
|
<project:#prediction-import-providers>. The data source must be given in the
|
||||||
`elecpriceimport_file_path` or `elecpriceimport_json` configuration option.
|
`import_file_path` or `import_json` configuration option.
|
||||||
|
|
||||||
## Load Prediction
|
## Load Prediction
|
||||||
|
|
||||||
@@ -151,14 +154,16 @@ Prediction keys:
|
|||||||
|
|
||||||
Configuration options:
|
Configuration options:
|
||||||
|
|
||||||
- `load_provider`: Load provider id of provider to be used.
|
- `load`: Load configuration.
|
||||||
|
|
||||||
- `LoadAkkudoktor`: Retrieves from local database.
|
- `provider`: Load provider id of provider to be used.
|
||||||
- `LoadImport`: Imports from a file or JSON string.
|
|
||||||
|
|
||||||
- `loadakkudoktor_year_energy`: Yearly energy consumption (kWh).
|
- `LoadAkkudoktor`: Retrieves from local database.
|
||||||
- `loadimport_file_path`: Path to the file to import load forecast data from.
|
- `LoadImport`: Imports from a file or JSON string.
|
||||||
- `loadimport_json`: JSON string, dictionary of load forecast value lists.
|
|
||||||
|
- `provider_settings.loadakkudoktor_year_energy`: Yearly energy consumption (kWh).
|
||||||
|
- `provider_settings.loadimport_file_path`: Path to the file to import load forecast data from.
|
||||||
|
- `provider_settings.loadimport_json`: JSON string, dictionary of load forecast value lists.
|
||||||
|
|
||||||
### LoadAkkudoktor Provider
|
### LoadAkkudoktor Provider
|
||||||
|
|
||||||
@@ -191,111 +196,72 @@ Prediction keys:
|
|||||||
|
|
||||||
Configuration options:
|
Configuration options:
|
||||||
|
|
||||||
- `pvforecast_provider`: PVForecast provider id of provider to be used.
|
- `general`: General configuration.
|
||||||
|
|
||||||
- `PVForecastAkkudoktor`: Retrieves from Akkudoktor.net.
|
- `latitude`: Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)"
|
||||||
- `PVForecastImport`: Imports from a file or JSON string.
|
- `longitude`: Longitude in decimal degrees, within -180 to 180 (°)
|
||||||
|
|
||||||
- `latitude`: Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)"
|
- `pvforecast`: PV forecast configuration.
|
||||||
- `longitude`: Longitude in decimal degrees, within -180 to 180 (°)
|
|
||||||
- `pvforecast<0..5>_surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
|
||||||
- `pvforecast<0..5>_surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane.
|
|
||||||
Clockwise from north (north=0, east=90, south=180, west=270).
|
|
||||||
- `pvforecast<0..5>_userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
|
||||||
- `pvforecast<0..5>_peakpower`: Nominal power of PV system in kW.
|
|
||||||
- `pvforecast<0..5>_pvtechchoice`: PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
|
||||||
- `pvforecast<0..5>_mountingplace`: Type of mounting for PV system. Options are 'free' for free-standing
|
|
||||||
and 'building' for building-integrated.
|
|
||||||
- `pvforecast<0..5>_loss`: Sum of PV system losses in percent
|
|
||||||
- `pvforecast<0..5>_trackingtype`: Type of suntracking. 0=fixed,
|
|
||||||
1=single horizontal axis aligned north-south,
|
|
||||||
2=two-axis tracking,
|
|
||||||
3=vertical axis tracking,
|
|
||||||
4=single horizontal axis aligned east-west,
|
|
||||||
5=single inclined axis aligned north-south.
|
|
||||||
- `pvforecast<0..5>_optimal_surface_tilt`: Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
|
||||||
- `pvforecast<0..5>_optimalangles`: Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
|
||||||
- `pvforecast<0..5>_albedo`: Proportion of the light hitting the ground that it reflects back.
|
|
||||||
- `pvforecast<0..5>_module_model`: Model of the PV modules of this plane.
|
|
||||||
- `pvforecast<0..5>_inverter_model`: Model of the inverter of this plane.
|
|
||||||
- `pvforecast<0..5>_inverter_paco`: AC power rating of the inverter. [W]
|
|
||||||
- `pvforecast<0..5>_modules_per_string`: Number of the PV modules of the strings of this plane.
|
|
||||||
- `pvforecast<0..5>_strings_per_inverter`: Number of the strings of the inverter of this plane.
|
|
||||||
- `pvforecastimport_file_path`: Path to the file to import PV forecast data from.
|
|
||||||
- `pvforecastimport_json`: JSON string, dictionary of PV forecast value lists.
|
|
||||||
|
|
||||||
---
|
- `provider`: PVForecast provider id of provider to be used.
|
||||||
|
|
||||||
Some of the configuration options directly follow the
|
- `PVForecastAkkudoktor`: Retrieves from Akkudoktor.net.
|
||||||
[PVGIS](https://joint-research-centre.ec.europa.eu/photovoltaic-geographical-information-system-pvgis/getting-started-pvgis/pvgis-user-manual_en)
|
- `PVForecastImport`: Imports from a file or JSON string.
|
||||||
nomenclature.
|
|
||||||
|
- `planes[].surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
||||||
|
- `planes[].surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
||||||
|
- `planes[].userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
||||||
|
- `planes[].peakpower`: Nominal power of PV system in kW.
|
||||||
|
- `planes[].pvtechchoice`: PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'.
|
||||||
|
- `planes[].mountingplace`: Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.
|
||||||
|
- `planes[].loss`: Sum of PV system losses in percent
|
||||||
|
- `planes[].trackingtype`: Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.
|
||||||
|
- `planes[].optimal_surface_tilt`: Calculate the optimum tilt angle. Ignored for two-axis tracking.
|
||||||
|
- `planes[].optimalangles`: Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.
|
||||||
|
- `planes[].albedo`: Proportion of the light hitting the ground that it reflects back.
|
||||||
|
- `planes[].module_model`: Model of the PV modules of this plane.
|
||||||
|
- `planes[].inverter_model`: Model of the inverter of this plane.
|
||||||
|
- `planes[].inverter_paco`: AC power rating of the inverter. [W]
|
||||||
|
- `planes[].modules_per_string`: Number of the PV modules of the strings of this plane.
|
||||||
|
- `planes[].strings_per_inverter`: Number of the strings of the inverter of this plane.
|
||||||
|
- `provider_settings.import_file_path`: Path to the file to import PV forecast data from.
|
||||||
|
- `provider_settings.import_json`: JSON string, dictionary of PV forecast value lists.
|
||||||
|
|
||||||
|
------
|
||||||
|
|
||||||
|
Some of the planes configuration options directly follow the [PVGIS](https://joint-research-centre.ec.europa.eu/photovoltaic-geographical-information-system-pvgis/getting-started-pvgis/pvgis-user-manual_en) nomenclature.
|
||||||
|
|
||||||
Detailed definitions taken from **PVGIS**:
|
Detailed definitions taken from **PVGIS**:
|
||||||
|
|
||||||
- `pvforecast<0..5>_pvtechchoice`
|
- `pvtechchoice`
|
||||||
|
|
||||||
The performance of PV modules depends on the temperature and on the solar irradiance, but the exact
|
The performance of PV modules depends on the temperature and on the solar irradiance, but the exact dependence varies between different types of PV modules. At the moment we can estimate the losses due to temperature and irradiance effects for the following types of modules: crystalline silicon cells; thin film modules made from CIS or CIGS and thin film modules made from Cadmium Telluride (CdTe).
|
||||||
dependence varies between different types of PV modules. At the moment we can estimate the losses
|
|
||||||
due to temperature and irradiance effects for the following types of modules: crystalline silicon
|
|
||||||
cells; thin film modules made from CIS or CIGS and thin film modules made from Cadmium Telluride
|
|
||||||
(CdTe).
|
|
||||||
|
|
||||||
For other technologies (especially various amorphous technologies), this correction cannot be
|
For other technologies (especially various amorphous technologies), this correction cannot be calculated here. If you choose one of the first three options here the calculation of performance will take into account the temperature dependence of the performance of the chosen technology. If you choose the other option (other/unknown), the calculation will assume a loss of 8% of power due to temperature effects (a generic value which has found to be reasonable for temperate climates).
|
||||||
calculated here. If you choose one of the first three options here the calculation of performance
|
|
||||||
will take into account the temperature dependence of the performance of the chosen technology. If
|
|
||||||
you choose the other option (other/unknown), the calculation will assume a loss of 8% of power due
|
|
||||||
to temperature effects (a generic value which has found to be reasonable for temperate climates).
|
|
||||||
|
|
||||||
PV power output also depends on the spectrum of the solar radiation. PVGIS can calculate how the
|
PV power output also depends on the spectrum of the solar radiation. PVGIS can calculate how the variations of the spectrum of sunlight affects the overall energy production from a PV system. At the moment this calculation can be done for crystalline silicon and CdTe modules. Note that this calculation is not yet available when using the NSRDB solar radiation database.
|
||||||
variations of the spectrum of sunlight affects the overall energy production from a PV system. At
|
|
||||||
the moment this calculation can be done for crystalline silicon and CdTe modules. Note that this
|
|
||||||
calculation is not yet available when using the NSRDB solar radiation database.
|
|
||||||
|
|
||||||
- `pvforecast<0..5>_peakpower`
|
- `peakpower`
|
||||||
|
|
||||||
This is the power that the manufacturer declares that the PV array can produce under standard test
|
This is the power that the manufacturer declares that the PV array can produce under standard test conditions (STC), which are a constant 1000W of solar irradiation per square meter in the plane of the array, at an array temperature of 25°C. The peak power should be entered in kilowatt-peak (kWp). If you do not know the declared peak power of your modules but instead know the area of the modules and the declared conversion efficiency (in percent), you can calculate the peak power as power = area * efficiency / 100.
|
||||||
conditions (STC), which are a constant 1000W of solar irradiation per square meter in the plane of
|
|
||||||
the array, at an array temperature of 25°C. The peak power should be entered in kilowatt-peak (kWp).
|
|
||||||
If you do not know the declared peak power of your modules but instead know the area of the modules
|
|
||||||
and the declared conversion efficiency (in percent), you can calculate the peak power as
|
|
||||||
power = area \* efficiency / 100.
|
|
||||||
|
|
||||||
Bifacial modules: PVGIS doesn't make specific calculations for bifacial modules at present. Users
|
Bifacial modules: PVGIS doesn't make specific calculations for bifacial modules at present. Users who wish to explore the possible benefits of this technology can input the power value for Bifacial Nameplate Irradiance. This can also be can also be estimated from the front side peak power P_STC value and the bifaciality factor, φ (if reported in the module data sheet) as: P_BNPI = P_STC * (1 + φ * 0.135). NB this bifacial approach is not appropriate for BAPV or BIPV installations or for modules mounting on a N-S axis i.e. facing E-W.
|
||||||
who wish to explore the possible benefits of this technology can input the power value for Bifacial
|
|
||||||
Nameplate Irradiance. This can also be can also be estimated from the front side peak power P_STC
|
|
||||||
value and the bifaciality factor, φ (if reported in the module data sheet) as:
|
|
||||||
P_BNPI = P_STC \* (1 + φ \* 0.135). NB this bifacial approach is not appropriate for BAPV or BIPV
|
|
||||||
installations or for modules mounting on a N-S axis i.e. facing E-W.
|
|
||||||
|
|
||||||
- `pvforecast<0..5>_loss`
|
- `loss`
|
||||||
|
|
||||||
The estimated system losses are all the losses in the system, which cause the power actually
|
The estimated system losses are all the losses in the system, which cause the power actually delivered to the electricity grid to be lower than the power produced by the PV modules. There are several causes for this loss, such as losses in cables, power inverters, dirt (sometimes snow) on the modules and so on. Over the years the modules also tend to lose a bit of their power, so the average yearly output over the lifetime of the system will be a few percent lower than the output in the first years.
|
||||||
delivered to the electricity grid to be lower than the power produced by the PV modules. There are
|
|
||||||
several causes for this loss, such as losses in cables, power inverters, dirt (sometimes snow) on
|
|
||||||
the modules and so on. Over the years the modules also tend to lose a bit of their power, so the
|
|
||||||
average yearly output over the lifetime of the system will be a few percent lower than the output
|
|
||||||
in the first years.
|
|
||||||
|
|
||||||
We have given a default value of 14% for the overall losses. If you have a good idea that your value
|
We have given a default value of 14% for the overall losses. If you have a good idea that your value will be different (maybe due to a really high-efficiency inverter) you may reduce this value a little.
|
||||||
will be different (maybe due to a really high-efficiency inverter) you may reduce this value a little.
|
|
||||||
|
|
||||||
- `pvforecast<0..5>_mountingplace`
|
- `mountingplace`
|
||||||
|
|
||||||
For fixed (non-tracking) systems, the way the modules are mounted will have an influence on the
|
For fixed (non-tracking) systems, the way the modules are mounted will have an influence on the temperature of the module, which in turn affects the efficiency. Experiments have shown that if the movement of air behind the modules is restricted, the modules can get considerably hotter (up to 15°C at 1000W/m2 of sunlight).
|
||||||
temperature of the module, which in turn affects the efficiency. Experiments have shown that if the
|
|
||||||
movement of air behind the modules is restricted, the modules can get considerably hotter
|
|
||||||
(up to 15°C at 1000W/m2 of sunlight).
|
|
||||||
|
|
||||||
In PVGIS there are two possibilities: free-standing, meaning that the modules are mounted on a rack
|
In PVGIS there are two possibilities: free-standing, meaning that the modules are mounted on a rack with air flowing freely behind the modules; and building- integrated, which means that the modules are completely built into the structure of the wall or roof of a building, with no air movement behind the modules.
|
||||||
with air flowing freely behind the modules; and building- integrated, which means that the modules
|
|
||||||
are completely built into the structure of the wall or roof of a building, with no air movement
|
|
||||||
behind the modules.
|
|
||||||
|
|
||||||
Some types of mounting are in between these two extremes, for instance if the modules are mounted on
|
Some types of mounting are in between these two extremes, for instance if the modules are mounted on a roof with curved roof tiles, allowing air to move behind the modules. In such cases, the performance will be somewhere between the results of the two calculations that are possible here.
|
||||||
a roof with curved roof tiles, allowing air to move behind the modules. In such cases, the
|
|
||||||
performance will be somewhere between the results of the two calculations that are possible here.
|
|
||||||
|
|
||||||
- `pvforecast<0..5>_userhorizon`
|
- `userhorizon`
|
||||||
|
|
||||||
Elevation of horizon in degrees, at equally spaced azimuth clockwise from north. In the user horizon
|
Elevation of horizon in degrees, at equally spaced azimuth clockwise from north. In the user horizon
|
||||||
data each number represents the horizon height in degrees in a certain compass direction around the
|
data each number represents the horizon height in degrees in a certain compass direction around the
|
||||||
@@ -305,71 +271,82 @@ represent equal angular distance around the horizon. For instance, if you have 3
|
|||||||
point is due north, the next is 10 degrees east of north, and so on, until the last point, 10
|
point is due north, the next is 10 degrees east of north, and so on, until the last point, 10
|
||||||
degrees west of north.
|
degrees west of north.
|
||||||
|
|
||||||
---
|
------
|
||||||
|
|
||||||
Most of the configuration options are in line with the
|
Most of the planes configuration options are in line with the [PVLib](https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html) definition for PVGIS data.
|
||||||
[PVLib](https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html) definition for PVGIS data.
|
|
||||||
|
|
||||||
Detailed definitions from **PVLib** for PVGIS data.
|
Detailed definitions from **PVLib** for PVGIS data.
|
||||||
|
|
||||||
- `pvforecast<0..5>_surface_tilt`:
|
- `surface_tilt`:
|
||||||
|
|
||||||
Tilt angle from horizontal plane.
|
Tilt angle from horizontal plane.
|
||||||
|
|
||||||
- `pvforecast<0..5>_surface_azimuth`
|
- `surface_azimuth`
|
||||||
|
|
||||||
Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180,
|
Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180,
|
||||||
west=270). This is offset 180 degrees from the convention used by PVGIS.
|
west=270). This is offset 180 degrees from the convention used by PVGIS.
|
||||||
|
|
||||||
---
|
------
|
||||||
|
|
||||||
### PVForecastAkkudoktor Provider
|
### PVForecastAkkudoktor Provider
|
||||||
|
|
||||||
The `PVForecastAkkudoktor` provider retrieves the PV power forecast data directly from
|
The `PVForecastAkkudoktor` provider retrieves the PV power forecast data directly from
|
||||||
**Akkudoktor.net**.
|
**Akkudoktor.net**.
|
||||||
|
|
||||||
The following general configuration options of the PV system must be set:
|
The following prediction configuration options of the PV system must be set:
|
||||||
|
|
||||||
- `latitude`: Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)"
|
- `general.latitude`: Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)"
|
||||||
- `longitude`: Longitude in decimal degrees, within -180 to 180 (°)
|
- `general.longitude`: Longitude in decimal degrees, within -180 to 180 (°)
|
||||||
|
|
||||||
For each plane `<0..5>` of the PV system the following configuration options must be set:
|
For each plane of the PV system the following configuration options must be set:
|
||||||
|
|
||||||
- `pvforecast<0..5>_surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
- `pvforecast.planes[].surface_tilt`: Tilt angle from horizontal plane. Ignored for two-axis tracking.
|
||||||
- `pvforecast<0..5>_surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane.
|
- `pvforecast.planes[].surface_azimuth`: Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).
|
||||||
Clockwise from north (north=0, east=90, south=180, west=270).
|
- `pvforecast.planes[].userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
||||||
- `pvforecast<0..5>_userhorizon`: Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.
|
- `pvforecast.planes[].inverter_paco`: AC power rating of the inverter. [W]
|
||||||
- `pvforecast<0..5>_inverter_paco`: AC power rating of the inverter. [W]
|
- `pvforecast.planes[].peakpower`: Nominal power of PV system in kW.
|
||||||
- `pvforecast<0..5>_peakpower`: Nominal power of PV system in kW.
|
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
```Python
|
```Python
|
||||||
{
|
{
|
||||||
"latitude": 50.1234,
|
"general": {
|
||||||
"longitude": 9.7654,
|
"latitude": 50.1234,
|
||||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
"longitude": 9.7654,
|
||||||
"pvforecast0_peakpower": 5.0,
|
},
|
||||||
"pvforecast0_surface_azimuth": -10,
|
"pvforecast": {
|
||||||
"pvforecast0_surface_tilt": 7,
|
"provider": "PVForecastAkkudoktor",
|
||||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
"planes": [
|
||||||
"pvforecast0_inverter_paco": 10000,
|
{
|
||||||
"pvforecast1_peakpower": 4.8,
|
"peakpower": 5.0,
|
||||||
"pvforecast1_surface_azimuth": -90,
|
"surface_azimuth": -10,
|
||||||
"pvforecast1_surface_tilt": 7,
|
"surface_tilt": 7,
|
||||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
"userhorizon": [20, 27, 22, 20],
|
||||||
"pvforecast1_inverter_paco": 10000,
|
"inverter_paco": 10000,
|
||||||
"pvforecast2_peakpower": 1.4,
|
},
|
||||||
"pvforecast2_surface_azimuth": -40,
|
{
|
||||||
"pvforecast2_surface_tilt": 60,
|
"peakpower": 4.8,
|
||||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
"surface_azimuth": -90,
|
||||||
"pvforecast2_inverter_paco": 2000,
|
"surface_tilt": 7,
|
||||||
"pvforecast3_peakpower": 1.6,
|
"userhorizon": [30, 30, 30, 50],
|
||||||
"pvforecast3_surface_azimuth": 5,
|
"inverter_paco": 10000,
|
||||||
"pvforecast3_surface_tilt": 45,
|
},
|
||||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
{
|
||||||
"pvforecast3_inverter_paco": 1400,
|
"peakpower": 1.4,
|
||||||
"pvforecast4_peakpower": None,
|
"surface_azimuth": -40,
|
||||||
|
"surface_tilt": 60,
|
||||||
|
"userhorizon": [60, 30, 0, 30],
|
||||||
|
"inverter_paco": 2000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"peakpower": 1.6,
|
||||||
|
"surface_azimuth": 5,
|
||||||
|
"surface_tilt": 45,
|
||||||
|
"userhorizon": [45, 25, 30, 60],
|
||||||
|
"inverter_paco": 1400,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -386,7 +363,7 @@ The prediction keys for the PV forecast data are:
|
|||||||
|
|
||||||
The PV forecast data must be provided in one of the formats described in
|
The PV forecast data must be provided in one of the formats described in
|
||||||
<project:#prediction-import-providers>. The data source must be given in the
|
<project:#prediction-import-providers>. The data source must be given in the
|
||||||
`pvforecastimport_file_path` or `pvforecastimport_json` configuration option.
|
`import_file_path` or `import_json` configuration option.
|
||||||
|
|
||||||
## Weather Prediction
|
## Weather Prediction
|
||||||
|
|
||||||
@@ -417,14 +394,16 @@ Prediction keys:
|
|||||||
|
|
||||||
Configuration options:
|
Configuration options:
|
||||||
|
|
||||||
- `weather_provider`: Load provider id of provider to be used.
|
- `weather`: General weather configuration.
|
||||||
|
|
||||||
- `BrightSky`: Retrieves from [BrightSky](https://api.brightsky.dev).
|
- `provider`: Load provider id of provider to be used.
|
||||||
- `ClearOutside`: Retrieves from [ClearOutside](https://clearoutside.com/forecast).
|
|
||||||
- `LoadImport`: Imports from a file or JSON string.
|
|
||||||
|
|
||||||
- `weatherimport_file_path`: Path to the file to import weatherforecast data from.
|
- `BrightSky`: Retrieves from https://api.brightsky.dev.
|
||||||
- `weatherimport_json`: JSON string, dictionary of weather forecast value lists.
|
- `ClearOutside`: Retrieves from https://clearoutside.com/forecast.
|
||||||
|
- `LoadImport`: Imports from a file or JSON string.
|
||||||
|
|
||||||
|
- `provider_settings.import_file_path`: Path to the file to import weatherforecast data from.
|
||||||
|
- `provider_settings.import_json`: JSON string, dictionary of weather forecast value lists.
|
||||||
|
|
||||||
### BrightSky Provider
|
### BrightSky Provider
|
||||||
|
|
||||||
@@ -508,4 +487,4 @@ The prediction keys for the PV forecast data are:
|
|||||||
|
|
||||||
The PV forecast data must be provided in one of the formats described in
|
The PV forecast data must be provided in one of the formats described in
|
||||||
<project:#prediction-import-providers>. The data source must be given in the
|
<project:#prediction-import-providers>. The data source must be given in the
|
||||||
`weatherimport_file_path` or `pvforecastimport_json` configuration option.
|
`import_file_path` or `import_json` configuration option.
|
||||||
|
@@ -99,6 +99,7 @@ html_theme_options = {
|
|||||||
"logo_only": False,
|
"logo_only": False,
|
||||||
"titles_only": True,
|
"titles_only": True,
|
||||||
}
|
}
|
||||||
|
html_css_files = ["eos.css"]
|
||||||
|
|
||||||
# -- Options for autodoc -------------------------------------------------
|
# -- Options for autodoc -------------------------------------------------
|
||||||
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
|
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
|
||||||
|
@@ -19,7 +19,6 @@ Install the dependencies in a virtual environment:
|
|||||||
|
|
||||||
python -m venv .venv
|
python -m venv .venv
|
||||||
.venv\Scripts\pip install -r requirements.txt
|
.venv\Scripts\pip install -r requirements.txt
|
||||||
.venv\Scripts\pip install -e .
|
|
||||||
|
|
||||||
.. tab:: Linux
|
.. tab:: Linux
|
||||||
|
|
||||||
@@ -27,7 +26,6 @@ Install the dependencies in a virtual environment:
|
|||||||
|
|
||||||
python -m venv .venv
|
python -m venv .venv
|
||||||
.venv/bin/pip install -r requirements.txt
|
.venv/bin/pip install -r requirements.txt
|
||||||
.venv/bin/pip install -e .
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -75,53 +73,37 @@ This project uses the `EOS.config.json` file to manage configuration settings.
|
|||||||
|
|
||||||
### Default Configuration
|
### Default Configuration
|
||||||
|
|
||||||
A default configuration file `default.config.json` is provided. This file contains all the necessary
|
A default configuration file `default.config.json` is provided. This file contains all the necessary configuration keys with their default values.
|
||||||
configuration keys with their default values.
|
|
||||||
|
|
||||||
### Custom Configuration
|
### Custom Configuration
|
||||||
|
|
||||||
Users can specify a custom configuration directory by setting the environment variable `EOS_DIR`.
|
Users can specify a custom configuration directory by setting the environment variable `EOS_DIR`.
|
||||||
|
|
||||||
- If the directory specified by `EOS_DIR` contains an existing `EOS.config.json` file, the
|
- If the directory specified by `EOS_DIR` contains an existing `EOS.config.json` file, the application will use this configuration file.
|
||||||
application will use this configuration file.
|
- If the `EOS.config.json` file does not exist in the specified directory, the `default.config.json` file will be copied to the directory as `EOS.config.json`.
|
||||||
- If the `EOS.config.json` file does not exist in the specified directory, the `default.config.json`
|
|
||||||
file will be copied to the directory as `EOS.config.json`.
|
|
||||||
|
|
||||||
### Configuration Updates
|
### Configuration Updates
|
||||||
|
|
||||||
If the configuration keys in the `EOS.config.json` file are missing or different from those in
|
If the configuration keys in the `EOS.config.json` file are missing or different from those in `default.config.json`, they will be automatically updated to match the default settings, ensuring that all required keys are present.
|
||||||
`default.config.json`, they will be automatically updated to match the default settings, ensuring
|
|
||||||
that all required keys are present.
|
|
||||||
|
|
||||||
## Classes and Functionalities
|
## Classes and Functionalities
|
||||||
|
|
||||||
This project uses various classes to simulate and optimize the components of an energy system. Each
|
This project uses various classes to simulate and optimize the components of an energy system. Each class represents a specific aspect of the system, as described below:
|
||||||
class represents a specific aspect of the system, as described below:
|
|
||||||
|
|
||||||
- `Battery`: Simulates a battery storage system, including capacity, state of charge, and now
|
- `Battery`: Simulates a battery storage system, including capacity, state of charge, and now charge and discharge losses.
|
||||||
charge and discharge losses.
|
|
||||||
|
|
||||||
- `PVForecast`: Provides forecast data for photovoltaic generation, based on weather data and
|
- `PVForecast`: Provides forecast data for photovoltaic generation, based on weather data and historical generation data.
|
||||||
historical generation data.
|
|
||||||
|
|
||||||
- `Load`: Models the load requirements of a household or business, enabling the prediction of future
|
- `Load`: Models the load requirements of a household or business, enabling the prediction of future energy demand.
|
||||||
energy demand.
|
|
||||||
|
|
||||||
- `Heatpump`: Simulates a heat pump, including its energy consumption and efficiency under various
|
- `Heatpump`: Simulates a heat pump, including its energy consumption and efficiency under various operating conditions.
|
||||||
operating conditions.
|
|
||||||
|
|
||||||
- `Strompreis`: Provides information on electricity prices, enabling optimization of energy
|
- `Strompreis`: Provides information on electricity prices, enabling optimization of energy consumption and generation based on tariff information.
|
||||||
consumption and generation based on tariff information.
|
|
||||||
|
|
||||||
- `EMS`: The Energy Management System (EMS) coordinates the interaction between the various
|
- `EMS`: The Energy Management System (EMS) coordinates the interaction between the various components, performs optimization, and simulates the operation of the entire energy system.
|
||||||
components, performs optimization, and simulates the operation of the entire energy system.
|
|
||||||
|
|
||||||
These classes work together to enable a detailed simulation and optimization of the energy system.
|
These classes work together to enable a detailed simulation and optimization of the energy system. For each class, specific parameters and settings can be adjusted to test different scenarios and strategies.
|
||||||
For each class, specific parameters and settings can be adjusted to test different scenarios and
|
|
||||||
strategies.
|
|
||||||
|
|
||||||
### Customization and Extension
|
### Customization and Extension
|
||||||
|
|
||||||
Each class is designed to be easily customized and extended to integrate additional functions or
|
Each class is designed to be easily customized and extended to integrate additional functions or improvements. For example, new methods can be added for more accurate modeling of PV system or battery behavior. Developers are invited to modify and extend the system according to their needs.
|
||||||
improvements. For example, new methods can be added for more accurate modeling of PV system or
|
|
||||||
battery behavior. Developers are invited to modify and extend the system according to their needs.
|
|
||||||
|
@@ -8,32 +8,12 @@
|
|||||||
|
|
||||||
```{toctree}
|
```{toctree}
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
:caption: Overview
|
:caption: 'Contents:'
|
||||||
|
|
||||||
akkudoktoreos/introduction.md
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
```{toctree}
|
|
||||||
:maxdepth: 2
|
|
||||||
:caption: Tutorials
|
|
||||||
|
|
||||||
|
welcome.md
|
||||||
|
akkudoktoreos/about.md
|
||||||
develop/getting_started.md
|
develop/getting_started.md
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
```{toctree}
|
|
||||||
:maxdepth: 2
|
|
||||||
:caption: How-To Guides
|
|
||||||
|
|
||||||
develop/CONTRIBUTING.md
|
develop/CONTRIBUTING.md
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
```{toctree}
|
|
||||||
:maxdepth: 2
|
|
||||||
:caption: Reference
|
|
||||||
|
|
||||||
akkudoktoreos/architecture.md
|
akkudoktoreos/architecture.md
|
||||||
akkudoktoreos/configuration.md
|
akkudoktoreos/configuration.md
|
||||||
akkudoktoreos/optimization.md
|
akkudoktoreos/optimization.md
|
||||||
@@ -42,10 +22,9 @@ akkudoktoreos/measurement.md
|
|||||||
akkudoktoreos/integration.md
|
akkudoktoreos/integration.md
|
||||||
akkudoktoreos/serverapi.md
|
akkudoktoreos/serverapi.md
|
||||||
akkudoktoreos/api.rst
|
akkudoktoreos/api.rst
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Indices and tables
|
# Indices and tables
|
||||||
|
|
||||||
- {ref}`genindex`
|
- {ref}`genindex`
|
||||||
- {ref}`modindex`
|
- {ref}`modindex`
|
||||||
|
@@ -1,20 +0,0 @@
|
|||||||
{
|
|
||||||
"plugins": {
|
|
||||||
"md007": {
|
|
||||||
"enabled": true,
|
|
||||||
"code_block_line_length" : 160
|
|
||||||
},
|
|
||||||
"md013": {
|
|
||||||
"enabled": true,
|
|
||||||
"line_length" : 120
|
|
||||||
},
|
|
||||||
"md041": {
|
|
||||||
"enabled": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"extensions": {
|
|
||||||
"front-matter" : {
|
|
||||||
"enabled" : true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,12 +1,12 @@
|
|||||||
% SPDX-License-Identifier: Apache-2.0
|
% SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
# Welcome to the EOS documentation
|
# Welcome to the EOS documentation!
|
||||||
|
|
||||||
This documentation is continuously written. It is edited via text files in the
|
This documentation is continuously written. It is edited via text files in the
|
||||||
[Markdown/ Markedly Structured Text](https://myst-parser.readthedocs.io/en/latest/index.html)
|
[Markdown/ Markedly Structured Text](https://myst-parser.readthedocs.io/en/latest/index.html)
|
||||||
markup language and then compiled into a static website/ offline document using the open source tool
|
markup language and then compiled into a static website/ offline document using the open source tool
|
||||||
[Sphinx](https://www.sphinx-doc.org) and is available on
|
[Sphinx](https://www.sphinx-doc.org) and will someday land on
|
||||||
[Read the Docs](https://akkudoktor-eos.readthedocs.io/en/latest/).
|
[Read the Docs](https://akkudoktoreos.readthedocs.io/en/latest/index.html).
|
||||||
|
|
||||||
You can contribute to EOS's documentation by opening
|
You can contribute to EOS's documentation by opening
|
||||||
[GitHub issues](https://github.com/Akkudoktor-EOS/EOS/issues)
|
[GitHub issues](https://github.com/Akkudoktor-EOS/EOS/issues)
|
||||||
|
12951
openapi.json
12951
openapi.json
File diff suppressed because it is too large
Load Diff
@@ -7,7 +7,7 @@ authors = [
|
|||||||
description = "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period."
|
description = "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period."
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
license = {file = "LICENSE"}
|
license = {file = "LICENSE"}
|
||||||
requires-python = ">=3.11"
|
requires-python = ">=3.10"
|
||||||
classifiers = [
|
classifiers = [
|
||||||
"Development Status :: 3 - Alpha",
|
"Development Status :: 3 - Alpha",
|
||||||
"Programming Language :: Python :: 3",
|
"Programming Language :: Python :: 3",
|
||||||
|
@@ -1,14 +1,14 @@
|
|||||||
-r requirements.txt
|
-r requirements.txt
|
||||||
gitpython==3.1.44
|
gitpython==3.1.44
|
||||||
linkify-it-py==2.0.3
|
linkify-it-py==2.0.3
|
||||||
myst-parser==4.0.1
|
myst-parser==4.0.0
|
||||||
sphinx==8.2.3
|
sphinx==8.1.3
|
||||||
sphinx_rtd_theme==3.0.2
|
sphinx_rtd_theme==3.0.2
|
||||||
sphinx-tabs==3.4.7
|
sphinx-tabs==3.4.7
|
||||||
pytest==8.3.5
|
pytest==8.3.4
|
||||||
pytest-cov==6.0.0
|
pytest-cov==6.0.0
|
||||||
pytest-xprocess==1.0.2
|
pytest-xprocess==1.0.2
|
||||||
pre-commit
|
pre-commit
|
||||||
mypy==1.15.0
|
mypy==1.13.0
|
||||||
types-requests==2.32.0.20250306
|
types-requests==2.32.0.20241016
|
||||||
pandas-stubs==2.2.3.250308
|
pandas-stubs==2.2.3.241126
|
||||||
|
@@ -1,8 +1,8 @@
|
|||||||
numpy==2.2.4
|
numpy==2.2.2
|
||||||
numpydantic==1.6.8
|
numpydantic==1.6.7
|
||||||
matplotlib==3.10.1
|
matplotlib==3.10.0
|
||||||
fastapi[standard]==0.115.11
|
fastapi[standard]==0.115.7
|
||||||
python-fasthtml==0.12.4
|
python-fasthtml==0.12.0
|
||||||
uvicorn==0.34.0
|
uvicorn==0.34.0
|
||||||
scikit-learn==1.6.1
|
scikit-learn==1.6.1
|
||||||
timezonefinder==6.5.8
|
timezonefinder==6.5.8
|
||||||
@@ -10,7 +10,8 @@ deap==1.4.2
|
|||||||
requests==2.32.3
|
requests==2.32.3
|
||||||
pandas==2.2.3
|
pandas==2.2.3
|
||||||
pendulum==3.0.0
|
pendulum==3.0.0
|
||||||
platformdirs==4.3.7
|
platformdirs==4.3.6
|
||||||
pvlib==0.12.0
|
pvlib==0.11.2
|
||||||
pydantic==2.10.6
|
pydantic==2.10.6
|
||||||
statsmodels==0.14.4
|
statsmodels==0.14.4
|
||||||
|
pydantic-settings==2.7.0
|
||||||
|
@@ -2,132 +2,279 @@
|
|||||||
"""Utility functions for Configuration specification generation."""
|
"""Utility functions for Configuration specification generation."""
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import json
|
||||||
import sys
|
import sys
|
||||||
|
import textwrap
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
from akkudoktoreos.config.config import get_config
|
from pydantic.fields import ComputedFieldInfo, FieldInfo
|
||||||
|
from pydantic_core import PydanticUndefined
|
||||||
|
|
||||||
|
from akkudoktoreos.config.config import ConfigEOS, GeneralSettings, get_config
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
|
from akkudoktoreos.utils.docs import get_model_structure_from_examples
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
config_eos = get_config()
|
|
||||||
|
|
||||||
# Fixed set of prefixes to filter configuration values and their respective titles
|
documented_types: set[PydanticBaseModel] = set()
|
||||||
CONFIG_PREFIXES = {
|
undocumented_types: dict[PydanticBaseModel, tuple[str, list[str]]] = dict()
|
||||||
"battery": "Battery Device Simulation Configuration",
|
|
||||||
"bev": "Battery Electric Vehicle Device Simulation Configuration",
|
|
||||||
"dishwasher": "Dishwasher Device Simulation Configuration",
|
|
||||||
"inverter": "Inverter Device Simulation Configuration",
|
|
||||||
"measurement": "Measurement Configuration",
|
|
||||||
"optimization": "General Optimization Configuration",
|
|
||||||
"server": "Server Configuration",
|
|
||||||
"elecprice": "Electricity Price Prediction Configuration",
|
|
||||||
"load": "Load Prediction Configuration",
|
|
||||||
"logging": "Logging Configuration",
|
|
||||||
"prediction": "General Prediction Configuration",
|
|
||||||
"pvforecast": "PV Forecast Configuration",
|
|
||||||
"weather": "Weather Forecast Configuration",
|
|
||||||
}
|
|
||||||
|
|
||||||
# Static set of configuration names to include in a separate table
|
global_config_dict: dict[str, Any] = dict()
|
||||||
GENERAL_CONFIGS = [
|
|
||||||
"config_default_file_path",
|
|
||||||
"config_file_path",
|
|
||||||
"config_folder_path",
|
|
||||||
"config_keys",
|
|
||||||
"config_keys_read_only",
|
|
||||||
"data_cache_path",
|
|
||||||
"data_cache_subpath",
|
|
||||||
"data_folder_path",
|
|
||||||
"data_output_path",
|
|
||||||
"data_output_subpath",
|
|
||||||
"latitude",
|
|
||||||
"longitude",
|
|
||||||
"package_root_path",
|
|
||||||
"timezone",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def generate_config_table_md(configs, title):
|
def get_title(config: PydanticBaseModel) -> str:
|
||||||
|
if config.__doc__ is None:
|
||||||
|
raise NameError(f"Missing docstring: {config}")
|
||||||
|
return config.__doc__.strip().splitlines()[0].strip(".")
|
||||||
|
|
||||||
|
|
||||||
|
def get_body(config: PydanticBaseModel) -> str:
|
||||||
|
if config.__doc__ is None:
|
||||||
|
raise NameError(f"Missing docstring: {config}")
|
||||||
|
return textwrap.dedent("\n".join(config.__doc__.strip().splitlines()[1:])).strip()
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_nested_types(field_type: Any, parent_types: list[str]) -> list[tuple[Any, list[str]]]:
|
||||||
|
resolved_types: list[tuple[type, list[str]]] = []
|
||||||
|
|
||||||
|
origin = getattr(field_type, "__origin__", field_type)
|
||||||
|
if origin is Union:
|
||||||
|
for arg in getattr(field_type, "__args__", []):
|
||||||
|
resolved_types.extend(resolve_nested_types(arg, parent_types))
|
||||||
|
elif origin is list:
|
||||||
|
for arg in getattr(field_type, "__args__", []):
|
||||||
|
resolved_types.extend(resolve_nested_types(arg, parent_types + ["list"]))
|
||||||
|
else:
|
||||||
|
resolved_types.append((field_type, parent_types))
|
||||||
|
|
||||||
|
return resolved_types
|
||||||
|
|
||||||
|
|
||||||
|
def create_model_from_examples(
|
||||||
|
model_class: PydanticBaseModel, multiple: bool
|
||||||
|
) -> list[PydanticBaseModel]:
|
||||||
|
"""Create a model instance with default or example values, respecting constraints."""
|
||||||
|
return [
|
||||||
|
model_class(**data) for data in get_model_structure_from_examples(model_class, multiple)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def build_nested_structure(keys: list[str], value: Any) -> Any:
|
||||||
|
if not keys:
|
||||||
|
return value
|
||||||
|
|
||||||
|
current_key = keys[0]
|
||||||
|
if current_key == "list":
|
||||||
|
return [build_nested_structure(keys[1:], value)]
|
||||||
|
else:
|
||||||
|
return {current_key: build_nested_structure(keys[1:], value)}
|
||||||
|
|
||||||
|
|
||||||
|
def get_default_value(field_info: Union[FieldInfo, ComputedFieldInfo], regular_field: bool) -> Any:
|
||||||
|
default_value = ""
|
||||||
|
if regular_field:
|
||||||
|
if (val := field_info.default) is not PydanticUndefined:
|
||||||
|
default_value = val
|
||||||
|
else:
|
||||||
|
default_value = "required"
|
||||||
|
else:
|
||||||
|
default_value = "N/A"
|
||||||
|
return default_value
|
||||||
|
|
||||||
|
|
||||||
|
def get_type_name(field_type: type) -> str:
|
||||||
|
type_name = str(field_type).replace("typing.", "")
|
||||||
|
if type_name.startswith("<class"):
|
||||||
|
type_name = field_type.__name__
|
||||||
|
return type_name
|
||||||
|
|
||||||
|
|
||||||
|
def generate_config_table_md(
|
||||||
|
config: PydanticBaseModel,
|
||||||
|
toplevel_keys: list[str],
|
||||||
|
prefix: str,
|
||||||
|
toplevel: bool = False,
|
||||||
|
extra_config: bool = False,
|
||||||
|
) -> str:
|
||||||
"""Generate a markdown table for given configurations.
|
"""Generate a markdown table for given configurations.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
configs (dict): Configuration values with keys and their descriptions.
|
config (PydanticBaseModel): PydanticBaseModel configuration definition.
|
||||||
title (str): Title for the table.
|
prefix (str): Prefix for table entries.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: The markdown table as a string.
|
str: The markdown table as a string.
|
||||||
"""
|
"""
|
||||||
if not configs:
|
table = ""
|
||||||
return ""
|
if toplevel:
|
||||||
|
title = get_title(config)
|
||||||
|
|
||||||
|
heading_level = "###" if extra_config else "##"
|
||||||
|
env_header = ""
|
||||||
|
env_header_underline = ""
|
||||||
|
env_width = ""
|
||||||
|
if not extra_config:
|
||||||
|
env_header = "| Environment Variable "
|
||||||
|
env_header_underline = "| -------------------- "
|
||||||
|
env_width = "20 "
|
||||||
|
|
||||||
|
table += f"{heading_level} {title}\n\n"
|
||||||
|
|
||||||
|
body = get_body(config)
|
||||||
|
if body:
|
||||||
|
table += body
|
||||||
|
table += "\n\n"
|
||||||
|
|
||||||
|
table += (
|
||||||
|
":::{table} "
|
||||||
|
+ f"{'::'.join(toplevel_keys)}\n:widths: 10 {env_width}10 5 5 30\n:align: left\n\n"
|
||||||
|
)
|
||||||
|
table += f"| Name {env_header}| Type | Read-Only | Default | Description |\n"
|
||||||
|
table += f"| ---- {env_header_underline}| ---- | --------- | ------- | ----------- |\n"
|
||||||
|
|
||||||
|
for field_name, field_info in list(config.model_fields.items()) + list(
|
||||||
|
config.model_computed_fields.items()
|
||||||
|
):
|
||||||
|
regular_field = isinstance(field_info, FieldInfo)
|
||||||
|
|
||||||
|
config_name = field_name if extra_config else field_name.upper()
|
||||||
|
field_type = field_info.annotation if regular_field else field_info.return_type
|
||||||
|
default_value = get_default_value(field_info, regular_field)
|
||||||
|
description = field_info.description if field_info.description else "-"
|
||||||
|
read_only = "rw" if regular_field else "ro"
|
||||||
|
type_name = get_type_name(field_type)
|
||||||
|
|
||||||
|
env_entry = ""
|
||||||
|
if not extra_config:
|
||||||
|
if regular_field:
|
||||||
|
env_entry = f"| `{prefix}{config_name}` "
|
||||||
|
else:
|
||||||
|
env_entry = "| "
|
||||||
|
table += f"| {field_name} {env_entry}| `{type_name}` | `{read_only}` | `{default_value}` | {description} |\n"
|
||||||
|
|
||||||
|
inner_types: dict[PydanticBaseModel, tuple[str, list[str]]] = dict()
|
||||||
|
|
||||||
|
def extract_nested_models(subtype: Any, subprefix: str, parent_types: list[str]):
|
||||||
|
if subtype in inner_types.keys():
|
||||||
|
return
|
||||||
|
nested_types = resolve_nested_types(subtype, [])
|
||||||
|
for nested_type, nested_parent_types in nested_types:
|
||||||
|
if issubclass(nested_type, PydanticBaseModel):
|
||||||
|
new_parent_types = parent_types + nested_parent_types
|
||||||
|
if "list" in parent_types:
|
||||||
|
new_prefix = ""
|
||||||
|
else:
|
||||||
|
new_prefix = f"{subprefix}"
|
||||||
|
inner_types.setdefault(nested_type, (new_prefix, new_parent_types))
|
||||||
|
for nested_field_name, nested_field_info in list(
|
||||||
|
nested_type.model_fields.items()
|
||||||
|
) + list(nested_type.model_computed_fields.items()):
|
||||||
|
nested_field_type = nested_field_info.annotation
|
||||||
|
if new_prefix:
|
||||||
|
new_prefix += f"{nested_field_name.upper()}__"
|
||||||
|
extract_nested_models(
|
||||||
|
nested_field_type,
|
||||||
|
new_prefix,
|
||||||
|
new_parent_types + [nested_field_name],
|
||||||
|
)
|
||||||
|
|
||||||
|
extract_nested_models(field_type, f"{prefix}{config_name}__", toplevel_keys + [field_name])
|
||||||
|
|
||||||
|
for new_type, info in inner_types.items():
|
||||||
|
if new_type not in documented_types:
|
||||||
|
undocumented_types.setdefault(new_type, (info[0], info[1]))
|
||||||
|
|
||||||
|
if toplevel:
|
||||||
|
table += ":::\n\n" # Add an empty line after the table
|
||||||
|
|
||||||
|
has_examples_list = toplevel_keys[-1] == "list"
|
||||||
|
instance_list = create_model_from_examples(config, has_examples_list)
|
||||||
|
if instance_list:
|
||||||
|
ins_dict_list = []
|
||||||
|
ins_out_dict_list = []
|
||||||
|
for ins in instance_list:
|
||||||
|
# Transform to JSON (and manually to dict) to use custom serializers and then merge with parent keys
|
||||||
|
ins_json = ins.model_dump_json(include_computed_fields=False)
|
||||||
|
ins_dict_list.append(json.loads(ins_json))
|
||||||
|
|
||||||
|
ins_out_json = ins.model_dump_json(include_computed_fields=True)
|
||||||
|
ins_out_dict_list.append(json.loads(ins_out_json))
|
||||||
|
|
||||||
|
same_output = ins_out_dict_list == ins_dict_list
|
||||||
|
same_output_str = "/Output" if same_output else ""
|
||||||
|
|
||||||
|
table += f"#{heading_level} Example Input{same_output_str}\n\n"
|
||||||
|
table += "```{eval-rst}\n"
|
||||||
|
table += ".. code-block:: json\n\n"
|
||||||
|
if has_examples_list:
|
||||||
|
input_dict = build_nested_structure(toplevel_keys[:-1], ins_dict_list)
|
||||||
|
if not extra_config:
|
||||||
|
global_config_dict[toplevel_keys[0]] = ins_dict_list
|
||||||
|
else:
|
||||||
|
input_dict = build_nested_structure(toplevel_keys, ins_dict_list[0])
|
||||||
|
if not extra_config:
|
||||||
|
global_config_dict[toplevel_keys[0]] = ins_dict_list[0]
|
||||||
|
table += textwrap.indent(json.dumps(input_dict, indent=4), " ")
|
||||||
|
table += "\n"
|
||||||
|
table += "```\n\n"
|
||||||
|
|
||||||
|
if not same_output:
|
||||||
|
table += f"#{heading_level} Example Output\n\n"
|
||||||
|
table += "```{eval-rst}\n"
|
||||||
|
table += ".. code-block:: json\n\n"
|
||||||
|
if has_examples_list:
|
||||||
|
output_dict = build_nested_structure(toplevel_keys[:-1], ins_out_dict_list)
|
||||||
|
else:
|
||||||
|
output_dict = build_nested_structure(toplevel_keys, ins_out_dict_list[0])
|
||||||
|
table += textwrap.indent(json.dumps(output_dict, indent=4), " ")
|
||||||
|
table += "\n"
|
||||||
|
table += "```\n\n"
|
||||||
|
|
||||||
|
while undocumented_types:
|
||||||
|
extra_config_type, extra_info = undocumented_types.popitem()
|
||||||
|
documented_types.add(extra_config_type)
|
||||||
|
table += generate_config_table_md(
|
||||||
|
extra_config_type, extra_info[1], extra_info[0], True, True
|
||||||
|
)
|
||||||
|
|
||||||
table = f"## {title}\n\n"
|
|
||||||
table += ":::{table} " + f"{title}\n:widths: 10 10 5 5 30\n:align: left\n\n"
|
|
||||||
table += "| Name | Type | Read-Only | Default | Description |\n"
|
|
||||||
table += "| ---- | ---- | --------- | ------- | ----------- |\n"
|
|
||||||
for name, config in sorted(configs.items()):
|
|
||||||
type_name = config["type"]
|
|
||||||
if type_name.startswith("typing."):
|
|
||||||
type_name = type_name[len("typing.") :]
|
|
||||||
table += f"| `{config['name']}` | `{type_name}` | `{config['read-only']}` | `{config['default']}` | {config['description']} |\n"
|
|
||||||
table += ":::\n\n" # Add an empty line after the table
|
|
||||||
return table
|
return table
|
||||||
|
|
||||||
|
|
||||||
def generate_config_md() -> str:
|
def generate_config_md(config_eos: ConfigEOS) -> str:
|
||||||
"""Generate configuration specification in Markdown with extra tables for prefixed values.
|
"""Generate configuration specification in Markdown with extra tables for prefixed values.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: The Markdown representation of the configuration spec.
|
str: The Markdown representation of the configuration spec.
|
||||||
"""
|
"""
|
||||||
configs = {}
|
# Fix file path for general settings to not show local/test file path
|
||||||
config_keys = config_eos.config_keys
|
GeneralSettings._config_file_path = Path(
|
||||||
config_keys_read_only = config_eos.config_keys_read_only
|
"/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
|
||||||
for config_key in config_keys:
|
)
|
||||||
config = {}
|
GeneralSettings._config_folder_path = config_eos.general.config_file_path.parent
|
||||||
config["name"] = config_key
|
|
||||||
config["value"] = getattr(config_eos, config_key)
|
|
||||||
|
|
||||||
if config_key in config_keys_read_only:
|
|
||||||
config["read-only"] = "ro"
|
|
||||||
computed_field_info = config_eos.__pydantic_decorators__.computed_fields[
|
|
||||||
config_key
|
|
||||||
].info
|
|
||||||
config["default"] = "N/A"
|
|
||||||
config["description"] = computed_field_info.description
|
|
||||||
config["type"] = str(computed_field_info.return_type)
|
|
||||||
else:
|
|
||||||
config["read-only"] = "rw"
|
|
||||||
field_info = config_eos.model_fields[config_key]
|
|
||||||
config["default"] = field_info.default
|
|
||||||
config["description"] = field_info.description
|
|
||||||
config["type"] = str(field_info.annotation)
|
|
||||||
|
|
||||||
configs[config_key] = config
|
|
||||||
|
|
||||||
# Generate markdown for the main table
|
|
||||||
markdown = "# Configuration Table\n\n"
|
markdown = "# Configuration Table\n\n"
|
||||||
|
|
||||||
# Generate table for general configuration names
|
# Generate tables for each top level config
|
||||||
general_configs = {k: v for k, v in configs.items() if k in GENERAL_CONFIGS}
|
for field_name, field_info in config_eos.model_fields.items():
|
||||||
for k in general_configs.keys():
|
field_type = field_info.annotation
|
||||||
del configs[k] # Remove general configs from the main configs dictionary
|
markdown += generate_config_table_md(
|
||||||
markdown += generate_config_table_md(general_configs, "General Configuration Values")
|
field_type, [field_name], f"EOS_{field_name.upper()}__", True
|
||||||
|
)
|
||||||
|
|
||||||
non_prefixed_configs = {k: v for k, v in configs.items()}
|
# Full config
|
||||||
|
markdown += "## Full example Config\n\n"
|
||||||
|
markdown += "```{eval-rst}\n"
|
||||||
|
markdown += ".. code-block:: json\n\n"
|
||||||
|
# Test for valid config first
|
||||||
|
config_eos.merge_settings_from_dict(global_config_dict)
|
||||||
|
markdown += textwrap.indent(json.dumps(global_config_dict, indent=4), " ")
|
||||||
|
markdown += "\n"
|
||||||
|
markdown += "```\n\n"
|
||||||
|
|
||||||
# Generate tables for each prefix (sorted by value) and remove prefixed configs from the main dictionary
|
# Assure there is no double \n at end of file
|
||||||
sorted_prefixes = sorted(CONFIG_PREFIXES.items(), key=lambda item: item[1])
|
|
||||||
for prefix, title in sorted_prefixes:
|
|
||||||
prefixed_configs = {k: v for k, v in configs.items() if k.startswith(prefix)}
|
|
||||||
for k in prefixed_configs.keys():
|
|
||||||
del non_prefixed_configs[k]
|
|
||||||
markdown += generate_config_table_md(prefixed_configs, title)
|
|
||||||
|
|
||||||
# Generate markdown for the remaining non-prefixed configs if any
|
|
||||||
if non_prefixed_configs:
|
|
||||||
markdown += generate_config_table_md(non_prefixed_configs, "Other Configuration Values")
|
|
||||||
|
|
||||||
# Assure the is no double \n at end of file
|
|
||||||
markdown = markdown.rstrip("\n")
|
markdown = markdown.rstrip("\n")
|
||||||
markdown += "\n"
|
markdown += "\n"
|
||||||
|
|
||||||
@@ -145,9 +292,10 @@ def main():
|
|||||||
)
|
)
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
config_eos = get_config()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
config_md = generate_config_md()
|
config_md = generate_config_md(config_eos)
|
||||||
if args.output_file:
|
if args.output_file:
|
||||||
# Write to file
|
# Write to file
|
||||||
with open(args.output_file, "w", encoding="utf8") as f:
|
with open(args.output_file, "w", encoding="utf8") as f:
|
||||||
@@ -158,7 +306,8 @@ def main():
|
|||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error during Configuration Specification generation: {e}", file=sys.stderr)
|
print(f"Error during Configuration Specification generation: {e}", file=sys.stderr)
|
||||||
sys.exit(1)
|
# keep throwing error to debug potential problems (e.g. invalid examples)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@@ -37,6 +37,11 @@ def generate_openapi() -> dict:
|
|||||||
routes=app.routes,
|
routes=app.routes,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Fix file path for general settings to not show local/test file path
|
||||||
|
general = openapi_spec["components"]["schemas"]["ConfigEOS"]["properties"]["general"]["default"]
|
||||||
|
general["config_file_path"] = "/home/user/.config/net.akkudoktoreos.net/EOS.config.json"
|
||||||
|
general["config_folder_path"] = "/home/user/.config/net.akkudoktoreos.net"
|
||||||
|
|
||||||
return openapi_spec
|
return openapi_spec
|
||||||
|
|
||||||
|
|
||||||
|
@@ -30,42 +30,63 @@ def prepare_optimization_real_parameters() -> OptimizationParameters:
|
|||||||
"""
|
"""
|
||||||
# Make a config
|
# Make a config
|
||||||
settings = {
|
settings = {
|
||||||
# -- General --
|
"general": {
|
||||||
"prediction_hours": 48,
|
"latitude": 52.52,
|
||||||
"prediction_historic_hours": 24,
|
"longitude": 13.405,
|
||||||
"latitude": 52.52,
|
},
|
||||||
"longitude": 13.405,
|
"prediction": {
|
||||||
# -- Predictions --
|
"hours": 48,
|
||||||
|
"historic_hours": 24,
|
||||||
|
},
|
||||||
# PV Forecast
|
# PV Forecast
|
||||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
"pvforecast": {
|
||||||
"pvforecast0_peakpower": 5.0,
|
"provider": "PVForecastAkkudoktor",
|
||||||
"pvforecast0_surface_azimuth": -10,
|
"planes": [
|
||||||
"pvforecast0_surface_tilt": 7,
|
{
|
||||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
"peakpower": 5.0,
|
||||||
"pvforecast0_inverter_paco": 10000,
|
"surface_azimuth": -10,
|
||||||
"pvforecast1_peakpower": 4.8,
|
"surface_tilt": 7,
|
||||||
"pvforecast1_surface_azimuth": -90,
|
"userhorizon": [20, 27, 22, 20],
|
||||||
"pvforecast1_surface_tilt": 7,
|
"inverter_paco": 10000,
|
||||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
},
|
||||||
"pvforecast1_inverter_paco": 10000,
|
{
|
||||||
"pvforecast2_peakpower": 1.4,
|
"peakpower": 4.8,
|
||||||
"pvforecast2_surface_azimuth": -40,
|
"surface_azimuth": -90,
|
||||||
"pvforecast2_surface_tilt": 60,
|
"surface_tilt": 7,
|
||||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
"userhorizon": [30, 30, 30, 50],
|
||||||
"pvforecast2_inverter_paco": 2000,
|
"inverter_paco": 10000,
|
||||||
"pvforecast3_peakpower": 1.6,
|
},
|
||||||
"pvforecast3_surface_azimuth": 5,
|
{
|
||||||
"pvforecast3_surface_tilt": 45,
|
"peakpower": 1.4,
|
||||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
"surface_azimuth": -40,
|
||||||
"pvforecast3_inverter_paco": 1400,
|
"surface_tilt": 60,
|
||||||
"pvforecast4_peakpower": None,
|
"userhorizon": [60, 30, 0, 30],
|
||||||
|
"inverter_paco": 2000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"peakpower": 1.6,
|
||||||
|
"surface_azimuth": 5,
|
||||||
|
"surface_tilt": 45,
|
||||||
|
"userhorizon": [45, 25, 30, 60],
|
||||||
|
"inverter_paco": 1400,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
# Weather Forecast
|
# Weather Forecast
|
||||||
"weather_provider": "ClearOutside",
|
"weather": {
|
||||||
|
"provider": "ClearOutside",
|
||||||
|
},
|
||||||
# Electricity Price Forecast
|
# Electricity Price Forecast
|
||||||
"elecprice_provider": "ElecPriceAkkudoktor",
|
"elecprice": {
|
||||||
|
"provider": "ElecPriceAkkudoktor",
|
||||||
|
},
|
||||||
# Load Forecast
|
# Load Forecast
|
||||||
"load_provider": "LoadAkkudoktor",
|
"load": {
|
||||||
"loadakkudoktor_year_energy": 5000, # Energy consumption per year in kWh
|
"provider": "LoadAkkudoktor",
|
||||||
|
"provider_settings": {
|
||||||
|
"loadakkudoktor_year_energy": 5000, # Energy consumption per year in kWh
|
||||||
|
},
|
||||||
|
},
|
||||||
# -- Simulations --
|
# -- Simulations --
|
||||||
}
|
}
|
||||||
config_eos = get_config()
|
config_eos = get_config()
|
||||||
@@ -129,20 +150,20 @@ def prepare_optimization_real_parameters() -> OptimizationParameters:
|
|||||||
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
||||||
},
|
},
|
||||||
"pv_akku": {
|
"pv_akku": {
|
||||||
|
"device_id": "battery1",
|
||||||
"capacity_wh": 26400,
|
"capacity_wh": 26400,
|
||||||
"initial_soc_percentage": 15,
|
"initial_soc_percentage": 15,
|
||||||
"min_soc_percentage": 15,
|
"min_soc_percentage": 15,
|
||||||
},
|
},
|
||||||
|
"inverter": {"device_id": "iv1", "max_power_wh": 10000, "battery_id": "battery1"},
|
||||||
"eauto": {
|
"eauto": {
|
||||||
|
"device_id": "ev1",
|
||||||
"min_soc_percentage": 50,
|
"min_soc_percentage": 50,
|
||||||
"capacity_wh": 60000,
|
"capacity_wh": 60000,
|
||||||
"charging_efficiency": 0.95,
|
"charging_efficiency": 0.95,
|
||||||
"max_charge_power_w": 11040,
|
"max_charge_power_w": 11040,
|
||||||
"initial_soc_percentage": 5,
|
"initial_soc_percentage": 5,
|
||||||
},
|
},
|
||||||
"inverter": {
|
|
||||||
"max_power_wh": 10000,
|
|
||||||
},
|
|
||||||
"temperature_forecast": temperature_forecast,
|
"temperature_forecast": temperature_forecast,
|
||||||
"start_solution": start_solution,
|
"start_solution": start_solution,
|
||||||
}
|
}
|
||||||
@@ -283,20 +304,20 @@ def prepare_optimization_parameters() -> OptimizationParameters:
|
|||||||
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
"strompreis_euro_pro_wh": strompreis_euro_pro_wh,
|
||||||
},
|
},
|
||||||
"pv_akku": {
|
"pv_akku": {
|
||||||
|
"device_id": "battery1",
|
||||||
"capacity_wh": 26400,
|
"capacity_wh": 26400,
|
||||||
"initial_soc_percentage": 15,
|
"initial_soc_percentage": 15,
|
||||||
"min_soc_percentage": 15,
|
"min_soc_percentage": 15,
|
||||||
},
|
},
|
||||||
|
"inverter": {"device_id": "iv1", "max_power_wh": 10000, "battery_id": "battery1"},
|
||||||
"eauto": {
|
"eauto": {
|
||||||
|
"device_id": "ev1",
|
||||||
"min_soc_percentage": 50,
|
"min_soc_percentage": 50,
|
||||||
"capacity_wh": 60000,
|
"capacity_wh": 60000,
|
||||||
"charging_efficiency": 0.95,
|
"charging_efficiency": 0.95,
|
||||||
"max_charge_power_w": 11040,
|
"max_charge_power_w": 11040,
|
||||||
"initial_soc_percentage": 5,
|
"initial_soc_percentage": 5,
|
||||||
},
|
},
|
||||||
"inverter": {
|
|
||||||
"max_power_wh": 10000,
|
|
||||||
},
|
|
||||||
"temperature_forecast": temperature_forecast,
|
"temperature_forecast": temperature_forecast,
|
||||||
"start_solution": start_solution,
|
"start_solution": start_solution,
|
||||||
}
|
}
|
||||||
@@ -330,7 +351,9 @@ def run_optimization(
|
|||||||
|
|
||||||
# Initialize the optimization problem using the default configuration
|
# Initialize the optimization problem using the default configuration
|
||||||
config_eos = get_config()
|
config_eos = get_config()
|
||||||
config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 48})
|
config_eos.merge_settings_from_dict(
|
||||||
|
{"prediction": {"hours": 48}, "optimization": {"hours": 48}}
|
||||||
|
)
|
||||||
opt_class = optimization_problem(verbose=verbose, fixed_seed=seed)
|
opt_class = optimization_problem(verbose=verbose, fixed_seed=seed)
|
||||||
|
|
||||||
# Perform the optimisation based on the provided parameters and start hour
|
# Perform the optimisation based on the provided parameters and start hour
|
||||||
|
@@ -16,32 +16,47 @@ prediction_eos = get_prediction()
|
|||||||
def config_pvforecast() -> dict:
|
def config_pvforecast() -> dict:
|
||||||
"""Configure settings for PV forecast."""
|
"""Configure settings for PV forecast."""
|
||||||
settings = {
|
settings = {
|
||||||
"prediction_hours": 48,
|
"general": {
|
||||||
"prediction_historic_hours": 24,
|
"latitude": 52.52,
|
||||||
"latitude": 52.52,
|
"longitude": 13.405,
|
||||||
"longitude": 13.405,
|
},
|
||||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
"prediction": {
|
||||||
"pvforecast0_peakpower": 5.0,
|
"hours": 48,
|
||||||
"pvforecast0_surface_azimuth": -10,
|
"historic_hours": 24,
|
||||||
"pvforecast0_surface_tilt": 7,
|
},
|
||||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
"pvforecast": {
|
||||||
"pvforecast0_inverter_paco": 10000,
|
"provider": "PVForecastAkkudoktor",
|
||||||
"pvforecast1_peakpower": 4.8,
|
"planes": [
|
||||||
"pvforecast1_surface_azimuth": -90,
|
{
|
||||||
"pvforecast1_surface_tilt": 7,
|
"peakpower": 5.0,
|
||||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
"surface_azimuth": -10,
|
||||||
"pvforecast1_inverter_paco": 10000,
|
"surface_tilt": 7,
|
||||||
"pvforecast2_peakpower": 1.4,
|
"userhorizon": [20, 27, 22, 20],
|
||||||
"pvforecast2_surface_azimuth": -40,
|
"inverter_paco": 10000,
|
||||||
"pvforecast2_surface_tilt": 60,
|
},
|
||||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
{
|
||||||
"pvforecast2_inverter_paco": 2000,
|
"peakpower": 4.8,
|
||||||
"pvforecast3_peakpower": 1.6,
|
"surface_azimuth": -90,
|
||||||
"pvforecast3_surface_azimuth": 5,
|
"surface_tilt": 7,
|
||||||
"pvforecast3_surface_tilt": 45,
|
"userhorizon": [30, 30, 30, 50],
|
||||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
"inverter_paco": 10000,
|
||||||
"pvforecast3_inverter_paco": 1400,
|
},
|
||||||
"pvforecast4_peakpower": None,
|
{
|
||||||
|
"peakpower": 1.4,
|
||||||
|
"surface_azimuth": -40,
|
||||||
|
"surface_tilt": 60,
|
||||||
|
"userhorizon": [60, 30, 0, 30],
|
||||||
|
"inverter_paco": 2000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"peakpower": 1.6,
|
||||||
|
"surface_azimuth": 5,
|
||||||
|
"surface_tilt": 45,
|
||||||
|
"userhorizon": [45, 25, 30, 60],
|
||||||
|
"inverter_paco": 1400,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
}
|
}
|
||||||
return settings
|
return settings
|
||||||
|
|
||||||
@@ -49,10 +64,15 @@ def config_pvforecast() -> dict:
|
|||||||
def config_weather() -> dict:
|
def config_weather() -> dict:
|
||||||
"""Configure settings for weather forecast."""
|
"""Configure settings for weather forecast."""
|
||||||
settings = {
|
settings = {
|
||||||
"prediction_hours": 48,
|
"general": {
|
||||||
"prediction_historic_hours": 24,
|
"latitude": 52.52,
|
||||||
"latitude": 52.52,
|
"longitude": 13.405,
|
||||||
"longitude": 13.405,
|
},
|
||||||
|
"prediction": {
|
||||||
|
"hours": 48,
|
||||||
|
"historic_hours": 24,
|
||||||
|
},
|
||||||
|
"weather": dict(),
|
||||||
}
|
}
|
||||||
return settings
|
return settings
|
||||||
|
|
||||||
@@ -60,10 +80,15 @@ def config_weather() -> dict:
|
|||||||
def config_elecprice() -> dict:
|
def config_elecprice() -> dict:
|
||||||
"""Configure settings for electricity price forecast."""
|
"""Configure settings for electricity price forecast."""
|
||||||
settings = {
|
settings = {
|
||||||
"prediction_hours": 48,
|
"general": {
|
||||||
"prediction_historic_hours": 24,
|
"latitude": 52.52,
|
||||||
"latitude": 52.52,
|
"longitude": 13.405,
|
||||||
"longitude": 13.405,
|
},
|
||||||
|
"prediction": {
|
||||||
|
"hours": 48,
|
||||||
|
"historic_hours": 24,
|
||||||
|
},
|
||||||
|
"elecprice": dict(),
|
||||||
}
|
}
|
||||||
return settings
|
return settings
|
||||||
|
|
||||||
@@ -71,10 +96,14 @@ def config_elecprice() -> dict:
|
|||||||
def config_load() -> dict:
|
def config_load() -> dict:
|
||||||
"""Configure settings for load forecast."""
|
"""Configure settings for load forecast."""
|
||||||
settings = {
|
settings = {
|
||||||
"prediction_hours": 48,
|
"general": {
|
||||||
"prediction_historic_hours": 24,
|
"latitude": 52.52,
|
||||||
"latitude": 52.52,
|
"longitude": 13.405,
|
||||||
"longitude": 13.405,
|
},
|
||||||
|
"prediction": {
|
||||||
|
"hours": 48,
|
||||||
|
"historic_hours": 24,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
return settings
|
return settings
|
||||||
|
|
||||||
@@ -96,17 +125,17 @@ def run_prediction(provider_id: str, verbose: bool = False) -> str:
|
|||||||
print(f"\nProvider ID: {provider_id}")
|
print(f"\nProvider ID: {provider_id}")
|
||||||
if provider_id in ("PVForecastAkkudoktor",):
|
if provider_id in ("PVForecastAkkudoktor",):
|
||||||
settings = config_pvforecast()
|
settings = config_pvforecast()
|
||||||
settings["pvforecast_provider"] = provider_id
|
settings["pvforecast"]["provider"] = provider_id
|
||||||
elif provider_id in ("BrightSky", "ClearOutside"):
|
elif provider_id in ("BrightSky", "ClearOutside"):
|
||||||
settings = config_weather()
|
settings = config_weather()
|
||||||
settings["weather_provider"] = provider_id
|
settings["weather"]["provider"] = provider_id
|
||||||
elif provider_id in ("ElecPriceAkkudoktor",):
|
elif provider_id in ("ElecPriceAkkudoktor",):
|
||||||
settings = config_elecprice()
|
settings = config_elecprice()
|
||||||
settings["elecprice_provider"] = provider_id
|
settings["elecprice"]["provider"] = provider_id
|
||||||
elif provider_id in ("LoadAkkudoktor",):
|
elif provider_id in ("LoadAkkudoktor",):
|
||||||
settings = config_elecprice()
|
settings = config_elecprice()
|
||||||
settings["loadakkudoktor_year_energy"] = 1000
|
settings["load"]["loadakkudoktor_year_energy"] = 1000
|
||||||
settings["load_provider"] = provider_id
|
settings["load"]["provider"] = provider_id
|
||||||
else:
|
else:
|
||||||
raise ValueError(f"Unknown provider '{provider_id}'.")
|
raise ValueError(f"Unknown provider '{provider_id}'.")
|
||||||
config_eos.merge_settings_from_dict(settings)
|
config_eos.merge_settings_from_dict(settings)
|
||||||
|
@@ -12,30 +12,35 @@ Key features:
|
|||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, ClassVar, List, Optional
|
from typing import Any, ClassVar, Optional, Type
|
||||||
|
|
||||||
from platformdirs import user_config_dir, user_data_dir
|
from platformdirs import user_config_dir, user_data_dir
|
||||||
from pydantic import Field, ValidationError, computed_field
|
from pydantic import Field, computed_field
|
||||||
|
from pydantic_settings import (
|
||||||
|
BaseSettings,
|
||||||
|
JsonConfigSettingsSource,
|
||||||
|
PydanticBaseSettingsSource,
|
||||||
|
SettingsConfigDict,
|
||||||
|
)
|
||||||
|
from pydantic_settings.sources import ConfigFileSourceMixin
|
||||||
|
|
||||||
# settings
|
# settings
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.core.coreabc import SingletonMixin
|
from akkudoktoreos.core.coreabc import SingletonMixin
|
||||||
|
from akkudoktoreos.core.decorators import classproperty
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.core.logsettings import LoggingCommonSettings
|
from akkudoktoreos.core.logsettings import LoggingCommonSettings
|
||||||
from akkudoktoreos.devices.devices import DevicesCommonSettings
|
from akkudoktoreos.core.pydantic import merge_models
|
||||||
|
from akkudoktoreos.devices.settings import DevicesCommonSettings
|
||||||
from akkudoktoreos.measurement.measurement import MeasurementCommonSettings
|
from akkudoktoreos.measurement.measurement import MeasurementCommonSettings
|
||||||
from akkudoktoreos.optimization.optimization import OptimizationCommonSettings
|
from akkudoktoreos.optimization.optimization import OptimizationCommonSettings
|
||||||
from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
|
from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
|
||||||
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
|
|
||||||
from akkudoktoreos.prediction.load import LoadCommonSettings
|
from akkudoktoreos.prediction.load import LoadCommonSettings
|
||||||
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
|
||||||
from akkudoktoreos.prediction.loadimport import LoadImportCommonSettings
|
|
||||||
from akkudoktoreos.prediction.prediction import PredictionCommonSettings
|
from akkudoktoreos.prediction.prediction import PredictionCommonSettings
|
||||||
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||||
from akkudoktoreos.prediction.pvforecastimport import PVForecastImportCommonSettings
|
|
||||||
from akkudoktoreos.prediction.weather import WeatherCommonSettings
|
from akkudoktoreos.prediction.weather import WeatherCommonSettings
|
||||||
from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
|
|
||||||
from akkudoktoreos.server.server import ServerCommonSettings
|
from akkudoktoreos.server.server import ServerCommonSettings
|
||||||
|
from akkudoktoreos.utils.datetimeutil import to_timezone
|
||||||
from akkudoktoreos.utils.utils import UtilsCommonSettings
|
from akkudoktoreos.utils.utils import UtilsCommonSettings
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@@ -59,61 +64,137 @@ def get_absolute_path(
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
class ConfigCommonSettings(SettingsBaseModel):
|
class GeneralSettings(SettingsBaseModel):
|
||||||
"""Settings for common configuration."""
|
"""Settings for common configuration.
|
||||||
|
|
||||||
|
General configuration to set directories of cache and output files and system location (latitude
|
||||||
|
and longitude).
|
||||||
|
Validators ensure each parameter is within a specified range. A computed property, `timezone`,
|
||||||
|
determines the time zone based on latitude and longitude.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
|
||||||
|
longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
|
||||||
|
|
||||||
|
Properties:
|
||||||
|
timezone (Optional[str]): Computed time zone string based on the specified latitude
|
||||||
|
and longitude.
|
||||||
|
|
||||||
|
Validators:
|
||||||
|
validate_latitude (float): Ensures `latitude` is within the range -90 to 90.
|
||||||
|
validate_longitude (float): Ensures `longitude` is within the range -180 to 180.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_config_folder_path: ClassVar[Optional[Path]] = None
|
||||||
|
_config_file_path: ClassVar[Optional[Path]] = None
|
||||||
|
|
||||||
data_folder_path: Optional[Path] = Field(
|
data_folder_path: Optional[Path] = Field(
|
||||||
default=None, description="Path to EOS data directory."
|
default=None, description="Path to EOS data directory.", examples=[None, "/home/eos/data"]
|
||||||
)
|
)
|
||||||
|
|
||||||
data_output_subpath: Optional[Path] = Field(
|
data_output_subpath: Optional[Path] = Field(
|
||||||
"output", description="Sub-path for the EOS output data directory."
|
default="output", description="Sub-path for the EOS output data directory."
|
||||||
)
|
)
|
||||||
|
|
||||||
data_cache_subpath: Optional[Path] = Field(
|
data_cache_subpath: Optional[Path] = Field(
|
||||||
"cache", description="Sub-path for the EOS cache data directory."
|
default="cache", description="Sub-path for the EOS cache data directory."
|
||||||
|
)
|
||||||
|
|
||||||
|
latitude: Optional[float] = Field(
|
||||||
|
default=52.52,
|
||||||
|
ge=-90.0,
|
||||||
|
le=90.0,
|
||||||
|
description="Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)",
|
||||||
|
)
|
||||||
|
longitude: Optional[float] = Field(
|
||||||
|
default=13.405,
|
||||||
|
ge=-180.0,
|
||||||
|
le=180.0,
|
||||||
|
description="Longitude in decimal degrees, within -180 to 180 (°)",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Computed fields
|
# Computed fields
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def timezone(self) -> Optional[str]:
|
||||||
|
"""Compute timezone based on latitude and longitude."""
|
||||||
|
if self.latitude and self.longitude:
|
||||||
|
return to_timezone(location=(self.latitude, self.longitude), as_string=True)
|
||||||
|
return None
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def data_output_path(self) -> Optional[Path]:
|
def data_output_path(self) -> Optional[Path]:
|
||||||
"""Compute data_output_path based on data_folder_path."""
|
"""Compute data_output_path based on data_folder_path."""
|
||||||
return get_absolute_path(self.data_folder_path, self.data_output_subpath)
|
return get_absolute_path(self.data_folder_path, self.data_output_subpath)
|
||||||
|
|
||||||
# Computed fields
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def data_cache_path(self) -> Optional[Path]:
|
def data_cache_path(self) -> Optional[Path]:
|
||||||
"""Compute data_cache_path based on data_folder_path."""
|
"""Compute data_cache_path based on data_folder_path."""
|
||||||
return get_absolute_path(self.data_folder_path, self.data_cache_subpath)
|
return get_absolute_path(self.data_folder_path, self.data_cache_subpath)
|
||||||
|
|
||||||
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@property
|
||||||
|
def config_folder_path(self) -> Optional[Path]:
|
||||||
|
"""Path to EOS configuration directory."""
|
||||||
|
return self._config_folder_path
|
||||||
|
|
||||||
class SettingsEOS(
|
@computed_field # type: ignore[prop-decorator]
|
||||||
ConfigCommonSettings,
|
@property
|
||||||
LoggingCommonSettings,
|
def config_file_path(self) -> Optional[Path]:
|
||||||
DevicesCommonSettings,
|
"""Path to EOS configuration file."""
|
||||||
MeasurementCommonSettings,
|
return self._config_file_path
|
||||||
OptimizationCommonSettings,
|
|
||||||
PredictionCommonSettings,
|
|
||||||
ElecPriceCommonSettings,
|
|
||||||
ElecPriceImportCommonSettings,
|
|
||||||
LoadCommonSettings,
|
|
||||||
LoadAkkudoktorCommonSettings,
|
|
||||||
LoadImportCommonSettings,
|
|
||||||
PVForecastCommonSettings,
|
|
||||||
PVForecastImportCommonSettings,
|
|
||||||
WeatherCommonSettings,
|
|
||||||
WeatherImportCommonSettings,
|
|
||||||
ServerCommonSettings,
|
|
||||||
UtilsCommonSettings,
|
|
||||||
):
|
|
||||||
"""Settings for all EOS."""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigEOS(SingletonMixin, SettingsEOS):
|
class SettingsEOS(BaseSettings):
|
||||||
|
"""Settings for all EOS.
|
||||||
|
|
||||||
|
Used by updating the configuration with specific settings only.
|
||||||
|
"""
|
||||||
|
|
||||||
|
general: Optional[GeneralSettings] = None
|
||||||
|
logging: Optional[LoggingCommonSettings] = None
|
||||||
|
devices: Optional[DevicesCommonSettings] = None
|
||||||
|
measurement: Optional[MeasurementCommonSettings] = None
|
||||||
|
optimization: Optional[OptimizationCommonSettings] = None
|
||||||
|
prediction: Optional[PredictionCommonSettings] = None
|
||||||
|
elecprice: Optional[ElecPriceCommonSettings] = None
|
||||||
|
load: Optional[LoadCommonSettings] = None
|
||||||
|
pvforecast: Optional[PVForecastCommonSettings] = None
|
||||||
|
weather: Optional[WeatherCommonSettings] = None
|
||||||
|
server: Optional[ServerCommonSettings] = None
|
||||||
|
utils: Optional[UtilsCommonSettings] = None
|
||||||
|
|
||||||
|
model_config = SettingsConfigDict(
|
||||||
|
env_nested_delimiter="__",
|
||||||
|
nested_model_default_partial_update=True,
|
||||||
|
env_prefix="EOS_",
|
||||||
|
ignored_types=(classproperty,),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class SettingsEOSDefaults(SettingsEOS):
|
||||||
|
"""Settings for all of EOS with defaults.
|
||||||
|
|
||||||
|
Used by ConfigEOS instance to make all fields available.
|
||||||
|
"""
|
||||||
|
|
||||||
|
general: GeneralSettings = GeneralSettings()
|
||||||
|
logging: LoggingCommonSettings = LoggingCommonSettings()
|
||||||
|
devices: DevicesCommonSettings = DevicesCommonSettings()
|
||||||
|
measurement: MeasurementCommonSettings = MeasurementCommonSettings()
|
||||||
|
optimization: OptimizationCommonSettings = OptimizationCommonSettings()
|
||||||
|
prediction: PredictionCommonSettings = PredictionCommonSettings()
|
||||||
|
elecprice: ElecPriceCommonSettings = ElecPriceCommonSettings()
|
||||||
|
load: LoadCommonSettings = LoadCommonSettings()
|
||||||
|
pvforecast: PVForecastCommonSettings = PVForecastCommonSettings()
|
||||||
|
weather: WeatherCommonSettings = WeatherCommonSettings()
|
||||||
|
server: ServerCommonSettings = ServerCommonSettings()
|
||||||
|
utils: UtilsCommonSettings = UtilsCommonSettings()
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
||||||
"""Singleton configuration handler for the EOS application.
|
"""Singleton configuration handler for the EOS application.
|
||||||
|
|
||||||
ConfigEOS extends `SettingsEOS` with support for default configuration paths and automatic
|
ConfigEOS extends `SettingsEOS` with support for default configuration paths and automatic
|
||||||
@@ -143,8 +224,6 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
|
|||||||
in one part of the application reflects across all references to this class.
|
in one part of the application reflects across all references to this class.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
_settings (ClassVar[SettingsEOS]): Holds application-wide settings.
|
|
||||||
_file_settings (ClassVar[SettingsEOS]): Stores configuration loaded from file.
|
|
||||||
config_folder_path (Optional[Path]): Path to the configuration directory.
|
config_folder_path (Optional[Path]): Path to the configuration directory.
|
||||||
config_file_path (Optional[Path]): Path to the configuration file.
|
config_file_path (Optional[Path]): Path to the configuration file.
|
||||||
|
|
||||||
@@ -155,7 +234,7 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
|
|||||||
To initialize and access configuration attributes (only one instance is created):
|
To initialize and access configuration attributes (only one instance is created):
|
||||||
```python
|
```python
|
||||||
config_eos = ConfigEOS() # Always returns the same instance
|
config_eos = ConfigEOS() # Always returns the same instance
|
||||||
print(config_eos.prediction_hours) # Access a setting from the loaded configuration
|
print(config_eos.prediction.hours) # Access a setting from the loaded configuration
|
||||||
```
|
```
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@@ -167,111 +246,111 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
|
|||||||
ENCODING: ClassVar[str] = "UTF-8"
|
ENCODING: ClassVar[str] = "UTF-8"
|
||||||
CONFIG_FILE_NAME: ClassVar[str] = "EOS.config.json"
|
CONFIG_FILE_NAME: ClassVar[str] = "EOS.config.json"
|
||||||
|
|
||||||
_settings: ClassVar[Optional[SettingsEOS]] = None
|
@classmethod
|
||||||
_file_settings: ClassVar[Optional[SettingsEOS]] = None
|
def settings_customise_sources(
|
||||||
|
cls,
|
||||||
|
settings_cls: Type[BaseSettings],
|
||||||
|
init_settings: PydanticBaseSettingsSource,
|
||||||
|
env_settings: PydanticBaseSettingsSource,
|
||||||
|
dotenv_settings: PydanticBaseSettingsSource,
|
||||||
|
file_secret_settings: PydanticBaseSettingsSource,
|
||||||
|
) -> tuple[PydanticBaseSettingsSource, ...]:
|
||||||
|
"""Customizes the order and handling of settings sources for a Pydantic BaseSettings subclass.
|
||||||
|
|
||||||
_config_folder_path: Optional[Path] = None
|
This method determines the sources for application configuration settings, including
|
||||||
_config_file_path: Optional[Path] = None
|
environment variables, dotenv files and JSON configuration files.
|
||||||
|
It ensures that a default configuration file exists and creates one if necessary.
|
||||||
|
|
||||||
# Computed fields
|
Args:
|
||||||
@computed_field # type: ignore[prop-decorator]
|
settings_cls (Type[BaseSettings]): The Pydantic BaseSettings class for which sources are customized.
|
||||||
@property
|
init_settings (PydanticBaseSettingsSource): The initial settings source, typically passed at runtime.
|
||||||
def config_folder_path(self) -> Optional[Path]:
|
env_settings (PydanticBaseSettingsSource): Settings sourced from environment variables.
|
||||||
"""Path to EOS configuration directory."""
|
dotenv_settings (PydanticBaseSettingsSource): Settings sourced from a dotenv file.
|
||||||
return self._config_folder_path
|
file_secret_settings (PydanticBaseSettingsSource): Unused (needed for parent class interface).
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
Returns:
|
||||||
@property
|
tuple[PydanticBaseSettingsSource, ...]: A tuple of settings sources in the order they should be applied.
|
||||||
def config_file_path(self) -> Optional[Path]:
|
|
||||||
"""Path to EOS configuration file."""
|
|
||||||
return self._config_file_path
|
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
Behavior:
|
||||||
@property
|
1. Checks for the existence of a JSON configuration file in the expected location.
|
||||||
def config_default_file_path(self) -> Path:
|
2. If the configuration file does not exist, creates the directory (if needed) and attempts to copy a
|
||||||
|
default configuration file to the location. If the copy fails, uses the default configuration file directly.
|
||||||
|
3. Creates a `JsonConfigSettingsSource` for both the configuration file and the default configuration file.
|
||||||
|
4. Updates class attributes `GeneralSettings._config_folder_path` and
|
||||||
|
`GeneralSettings._config_file_path` to reflect the determined paths.
|
||||||
|
5. Returns a tuple containing all provided and newly created settings sources in the desired order.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- This method logs a warning if the default configuration file cannot be copied.
|
||||||
|
- It ensures that a fallback to the default configuration file is always possible.
|
||||||
|
"""
|
||||||
|
file_settings: Optional[ConfigFileSourceMixin] = None
|
||||||
|
config_file, exists = cls._get_config_file_path()
|
||||||
|
config_dir = config_file.parent
|
||||||
|
if not exists:
|
||||||
|
config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
try:
|
||||||
|
shutil.copy2(cls.config_default_file_path, config_file)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning(f"Could not copy default config: {exc}. Using default config...")
|
||||||
|
config_file = cls.config_default_file_path
|
||||||
|
config_dir = config_file.parent
|
||||||
|
file_settings = JsonConfigSettingsSource(settings_cls, json_file=config_file)
|
||||||
|
default_settings = JsonConfigSettingsSource(
|
||||||
|
settings_cls, json_file=cls.config_default_file_path
|
||||||
|
)
|
||||||
|
GeneralSettings._config_folder_path = config_dir
|
||||||
|
GeneralSettings._config_file_path = config_file
|
||||||
|
|
||||||
|
return (
|
||||||
|
init_settings,
|
||||||
|
env_settings,
|
||||||
|
dotenv_settings,
|
||||||
|
file_settings,
|
||||||
|
default_settings,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classproperty
|
||||||
|
def config_default_file_path(cls) -> Path:
|
||||||
"""Compute the default config file path."""
|
"""Compute the default config file path."""
|
||||||
return self.package_root_path.joinpath("data/default.config.json")
|
return cls.package_root_path.joinpath("data/default.config.json")
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@classproperty
|
||||||
@property
|
def package_root_path(cls) -> Path:
|
||||||
def package_root_path(self) -> Path:
|
|
||||||
"""Compute the package root path."""
|
"""Compute the package root path."""
|
||||||
return Path(__file__).parent.parent.resolve()
|
return Path(__file__).parent.parent.resolve()
|
||||||
|
|
||||||
# Computed fields
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
@computed_field # type: ignore[prop-decorator]
|
|
||||||
@property
|
|
||||||
def config_keys(self) -> List[str]:
|
|
||||||
"""Returns the keys of all fields in the configuration."""
|
|
||||||
key_list = []
|
|
||||||
key_list.extend(list(self.model_fields.keys()))
|
|
||||||
key_list.extend(list(self.__pydantic_decorators__.computed_fields.keys()))
|
|
||||||
return key_list
|
|
||||||
|
|
||||||
# Computed fields
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
|
||||||
@property
|
|
||||||
def config_keys_read_only(self) -> List[str]:
|
|
||||||
"""Returns the keys of all read only fields in the configuration."""
|
|
||||||
key_list = []
|
|
||||||
key_list.extend(list(self.__pydantic_decorators__.computed_fields.keys()))
|
|
||||||
return key_list
|
|
||||||
|
|
||||||
def __init__(self) -> None:
|
|
||||||
"""Initializes the singleton ConfigEOS instance.
|
"""Initializes the singleton ConfigEOS instance.
|
||||||
|
|
||||||
Configuration data is loaded from a configuration file or a default one is created if none
|
Configuration data is loaded from a configuration file or a default one is created if none
|
||||||
exists.
|
exists.
|
||||||
"""
|
"""
|
||||||
super().__init__()
|
if hasattr(self, "_initialized"):
|
||||||
self.from_config_file()
|
return
|
||||||
self.update()
|
super().__init__(*args, **kwargs)
|
||||||
|
self._create_initial_config_file()
|
||||||
|
self._update_data_folder_path()
|
||||||
|
|
||||||
@property
|
def _setup(self, *args: Any, **kwargs: Any) -> None:
|
||||||
def settings(self) -> Optional[SettingsEOS]:
|
"""Re-initialize global settings."""
|
||||||
"""Returns global settings for EOS.
|
SettingsEOSDefaults.__init__(self, *args, **kwargs)
|
||||||
|
self._create_initial_config_file()
|
||||||
|
self._update_data_folder_path()
|
||||||
|
|
||||||
Settings generally provide configuration for EOS and are typically set only once.
|
def merge_settings(self, settings: SettingsEOS) -> None:
|
||||||
|
|
||||||
Returns:
|
|
||||||
SettingsEOS: The settings for EOS or None.
|
|
||||||
"""
|
|
||||||
return ConfigEOS._settings
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _merge_and_update_settings(cls, settings: SettingsEOS) -> None:
|
|
||||||
"""Merge new and available settings.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
settings (SettingsEOS): The new settings to apply.
|
|
||||||
"""
|
|
||||||
for key in SettingsEOS.model_fields:
|
|
||||||
if value := getattr(settings, key, None):
|
|
||||||
setattr(cls._settings, key, value)
|
|
||||||
|
|
||||||
def merge_settings(self, settings: SettingsEOS, force: Optional[bool] = None) -> None:
|
|
||||||
"""Merges the provided settings into the global settings for EOS, with optional overwrite.
|
"""Merges the provided settings into the global settings for EOS, with optional overwrite.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
settings (SettingsEOS): The settings to apply globally.
|
settings (SettingsEOS): The settings to apply globally.
|
||||||
force (Optional[bool]): If True, overwrites the existing settings completely.
|
|
||||||
If False, the new settings are merged to the existing ones with priority for
|
|
||||||
the new ones. Defaults to False.
|
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
ValueError: If settings are already set and `force` is not True or
|
ValueError: If the `settings` is not a `SettingsEOS` instance.
|
||||||
if the `settings` is not a `SettingsEOS` instance.
|
|
||||||
"""
|
"""
|
||||||
if not isinstance(settings, SettingsEOS):
|
if not isinstance(settings, SettingsEOS):
|
||||||
raise ValueError(f"Settings must be an instance of SettingsEOS: '{settings}'.")
|
raise ValueError(f"Settings must be an instance of SettingsEOS: '{settings}'.")
|
||||||
|
|
||||||
if ConfigEOS._settings is None or force:
|
self.merge_settings_from_dict(settings.model_dump(exclude_none=True, exclude_unset=True))
|
||||||
ConfigEOS._settings = settings
|
|
||||||
else:
|
|
||||||
self._merge_and_update_settings(settings)
|
|
||||||
|
|
||||||
# Update configuration after merging
|
|
||||||
self.update()
|
|
||||||
|
|
||||||
def merge_settings_from_dict(self, data: dict) -> None:
|
def merge_settings_from_dict(self, data: dict) -> None:
|
||||||
"""Merges the provided dictionary data into the current instance.
|
"""Merges the provided dictionary data into the current instance.
|
||||||
@@ -289,141 +368,83 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
|
|||||||
|
|
||||||
Example:
|
Example:
|
||||||
>>> config = get_config()
|
>>> config = get_config()
|
||||||
>>> new_data = {"prediction_hours": 24, "server_eos_port": 8000}
|
>>> new_data = {"prediction": {"hours": 24}, "server": {"port": 8000}}
|
||||||
>>> config.merge_settings_from_dict(new_data)
|
>>> config.merge_settings_from_dict(new_data)
|
||||||
"""
|
"""
|
||||||
# Create new settings instance with reset optional fields and merged data
|
self._setup(**merge_models(self, data))
|
||||||
settings = SettingsEOS.from_dict(data)
|
|
||||||
self.merge_settings(settings)
|
|
||||||
|
|
||||||
def reset_settings(self) -> None:
|
def reset_settings(self) -> None:
|
||||||
"""Reset all available settings.
|
"""Reset all changed settings to environment/config file defaults.
|
||||||
|
|
||||||
This functions basically deletes the settings provided before.
|
This functions basically deletes the settings provided before.
|
||||||
"""
|
"""
|
||||||
ConfigEOS._settings = None
|
self._setup()
|
||||||
|
|
||||||
|
def _create_initial_config_file(self) -> None:
|
||||||
|
if self.general.config_file_path and not self.general.config_file_path.exists():
|
||||||
|
self.general.config_file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
try:
|
||||||
|
with open(self.general.config_file_path, "w") as f:
|
||||||
|
f.write(self.model_dump_json(indent=4))
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Could not write configuration file '{self.general.config_file_path}': {e}"
|
||||||
|
)
|
||||||
|
|
||||||
def _update_data_folder_path(self) -> None:
|
def _update_data_folder_path(self) -> None:
|
||||||
"""Updates path to the data directory."""
|
"""Updates path to the data directory."""
|
||||||
# From Settings
|
# From Settings
|
||||||
if self.settings and (data_dir := self.settings.data_folder_path):
|
if data_dir := self.general.data_folder_path:
|
||||||
try:
|
try:
|
||||||
data_dir.mkdir(parents=True, exist_ok=True)
|
data_dir.mkdir(parents=True, exist_ok=True)
|
||||||
self.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
return
|
return
|
||||||
except:
|
except Exception as e:
|
||||||
pass
|
logger.warning(f"Could not setup data dir: {e}")
|
||||||
# From EOS_DIR env
|
# From EOS_DIR env
|
||||||
env_dir = os.getenv(self.EOS_DIR)
|
if env_dir := os.getenv(self.EOS_DIR):
|
||||||
if env_dir is not None:
|
|
||||||
try:
|
try:
|
||||||
data_dir = Path(env_dir).resolve()
|
data_dir = Path(env_dir).resolve()
|
||||||
data_dir.mkdir(parents=True, exist_ok=True)
|
data_dir.mkdir(parents=True, exist_ok=True)
|
||||||
self.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
return
|
return
|
||||||
except:
|
except Exception as e:
|
||||||
pass
|
logger.warning(f"Could not setup data dir: {e}")
|
||||||
# From configuration file
|
|
||||||
if self._file_settings and (data_dir := self._file_settings.data_folder_path):
|
|
||||||
try:
|
|
||||||
data_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
self.data_folder_path = data_dir
|
|
||||||
return
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
# From platform specific default path
|
# From platform specific default path
|
||||||
try:
|
try:
|
||||||
data_dir = Path(user_data_dir(self.APP_NAME, self.APP_AUTHOR))
|
data_dir = Path(user_data_dir(self.APP_NAME, self.APP_AUTHOR))
|
||||||
if data_dir is not None:
|
if data_dir is not None:
|
||||||
data_dir.mkdir(parents=True, exist_ok=True)
|
data_dir.mkdir(parents=True, exist_ok=True)
|
||||||
self.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
return
|
return
|
||||||
except:
|
except Exception as e:
|
||||||
pass
|
logger.warning(f"Could not setup data dir: {e}")
|
||||||
# Current working directory
|
# Current working directory
|
||||||
data_dir = Path.cwd()
|
data_dir = Path.cwd()
|
||||||
self.data_folder_path = data_dir
|
self.general.data_folder_path = data_dir
|
||||||
|
|
||||||
def _get_config_file_path(self) -> tuple[Path, bool]:
|
@classmethod
|
||||||
|
def _get_config_file_path(cls) -> tuple[Path, bool]:
|
||||||
"""Finds the a valid configuration file or returns the desired path for a new config file.
|
"""Finds the a valid configuration file or returns the desired path for a new config file.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
tuple[Path, bool]: The path to the configuration directory and if there is already a config file there
|
tuple[Path, bool]: The path to the configuration directory and if there is already a config file there
|
||||||
"""
|
"""
|
||||||
config_dirs = []
|
config_dirs = []
|
||||||
env_base_dir = os.getenv(self.EOS_DIR)
|
env_base_dir = os.getenv(cls.EOS_DIR)
|
||||||
env_config_dir = os.getenv(self.EOS_CONFIG_DIR)
|
env_config_dir = os.getenv(cls.EOS_CONFIG_DIR)
|
||||||
env_dir = get_absolute_path(env_base_dir, env_config_dir)
|
env_dir = get_absolute_path(env_base_dir, env_config_dir)
|
||||||
logger.debug(f"Envionment config dir: '{env_dir}'")
|
logger.debug(f"Environment config dir: '{env_dir}'")
|
||||||
if env_dir is not None:
|
if env_dir is not None:
|
||||||
config_dirs.append(env_dir.resolve())
|
config_dirs.append(env_dir.resolve())
|
||||||
config_dirs.append(Path(user_config_dir(self.APP_NAME)))
|
config_dirs.append(Path(user_config_dir(cls.APP_NAME)))
|
||||||
config_dirs.append(Path.cwd())
|
config_dirs.append(Path.cwd())
|
||||||
for cdir in config_dirs:
|
for cdir in config_dirs:
|
||||||
cfile = cdir.joinpath(self.CONFIG_FILE_NAME)
|
cfile = cdir.joinpath(cls.CONFIG_FILE_NAME)
|
||||||
if cfile.exists():
|
if cfile.exists():
|
||||||
logger.debug(f"Found config file: '{cfile}'")
|
logger.debug(f"Found config file: '{cfile}'")
|
||||||
return cfile, True
|
return cfile, True
|
||||||
return config_dirs[0].joinpath(self.CONFIG_FILE_NAME), False
|
return config_dirs[0].joinpath(cls.CONFIG_FILE_NAME), False
|
||||||
|
|
||||||
def settings_from_config_file(self) -> tuple[SettingsEOS, Path]:
|
|
||||||
"""Load settings from the configuration file.
|
|
||||||
|
|
||||||
If the config file does not exist, it will be created.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
tuple of settings and path
|
|
||||||
settings (SettingsEOS): The settings defined by the EOS configuration file.
|
|
||||||
path (pathlib.Path): The path of the configuration file.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError: If the configuration file is invalid or incomplete.
|
|
||||||
"""
|
|
||||||
config_file, exists = self._get_config_file_path()
|
|
||||||
config_dir = config_file.parent
|
|
||||||
|
|
||||||
# Create config directory and copy default config if file does not exist
|
|
||||||
if not exists:
|
|
||||||
config_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
try:
|
|
||||||
shutil.copy2(self.config_default_file_path, config_file)
|
|
||||||
except Exception as exc:
|
|
||||||
logger.warning(f"Could not copy default config: {exc}. Using default config...")
|
|
||||||
config_file = self.config_default_file_path
|
|
||||||
config_dir = config_file.parent
|
|
||||||
|
|
||||||
# Load and validate the configuration file
|
|
||||||
with config_file.open("r", encoding=self.ENCODING) as f_in:
|
|
||||||
try:
|
|
||||||
json_str = f_in.read()
|
|
||||||
settings = SettingsEOS.model_validate_json(json_str)
|
|
||||||
except ValidationError as exc:
|
|
||||||
raise ValueError(f"Configuration '{config_file}' is incomplete or not valid: {exc}")
|
|
||||||
|
|
||||||
return settings, config_file
|
|
||||||
|
|
||||||
def from_config_file(self) -> tuple[SettingsEOS, Path]:
|
|
||||||
"""Load the configuration file settings for EOS.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
tuple of settings and path
|
|
||||||
settings (SettingsEOS): The settings defined by the EOS configuration file.
|
|
||||||
path (pathlib.Path): The path of the configuration file.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError: If the configuration file is invalid or incomplete.
|
|
||||||
"""
|
|
||||||
# Load settings from config file
|
|
||||||
ConfigEOS._file_settings, config_file = self.settings_from_config_file()
|
|
||||||
|
|
||||||
# Update configuration in memory
|
|
||||||
self.update()
|
|
||||||
|
|
||||||
# Everything worked, remember the values
|
|
||||||
self._config_folder_path = config_file.parent
|
|
||||||
self._config_file_path = config_file
|
|
||||||
|
|
||||||
return ConfigEOS._file_settings, config_file
|
|
||||||
|
|
||||||
def to_config_file(self) -> None:
|
def to_config_file(self) -> None:
|
||||||
"""Saves the current configuration to the configuration file.
|
"""Saves the current configuration to the configuration file.
|
||||||
@@ -433,77 +454,24 @@ class ConfigEOS(SingletonMixin, SettingsEOS):
|
|||||||
Raises:
|
Raises:
|
||||||
ValueError: If the configuration file path is not specified or can not be written to.
|
ValueError: If the configuration file path is not specified or can not be written to.
|
||||||
"""
|
"""
|
||||||
if not self.config_file_path:
|
if not self.general.config_file_path:
|
||||||
raise ValueError("Configuration file path unknown.")
|
raise ValueError("Configuration file path unknown.")
|
||||||
with self.config_file_path.open("w", encoding=self.ENCODING) as f_out:
|
with self.general.config_file_path.open("w", encoding=self.ENCODING) as f_out:
|
||||||
try:
|
json_str = super().model_dump_json()
|
||||||
json_str = super().to_json()
|
f_out.write(json_str)
|
||||||
# Write to file
|
|
||||||
f_out.write(json_str)
|
|
||||||
# Also remember as actual settings
|
|
||||||
ConfigEOS._file_settings = SettingsEOS.model_validate_json(json_str)
|
|
||||||
except ValidationError as exc:
|
|
||||||
raise ValueError(f"Could not update '{self.config_file_path}': {exc}")
|
|
||||||
|
|
||||||
def _config_value(self, key: str) -> Any:
|
|
||||||
"""Retrieves the configuration value for a specific key, following a priority order.
|
|
||||||
|
|
||||||
Values are fetched in the following order:
|
|
||||||
1. Settings.
|
|
||||||
2. Environment variables.
|
|
||||||
3. EOS configuration file.
|
|
||||||
4. Current configuration.
|
|
||||||
5. Field default constants.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
key (str): The configuration key to retrieve.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Any: The configuration value, or None if not found.
|
|
||||||
"""
|
|
||||||
# Settings
|
|
||||||
if ConfigEOS._settings:
|
|
||||||
if (value := getattr(self.settings, key, None)) is not None:
|
|
||||||
return value
|
|
||||||
|
|
||||||
# Environment variables
|
|
||||||
if (value := os.getenv(key)) is not None:
|
|
||||||
try:
|
|
||||||
return float(value)
|
|
||||||
except ValueError:
|
|
||||||
return value
|
|
||||||
|
|
||||||
# EOS configuration file.
|
|
||||||
if self._file_settings:
|
|
||||||
if (value := getattr(self._file_settings, key, None)) is not None:
|
|
||||||
return value
|
|
||||||
|
|
||||||
# Current configuration - key is valid as called by update().
|
|
||||||
if (value := getattr(self, key, None)) is not None:
|
|
||||||
return value
|
|
||||||
|
|
||||||
# Field default constants
|
|
||||||
if (value := ConfigEOS.model_fields[key].default) is not None:
|
|
||||||
return value
|
|
||||||
|
|
||||||
logger.debug(f"Value for configuration key '{key}' not found or is {value}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
def update(self) -> None:
|
def update(self) -> None:
|
||||||
"""Updates all configuration fields.
|
"""Updates all configuration fields.
|
||||||
|
|
||||||
This method updates all configuration fields using the following order for value retrieval:
|
This method updates all configuration fields using the following order for value retrieval:
|
||||||
1. Settings.
|
1. Current settings.
|
||||||
2. Environment variables.
|
2. Environment variables.
|
||||||
3. EOS configuration file.
|
3. EOS configuration file.
|
||||||
4. Current configuration.
|
4. Field default constants.
|
||||||
5. Field default constants.
|
|
||||||
|
|
||||||
The first non None value in priority order is taken.
|
The first non None value in priority order is taken.
|
||||||
"""
|
"""
|
||||||
self._update_data_folder_path()
|
self._setup(**self.model_dump())
|
||||||
for key in self.model_fields:
|
|
||||||
setattr(self, key, self._config_value(key))
|
|
||||||
|
|
||||||
|
|
||||||
def get_config() -> ConfigEOS:
|
def get_config() -> ConfigEOS:
|
||||||
|
@@ -4,10 +4,6 @@ from akkudoktoreos.core.pydantic import PydanticBaseModel
|
|||||||
|
|
||||||
|
|
||||||
class SettingsBaseModel(PydanticBaseModel):
|
class SettingsBaseModel(PydanticBaseModel):
|
||||||
"""Base model class for all settings configurations.
|
"""Base model class for all settings configurations."""
|
||||||
|
|
||||||
Note:
|
|
||||||
Settings property names shall be disjunctive to all existing settings' property names.
|
|
||||||
"""
|
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
@@ -265,6 +265,12 @@ class SingletonMixin:
|
|||||||
class MySingletonModel(SingletonMixin, PydanticBaseModel):
|
class MySingletonModel(SingletonMixin, PydanticBaseModel):
|
||||||
name: str
|
name: str
|
||||||
|
|
||||||
|
# implement __init__ to avoid re-initialization of parent class PydanticBaseModel:
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
instance1 = MySingletonModel(name="Instance 1")
|
instance1 = MySingletonModel(name="Instance 1")
|
||||||
instance2 = MySingletonModel(name="Instance 2")
|
instance2 = MySingletonModel(name="Instance 2")
|
||||||
|
|
||||||
|
@@ -1110,7 +1110,7 @@ class DataProvider(SingletonMixin, DataSequence):
|
|||||||
|
|
||||||
To be implemented by derived classes.
|
To be implemented by derived classes.
|
||||||
"""
|
"""
|
||||||
return self.provider_id() == self.config.abstract_provider
|
raise NotImplementedError()
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
@@ -1121,6 +1121,11 @@ class DataProvider(SingletonMixin, DataSequence):
|
|||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def update_data(
|
def update_data(
|
||||||
self,
|
self,
|
||||||
force_enable: Optional[bool] = False,
|
force_enable: Optional[bool] = False,
|
||||||
@@ -1595,6 +1600,11 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
|
|||||||
)
|
)
|
||||||
return list(key_set)
|
return list(key_set)
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def __getitem__(self, key: str) -> pd.Series:
|
def __getitem__(self, key: str) -> pd.Series:
|
||||||
"""Retrieve a Pandas Series for a specified key from the data in each DataProvider.
|
"""Retrieve a Pandas Series for a specified key from the data in each DataProvider.
|
||||||
|
|
||||||
|
48
src/akkudoktoreos/core/decorators.py
Normal file
48
src/akkudoktoreos/core/decorators.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
from collections.abc import Callable
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class classproperty:
|
||||||
|
"""A decorator to define a read-only property at the class level.
|
||||||
|
|
||||||
|
This class replaces the built-in `property` which is no longer available in
|
||||||
|
combination with @classmethod since Python 3.13 to allow a method to be
|
||||||
|
accessed as a property on the class itself, rather than an instance. This
|
||||||
|
is useful when you want a property-like syntax for methods that depend on
|
||||||
|
the class rather than any instance of the class.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
class MyClass:
|
||||||
|
_value = 42
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@classproperty
|
||||||
|
def value(cls):
|
||||||
|
return cls._value
|
||||||
|
|
||||||
|
print(MyClass.value) # Outputs: 42
|
||||||
|
|
||||||
|
Methods:
|
||||||
|
__get__: Retrieves the value of the class property by calling the
|
||||||
|
decorated method on the class.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
fget (Callable[[Any], Any]): A method that takes the class as an
|
||||||
|
argument and returns a value.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
AssertionError: If `fget` is not defined when `__get__` is called.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, fget: Callable[[Any], Any]) -> None:
|
||||||
|
self.fget = fget
|
||||||
|
|
||||||
|
def __get__(self, _: Any, owner_cls: Optional[type[Any]] = None) -> Any:
|
||||||
|
if owner_cls is None:
|
||||||
|
return self
|
||||||
|
assert self.fget is not None
|
||||||
|
return self.fget(owner_cls)
|
@@ -169,6 +169,11 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
|
|||||||
dc_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
|
dc_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
|
||||||
ev_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
|
ev_charge_hours: Optional[NDArray[Shape["*"], float]] = Field(default=None, description="TBD")
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def set_parameters(
|
def set_parameters(
|
||||||
self,
|
self,
|
||||||
parameters: EnergieManagementSystemParameters,
|
parameters: EnergieManagementSystemParameters,
|
||||||
@@ -193,9 +198,9 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
|
|||||||
self.ev = ev
|
self.ev = ev
|
||||||
self.home_appliance = home_appliance
|
self.home_appliance = home_appliance
|
||||||
self.inverter = inverter
|
self.inverter = inverter
|
||||||
self.ac_charge_hours = np.full(self.config.prediction_hours, 0.0)
|
self.ac_charge_hours = np.full(self.config.prediction.hours, 0.0)
|
||||||
self.dc_charge_hours = np.full(self.config.prediction_hours, 1.0)
|
self.dc_charge_hours = np.full(self.config.prediction.hours, 1.0)
|
||||||
self.ev_charge_hours = np.full(self.config.prediction_hours, 0.0)
|
self.ev_charge_hours = np.full(self.config.prediction.hours, 0.0)
|
||||||
|
|
||||||
def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
|
def set_akku_discharge_hours(self, ds: np.ndarray) -> None:
|
||||||
if self.battery:
|
if self.battery:
|
||||||
@@ -246,11 +251,11 @@ class EnergieManagementSystem(SingletonMixin, ConfigMixin, PredictionMixin, Pyda
|
|||||||
error_msg = "Start datetime unknown."
|
error_msg = "Start datetime unknown."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
if self.config.prediction_hours is None:
|
if self.config.prediction.hours is None:
|
||||||
error_msg = "Prediction hours unknown."
|
error_msg = "Prediction hours unknown."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
if self.config.optimisation_hours is None:
|
if self.config.prediction.optimisation_hours is None:
|
||||||
error_msg = "Optimisation hours unknown."
|
error_msg = "Optimisation hours unknown."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
|
@@ -4,7 +4,6 @@ Kept in an extra module to avoid cyclic dependencies on package import.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field, computed_field, field_validator
|
from pydantic import Field, computed_field, field_validator
|
||||||
@@ -14,21 +13,20 @@ from akkudoktoreos.core.logabc import logging_str_to_level
|
|||||||
|
|
||||||
|
|
||||||
class LoggingCommonSettings(SettingsBaseModel):
|
class LoggingCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for logging."""
|
"""Logging Configuration."""
|
||||||
|
|
||||||
logging_level_default: Optional[str] = Field(
|
level: Optional[str] = Field(
|
||||||
default=None, description="EOS default logging level."
|
default=None,
|
||||||
|
description="EOS default logging level.",
|
||||||
|
examples=["INFO", "DEBUG", "WARNING", "ERROR", "CRITICAL"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("logging_level_default", mode="after")
|
@field_validator("level", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
|
def set_default_logging_level(cls, value: Optional[str]) -> Optional[str]:
|
||||||
if isinstance(value, str) and value.upper() == "NONE":
|
if isinstance(value, str) and value.upper() == "NONE":
|
||||||
value = None
|
value = None
|
||||||
if value is None and (env_level := os.getenv("EOS_LOGGING_LEVEL")) is not None:
|
|
||||||
# Take default logging level from special environment variable
|
|
||||||
value = env_level
|
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
level = logging_str_to_level(value)
|
level = logging_str_to_level(value)
|
||||||
@@ -38,7 +36,7 @@ class LoggingCommonSettings(SettingsBaseModel):
|
|||||||
# Computed fields
|
# Computed fields
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def logging_level_root(self) -> str:
|
def root_level(self) -> str:
|
||||||
"""Root logger logging level."""
|
"""Root logger logging level."""
|
||||||
level = logging.getLogger().getEffectiveLevel()
|
level = logging.getLogger().getEffectiveLevel()
|
||||||
level_name = logging.getLevelName(level)
|
level_name = logging.getLevelName(level)
|
||||||
|
@@ -14,6 +14,7 @@ Key Features:
|
|||||||
|
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
|
from copy import deepcopy
|
||||||
from typing import Any, Dict, List, Optional, Type, Union
|
from typing import Any, Dict, List, Optional, Type, Union
|
||||||
from zoneinfo import ZoneInfo
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
@@ -35,6 +36,21 @@ from pydantic import (
|
|||||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
||||||
|
|
||||||
|
|
||||||
|
def merge_models(source: BaseModel, update_dict: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
def deep_update(source_dict: dict[str, Any], update_dict: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
for key, value in source_dict.items():
|
||||||
|
if isinstance(value, dict) and isinstance(update_dict.get(key), dict):
|
||||||
|
update_dict[key] = deep_update(update_dict[key], value)
|
||||||
|
else:
|
||||||
|
update_dict[key] = value
|
||||||
|
return update_dict
|
||||||
|
|
||||||
|
source_dict = source.model_dump(exclude_unset=True)
|
||||||
|
merged_dict = deep_update(source_dict, deepcopy(update_dict))
|
||||||
|
|
||||||
|
return merged_dict
|
||||||
|
|
||||||
|
|
||||||
class PydanticTypeAdapterDateTime(TypeAdapter[pendulum.DateTime]):
|
class PydanticTypeAdapterDateTime(TypeAdapter[pendulum.DateTime]):
|
||||||
"""Custom type adapter for Pendulum DateTime fields."""
|
"""Custom type adapter for Pendulum DateTime fields."""
|
||||||
|
|
||||||
@@ -113,9 +129,16 @@ class PydanticBaseModel(BaseModel):
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
# Override Pydantic’s serialization for all DateTime fields
|
# Override Pydantic’s serialization for all DateTime fields
|
||||||
def model_dump(self, *args: Any, **kwargs: Any) -> dict:
|
def model_dump(
|
||||||
|
self, *args: Any, include_computed_fields: bool = True, **kwargs: Any
|
||||||
|
) -> dict[str, Any]:
|
||||||
"""Custom dump method to handle serialization for DateTime fields."""
|
"""Custom dump method to handle serialization for DateTime fields."""
|
||||||
result = super().model_dump(*args, **kwargs)
|
result = super().model_dump(*args, **kwargs)
|
||||||
|
|
||||||
|
if not include_computed_fields:
|
||||||
|
for computed_field_name in self.model_computed_fields:
|
||||||
|
result.pop(computed_field_name, None)
|
||||||
|
|
||||||
for key, value in result.items():
|
for key, value in result.items():
|
||||||
if isinstance(value, pendulum.DateTime):
|
if isinstance(value, pendulum.DateTime):
|
||||||
result[key] = PydanticTypeAdapterDateTime.serialize(value)
|
result[key] = PydanticTypeAdapterDateTime.serialize(value)
|
||||||
@@ -170,6 +193,10 @@ class PydanticBaseModel(BaseModel):
|
|||||||
"""
|
"""
|
||||||
return cls.model_validate(data)
|
return cls.model_validate(data)
|
||||||
|
|
||||||
|
def model_dump_json(self, *args: Any, indent: Optional[int] = None, **kwargs: Any) -> str:
|
||||||
|
data = self.model_dump(*args, **kwargs)
|
||||||
|
return json.dumps(data, indent=indent, default=str)
|
||||||
|
|
||||||
def to_json(self) -> str:
|
def to_json(self) -> str:
|
||||||
"""Convert the PydanticBaseModel instance to a JSON string.
|
"""Convert the PydanticBaseModel instance to a JSON string.
|
||||||
|
|
||||||
|
@@ -1,113 +1,2 @@
|
|||||||
{
|
{
|
||||||
"config_file_path": null,
|
|
||||||
"config_folder_path": null,
|
|
||||||
"data_cache_path": null,
|
|
||||||
"data_cache_subpath": null,
|
|
||||||
"data_folder_path": null,
|
|
||||||
"data_output_path": null,
|
|
||||||
"data_output_subpath": null,
|
|
||||||
"elecprice_charges_kwh": 0.21,
|
|
||||||
"elecprice_provider": null,
|
|
||||||
"elecpriceimport_file_path": null,
|
|
||||||
"latitude": 52.5,
|
|
||||||
"load_import_file_path": null,
|
|
||||||
"load_name": null,
|
|
||||||
"load_provider": null,
|
|
||||||
"loadakkudoktor_year_energy": null,
|
|
||||||
"logging_level": "INFO",
|
|
||||||
"longitude": 13.4,
|
|
||||||
"optimization_ev_available_charge_rates_percent": null,
|
|
||||||
"optimization_hours": 48,
|
|
||||||
"optimization_penalty": null,
|
|
||||||
"prediction_historic_hours": 48,
|
|
||||||
"prediction_hours": 48,
|
|
||||||
"pvforecast0_albedo": null,
|
|
||||||
"pvforecast0_inverter_model": null,
|
|
||||||
"pvforecast0_inverter_paco": null,
|
|
||||||
"pvforecast0_loss": null,
|
|
||||||
"pvforecast0_module_model": null,
|
|
||||||
"pvforecast0_modules_per_string": null,
|
|
||||||
"pvforecast0_mountingplace": "free",
|
|
||||||
"pvforecast0_optimal_surface_tilt": false,
|
|
||||||
"pvforecast0_optimalangles": false,
|
|
||||||
"pvforecast0_peakpower": null,
|
|
||||||
"pvforecast0_pvtechchoice": "crystSi",
|
|
||||||
"pvforecast0_strings_per_inverter": null,
|
|
||||||
"pvforecast0_surface_azimuth": 180,
|
|
||||||
"pvforecast0_surface_tilt": 0,
|
|
||||||
"pvforecast0_trackingtype": 0,
|
|
||||||
"pvforecast0_userhorizon": null,
|
|
||||||
"pvforecast1_albedo": null,
|
|
||||||
"pvforecast1_inverter_model": null,
|
|
||||||
"pvforecast1_inverter_paco": null,
|
|
||||||
"pvforecast1_loss": 0,
|
|
||||||
"pvforecast1_module_model": null,
|
|
||||||
"pvforecast1_modules_per_string": null,
|
|
||||||
"pvforecast1_mountingplace": "free",
|
|
||||||
"pvforecast1_optimal_surface_tilt": false,
|
|
||||||
"pvforecast1_optimalangles": false,
|
|
||||||
"pvforecast1_peakpower": null,
|
|
||||||
"pvforecast1_pvtechchoice": "crystSi",
|
|
||||||
"pvforecast1_strings_per_inverter": null,
|
|
||||||
"pvforecast1_surface_azimuth": 180,
|
|
||||||
"pvforecast1_surface_tilt": 0,
|
|
||||||
"pvforecast1_trackingtype": 0,
|
|
||||||
"pvforecast1_userhorizon": null,
|
|
||||||
"pvforecast2_albedo": null,
|
|
||||||
"pvforecast2_inverter_model": null,
|
|
||||||
"pvforecast2_inverter_paco": null,
|
|
||||||
"pvforecast2_loss": 0,
|
|
||||||
"pvforecast2_module_model": null,
|
|
||||||
"pvforecast2_modules_per_string": null,
|
|
||||||
"pvforecast2_mountingplace": "free",
|
|
||||||
"pvforecast2_optimal_surface_tilt": false,
|
|
||||||
"pvforecast2_optimalangles": false,
|
|
||||||
"pvforecast2_peakpower": null,
|
|
||||||
"pvforecast2_pvtechchoice": "crystSi",
|
|
||||||
"pvforecast2_strings_per_inverter": null,
|
|
||||||
"pvforecast2_surface_azimuth": 180,
|
|
||||||
"pvforecast2_surface_tilt": 0,
|
|
||||||
"pvforecast2_trackingtype": 0,
|
|
||||||
"pvforecast2_userhorizon": null,
|
|
||||||
"pvforecast3_albedo": null,
|
|
||||||
"pvforecast3_inverter_model": null,
|
|
||||||
"pvforecast3_inverter_paco": null,
|
|
||||||
"pvforecast3_loss": 0,
|
|
||||||
"pvforecast3_module_model": null,
|
|
||||||
"pvforecast3_modules_per_string": null,
|
|
||||||
"pvforecast3_mountingplace": "free",
|
|
||||||
"pvforecast3_optimal_surface_tilt": false,
|
|
||||||
"pvforecast3_optimalangles": false,
|
|
||||||
"pvforecast3_peakpower": null,
|
|
||||||
"pvforecast3_pvtechchoice": "crystSi",
|
|
||||||
"pvforecast3_strings_per_inverter": null,
|
|
||||||
"pvforecast3_surface_azimuth": 180,
|
|
||||||
"pvforecast3_surface_tilt": 0,
|
|
||||||
"pvforecast3_trackingtype": 0,
|
|
||||||
"pvforecast3_userhorizon": null,
|
|
||||||
"pvforecast4_albedo": null,
|
|
||||||
"pvforecast4_inverter_model": null,
|
|
||||||
"pvforecast4_inverter_paco": null,
|
|
||||||
"pvforecast4_loss": 0,
|
|
||||||
"pvforecast4_module_model": null,
|
|
||||||
"pvforecast4_modules_per_string": null,
|
|
||||||
"pvforecast4_mountingplace": "free",
|
|
||||||
"pvforecast4_optimal_surface_tilt": false,
|
|
||||||
"pvforecast4_optimalangles": false,
|
|
||||||
"pvforecast4_peakpower": null,
|
|
||||||
"pvforecast4_pvtechchoice": "crystSi",
|
|
||||||
"pvforecast4_strings_per_inverter": null,
|
|
||||||
"pvforecast4_surface_azimuth": 180,
|
|
||||||
"pvforecast4_surface_tilt": 0,
|
|
||||||
"pvforecast4_trackingtype": 0,
|
|
||||||
"pvforecast4_userhorizon": null,
|
|
||||||
"pvforecast_provider": null,
|
|
||||||
"pvforecastimport_file_path": null,
|
|
||||||
"server_eos_startup_eosdash": true,
|
|
||||||
"server_eos_host": "0.0.0.0",
|
|
||||||
"server_eos_port": 8503,
|
|
||||||
"server_eosdash_host": "0.0.0.0",
|
|
||||||
"server_eosdash_port": 8504,
|
|
||||||
"weather_provider": null,
|
|
||||||
"weatherimport_file_path": null
|
|
||||||
}
|
}
|
||||||
|
@@ -1,11 +1,14 @@
|
|||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import Field, field_validator
|
||||||
|
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.core.pydantic import ParametersBaseModel
|
from akkudoktoreos.devices.devicesabc import (
|
||||||
from akkudoktoreos.devices.devicesabc import DeviceBase
|
DeviceBase,
|
||||||
|
DeviceOptimizeResult,
|
||||||
|
DeviceParameters,
|
||||||
|
)
|
||||||
from akkudoktoreos.utils.utils import NumpyEncoder
|
from akkudoktoreos.utils.utils import NumpyEncoder
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@@ -22,14 +25,26 @@ def max_charging_power_field(description: Optional[str] = None) -> float:
|
|||||||
|
|
||||||
|
|
||||||
def initial_soc_percentage_field(description: str) -> int:
|
def initial_soc_percentage_field(description: str) -> int:
|
||||||
return Field(default=0, ge=0, le=100, description=description)
|
return Field(default=0, ge=0, le=100, description=description, examples=[42])
|
||||||
|
|
||||||
|
|
||||||
class BaseBatteryParameters(ParametersBaseModel):
|
def discharging_efficiency_field(default_value: float) -> float:
|
||||||
"""Base class for battery parameters with fields for capacity, efficiency, and state of charge."""
|
return Field(
|
||||||
|
default=default_value,
|
||||||
|
gt=0,
|
||||||
|
le=1,
|
||||||
|
description="A float representing the discharge efficiency of the battery.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseBatteryParameters(DeviceParameters):
|
||||||
|
"""Battery Device Simulation Configuration."""
|
||||||
|
|
||||||
|
device_id: str = Field(description="ID of battery", examples=["battery1"])
|
||||||
capacity_wh: int = Field(
|
capacity_wh: int = Field(
|
||||||
gt=0, description="An integer representing the capacity of the battery in watt-hours."
|
gt=0,
|
||||||
|
description="An integer representing the capacity of the battery in watt-hours.",
|
||||||
|
examples=[8000],
|
||||||
)
|
)
|
||||||
charging_efficiency: float = Field(
|
charging_efficiency: float = Field(
|
||||||
default=0.88,
|
default=0.88,
|
||||||
@@ -37,12 +52,7 @@ class BaseBatteryParameters(ParametersBaseModel):
|
|||||||
le=1,
|
le=1,
|
||||||
description="A float representing the charging efficiency of the battery.",
|
description="A float representing the charging efficiency of the battery.",
|
||||||
)
|
)
|
||||||
discharging_efficiency: float = Field(
|
discharging_efficiency: float = discharging_efficiency_field(0.88)
|
||||||
default=0.88,
|
|
||||||
gt=0,
|
|
||||||
le=1,
|
|
||||||
description="A float representing the discharge efficiency of the battery.",
|
|
||||||
)
|
|
||||||
max_charge_power_w: Optional[float] = max_charging_power_field()
|
max_charge_power_w: Optional[float] = max_charging_power_field()
|
||||||
initial_soc_percentage: int = initial_soc_percentage_field(
|
initial_soc_percentage: int = initial_soc_percentage_field(
|
||||||
"An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)."
|
"An integer representing the state of charge of the battery at the **start** of the current hour (not the current state)."
|
||||||
@@ -52,6 +62,7 @@ class BaseBatteryParameters(ParametersBaseModel):
|
|||||||
ge=0,
|
ge=0,
|
||||||
le=100,
|
le=100,
|
||||||
description="An integer representing the minimum state of charge (SOC) of the battery in percentage.",
|
description="An integer representing the minimum state of charge (SOC) of the battery in percentage.",
|
||||||
|
examples=[10],
|
||||||
)
|
)
|
||||||
max_soc_percentage: int = Field(
|
max_soc_percentage: int = Field(
|
||||||
default=100,
|
default=100,
|
||||||
@@ -66,17 +77,19 @@ class SolarPanelBatteryParameters(BaseBatteryParameters):
|
|||||||
|
|
||||||
|
|
||||||
class ElectricVehicleParameters(BaseBatteryParameters):
|
class ElectricVehicleParameters(BaseBatteryParameters):
|
||||||
"""Parameters specific to an electric vehicle (EV)."""
|
"""Battery Electric Vehicle Device Simulation Configuration."""
|
||||||
|
|
||||||
discharging_efficiency: float = 1.0
|
device_id: str = Field(description="ID of electric vehicle", examples=["ev1"])
|
||||||
|
discharging_efficiency: float = discharging_efficiency_field(1.0)
|
||||||
initial_soc_percentage: int = initial_soc_percentage_field(
|
initial_soc_percentage: int = initial_soc_percentage_field(
|
||||||
"An integer representing the current state of charge (SOC) of the battery in percentage."
|
"An integer representing the current state of charge (SOC) of the battery in percentage."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ElectricVehicleResult(BaseModel):
|
class ElectricVehicleResult(DeviceOptimizeResult):
|
||||||
"""Result class containing information related to the electric vehicle's charging and discharging behavior."""
|
"""Result class containing information related to the electric vehicle's charging and discharging behavior."""
|
||||||
|
|
||||||
|
device_id: str = Field(description="ID of electric vehicle", examples=["ev1"])
|
||||||
charge_array: list[float] = Field(
|
charge_array: list[float] = Field(
|
||||||
description="Hourly charging status (0 for no charging, 1 for charging)."
|
description="Hourly charging status (0 for no charging, 1 for charging)."
|
||||||
)
|
)
|
||||||
@@ -84,7 +97,6 @@ class ElectricVehicleResult(BaseModel):
|
|||||||
description="Hourly discharging status (0 for no discharging, 1 for discharging)."
|
description="Hourly discharging status (0 for no discharging, 1 for discharging)."
|
||||||
)
|
)
|
||||||
discharging_efficiency: float = Field(description="The discharge efficiency as a float..")
|
discharging_efficiency: float = Field(description="The discharge efficiency as a float..")
|
||||||
hours: int = Field(description="Number of hours in the simulation.")
|
|
||||||
capacity_wh: int = Field(description="Capacity of the EV’s battery in watt-hours.")
|
capacity_wh: int = Field(description="Capacity of the EV’s battery in watt-hours.")
|
||||||
charging_efficiency: float = Field(description="Charging efficiency as a float..")
|
charging_efficiency: float = Field(description="Charging efficiency as a float..")
|
||||||
max_charge_power_w: int = Field(description="Maximum charging power in watts.")
|
max_charge_power_w: int = Field(description="Maximum charging power in watts.")
|
||||||
@@ -103,81 +115,30 @@ class ElectricVehicleResult(BaseModel):
|
|||||||
class Battery(DeviceBase):
|
class Battery(DeviceBase):
|
||||||
"""Represents a battery device with methods to simulate energy charging and discharging."""
|
"""Represents a battery device with methods to simulate energy charging and discharging."""
|
||||||
|
|
||||||
def __init__(
|
def __init__(self, parameters: Optional[BaseBatteryParameters] = None):
|
||||||
self,
|
self.parameters: Optional[BaseBatteryParameters] = None
|
||||||
parameters: Optional[BaseBatteryParameters] = None,
|
super().__init__(parameters)
|
||||||
hours: Optional[int] = 24,
|
|
||||||
provider_id: Optional[str] = None,
|
|
||||||
):
|
|
||||||
# Initialize configuration and parameters
|
|
||||||
self.provider_id = provider_id
|
|
||||||
self.prefix = "<invalid>"
|
|
||||||
if self.provider_id == "GenericBattery":
|
|
||||||
self.prefix = "battery"
|
|
||||||
elif self.provider_id == "GenericBEV":
|
|
||||||
self.prefix = "bev"
|
|
||||||
|
|
||||||
self.parameters = parameters
|
def _setup(self) -> None:
|
||||||
if hours is None:
|
|
||||||
self.hours = self.total_hours # TODO where does that come from?
|
|
||||||
else:
|
|
||||||
self.hours = hours
|
|
||||||
|
|
||||||
self.initialised = False
|
|
||||||
|
|
||||||
# Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
|
|
||||||
if self.parameters is not None:
|
|
||||||
self.setup()
|
|
||||||
|
|
||||||
def setup(self) -> None:
|
|
||||||
"""Sets up the battery parameters based on configuration or provided parameters."""
|
"""Sets up the battery parameters based on configuration or provided parameters."""
|
||||||
if self.initialised:
|
assert self.parameters is not None
|
||||||
return
|
self.capacity_wh = self.parameters.capacity_wh
|
||||||
|
self.initial_soc_percentage = self.parameters.initial_soc_percentage
|
||||||
|
self.charging_efficiency = self.parameters.charging_efficiency
|
||||||
|
self.discharging_efficiency = self.parameters.discharging_efficiency
|
||||||
|
|
||||||
if self.provider_id:
|
# Only assign for storage battery
|
||||||
# Setup from configuration
|
self.min_soc_percentage = (
|
||||||
self.capacity_wh = getattr(self.config, f"{self.prefix}_capacity")
|
self.parameters.min_soc_percentage
|
||||||
self.initial_soc_percentage = getattr(self.config, f"{self.prefix}_initial_soc")
|
if isinstance(self.parameters, SolarPanelBatteryParameters)
|
||||||
self.hours = self.total_hours # TODO where does that come from?
|
else 0
|
||||||
self.charging_efficiency = getattr(self.config, f"{self.prefix}_charging_efficiency")
|
)
|
||||||
self.discharging_efficiency = getattr(
|
self.max_soc_percentage = self.parameters.max_soc_percentage
|
||||||
self.config, f"{self.prefix}_discharging_efficiency"
|
|
||||||
)
|
|
||||||
self.max_charge_power_w = getattr(self.config, f"{self.prefix}_max_charging_power")
|
|
||||||
|
|
||||||
if self.provider_id == "GenericBattery":
|
|
||||||
self.min_soc_percentage = getattr(
|
|
||||||
self.config,
|
|
||||||
f"{self.prefix}_soc_min",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.min_soc_percentage = 0
|
|
||||||
|
|
||||||
self.max_soc_percentage = getattr(
|
|
||||||
self.config,
|
|
||||||
f"{self.prefix}_soc_max",
|
|
||||||
)
|
|
||||||
elif self.parameters:
|
|
||||||
# Setup from parameters
|
|
||||||
self.capacity_wh = self.parameters.capacity_wh
|
|
||||||
self.initial_soc_percentage = self.parameters.initial_soc_percentage
|
|
||||||
self.charging_efficiency = self.parameters.charging_efficiency
|
|
||||||
self.discharging_efficiency = self.parameters.discharging_efficiency
|
|
||||||
self.max_charge_power_w = self.parameters.max_charge_power_w
|
|
||||||
# Only assign for storage battery
|
|
||||||
self.min_soc_percentage = (
|
|
||||||
self.parameters.min_soc_percentage
|
|
||||||
if isinstance(self.parameters, SolarPanelBatteryParameters)
|
|
||||||
else 0
|
|
||||||
)
|
|
||||||
self.max_soc_percentage = self.parameters.max_soc_percentage
|
|
||||||
else:
|
|
||||||
error_msg = "Parameters and provider ID are missing. Cannot instantiate."
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise ValueError(error_msg)
|
|
||||||
|
|
||||||
# Initialize state of charge
|
# Initialize state of charge
|
||||||
if self.max_charge_power_w is None:
|
if self.parameters.max_charge_power_w is not None:
|
||||||
|
self.max_charge_power_w = self.parameters.max_charge_power_w
|
||||||
|
else:
|
||||||
self.max_charge_power_w = self.capacity_wh # TODO this should not be equal capacity_wh
|
self.max_charge_power_w = self.capacity_wh # TODO this should not be equal capacity_wh
|
||||||
self.discharge_array = np.full(self.hours, 1)
|
self.discharge_array = np.full(self.hours, 1)
|
||||||
self.charge_array = np.full(self.hours, 1)
|
self.charge_array = np.full(self.hours, 1)
|
||||||
@@ -185,11 +146,10 @@ class Battery(DeviceBase):
|
|||||||
self.min_soc_wh = (self.min_soc_percentage / 100) * self.capacity_wh
|
self.min_soc_wh = (self.min_soc_percentage / 100) * self.capacity_wh
|
||||||
self.max_soc_wh = (self.max_soc_percentage / 100) * self.capacity_wh
|
self.max_soc_wh = (self.max_soc_percentage / 100) * self.capacity_wh
|
||||||
|
|
||||||
self.initialised = True
|
|
||||||
|
|
||||||
def to_dict(self) -> dict[str, Any]:
|
def to_dict(self) -> dict[str, Any]:
|
||||||
"""Converts the object to a dictionary representation."""
|
"""Converts the object to a dictionary representation."""
|
||||||
return {
|
return {
|
||||||
|
"device_id": self.device_id,
|
||||||
"capacity_wh": self.capacity_wh,
|
"capacity_wh": self.capacity_wh,
|
||||||
"initial_soc_percentage": self.initial_soc_percentage,
|
"initial_soc_percentage": self.initial_soc_percentage,
|
||||||
"soc_wh": self.soc_wh,
|
"soc_wh": self.soc_wh,
|
||||||
|
@@ -1,307 +1,42 @@
|
|||||||
from typing import Any, ClassVar, Dict, Optional, Union
|
from typing import Optional
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
from numpydantic import NDArray, Shape
|
|
||||||
from pydantic import Field, computed_field
|
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
|
||||||
from akkudoktoreos.core.coreabc import SingletonMixin
|
from akkudoktoreos.core.coreabc import SingletonMixin
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.devices.battery import Battery
|
from akkudoktoreos.devices.battery import Battery
|
||||||
from akkudoktoreos.devices.devicesabc import DevicesBase
|
from akkudoktoreos.devices.devicesabc import DevicesBase
|
||||||
from akkudoktoreos.devices.generic import HomeAppliance
|
from akkudoktoreos.devices.generic import HomeAppliance
|
||||||
from akkudoktoreos.devices.inverter import Inverter
|
from akkudoktoreos.devices.inverter import Inverter
|
||||||
from akkudoktoreos.prediction.interpolator import SelfConsumptionProbabilityInterpolator
|
from akkudoktoreos.devices.settings import DevicesCommonSettings
|
||||||
from akkudoktoreos.utils.datetimeutil import to_duration
|
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DevicesCommonSettings(SettingsBaseModel):
|
|
||||||
"""Base configuration for devices simulation settings."""
|
|
||||||
|
|
||||||
# Battery
|
|
||||||
# -------
|
|
||||||
battery_provider: Optional[str] = Field(
|
|
||||||
default=None, description="Id of Battery simulation provider."
|
|
||||||
)
|
|
||||||
battery_capacity: Optional[int] = Field(default=None, description="Battery capacity [Wh].")
|
|
||||||
battery_initial_soc: Optional[int] = Field(
|
|
||||||
default=None, description="Battery initial state of charge [%]."
|
|
||||||
)
|
|
||||||
battery_soc_min: Optional[int] = Field(
|
|
||||||
default=None, description="Battery minimum state of charge [%]."
|
|
||||||
)
|
|
||||||
battery_soc_max: Optional[int] = Field(
|
|
||||||
default=None, description="Battery maximum state of charge [%]."
|
|
||||||
)
|
|
||||||
battery_charging_efficiency: Optional[float] = Field(
|
|
||||||
default=None, description="Battery charging efficiency [%]."
|
|
||||||
)
|
|
||||||
battery_discharging_efficiency: Optional[float] = Field(
|
|
||||||
default=None, description="Battery discharging efficiency [%]."
|
|
||||||
)
|
|
||||||
battery_max_charging_power: Optional[int] = Field(
|
|
||||||
default=None, description="Battery maximum charge power [W]."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Battery Electric Vehicle
|
|
||||||
# ------------------------
|
|
||||||
bev_provider: Optional[str] = Field(
|
|
||||||
default=None, description="Id of Battery Electric Vehicle simulation provider."
|
|
||||||
)
|
|
||||||
bev_capacity: Optional[int] = Field(
|
|
||||||
default=None, description="Battery Electric Vehicle capacity [Wh]."
|
|
||||||
)
|
|
||||||
bev_initial_soc: Optional[int] = Field(
|
|
||||||
default=None, description="Battery Electric Vehicle initial state of charge [%]."
|
|
||||||
)
|
|
||||||
bev_soc_max: Optional[int] = Field(
|
|
||||||
default=None, description="Battery Electric Vehicle maximum state of charge [%]."
|
|
||||||
)
|
|
||||||
bev_charging_efficiency: Optional[float] = Field(
|
|
||||||
default=None, description="Battery Electric Vehicle charging efficiency [%]."
|
|
||||||
)
|
|
||||||
bev_discharging_efficiency: Optional[float] = Field(
|
|
||||||
default=None, description="Battery Electric Vehicle discharging efficiency [%]."
|
|
||||||
)
|
|
||||||
bev_max_charging_power: Optional[int] = Field(
|
|
||||||
default=None, description="Battery Electric Vehicle maximum charge power [W]."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Home Appliance - Dish Washer
|
|
||||||
# ----------------------------
|
|
||||||
dishwasher_provider: Optional[str] = Field(
|
|
||||||
default=None, description="Id of Dish Washer simulation provider."
|
|
||||||
)
|
|
||||||
dishwasher_consumption: Optional[int] = Field(
|
|
||||||
default=None, description="Dish Washer energy consumption [Wh]."
|
|
||||||
)
|
|
||||||
dishwasher_duration: Optional[int] = Field(
|
|
||||||
default=None, description="Dish Washer usage duration [h]."
|
|
||||||
)
|
|
||||||
|
|
||||||
# PV Inverter
|
|
||||||
# -----------
|
|
||||||
inverter_provider: Optional[str] = Field(
|
|
||||||
default=None, description="Id of PV Inverter simulation provider."
|
|
||||||
)
|
|
||||||
inverter_power_max: Optional[float] = Field(
|
|
||||||
default=None, description="Inverter maximum power [W]."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Devices(SingletonMixin, DevicesBase):
|
class Devices(SingletonMixin, DevicesBase):
|
||||||
# Results of the devices simulation and
|
def __init__(self, settings: Optional[DevicesCommonSettings] = None):
|
||||||
# insights into various parameters over the entire forecast period.
|
if hasattr(self, "_initialized"):
|
||||||
# -----------------------------------------------------------------
|
return
|
||||||
last_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
super().__init__()
|
||||||
default=None, description="The load in watt-hours per hour."
|
if settings is None:
|
||||||
)
|
settings = self.config.devices
|
||||||
eauto_soc_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
if settings is None:
|
||||||
default=None, description="The state of charge of the EV for each hour."
|
return
|
||||||
)
|
|
||||||
einnahmen_euro_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="The revenue from grid feed-in or other sources in euros per hour.",
|
|
||||||
)
|
|
||||||
home_appliance_wh_per_hour: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="The energy consumption of a household appliance in watt-hours per hour.",
|
|
||||||
)
|
|
||||||
kosten_euro_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None, description="The costs in euros per hour."
|
|
||||||
)
|
|
||||||
grid_import_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None, description="The grid energy drawn in watt-hours per hour."
|
|
||||||
)
|
|
||||||
grid_export_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None, description="The energy fed into the grid in watt-hours per hour."
|
|
||||||
)
|
|
||||||
verluste_wh_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None, description="The losses in watt-hours per hour."
|
|
||||||
)
|
|
||||||
akku_soc_pro_stunde: Optional[NDArray[Shape["*"], float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="The state of charge of the battery (not the EV) in percentage per hour.",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Computed fields
|
# initialize devices
|
||||||
@computed_field # type: ignore[prop-decorator]
|
if settings.batteries is not None:
|
||||||
@property
|
for battery_params in settings.batteries:
|
||||||
def total_balance_euro(self) -> float:
|
self.add_device(Battery(battery_params))
|
||||||
"""The total balance of revenues minus costs in euros."""
|
if settings.inverters is not None:
|
||||||
return self.total_revenues_euro - self.total_costs_euro
|
for inverter_params in settings.inverters:
|
||||||
|
self.add_device(Inverter(inverter_params))
|
||||||
|
if settings.home_appliances is not None:
|
||||||
|
for home_appliance_params in settings.home_appliances:
|
||||||
|
self.add_device(HomeAppliance(home_appliance_params))
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
self.post_setup()
|
||||||
@property
|
|
||||||
def total_revenues_euro(self) -> float:
|
|
||||||
"""The total revenues in euros."""
|
|
||||||
if self.einnahmen_euro_pro_stunde is None:
|
|
||||||
return 0
|
|
||||||
return np.nansum(self.einnahmen_euro_pro_stunde)
|
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
def post_setup(self) -> None:
|
||||||
@property
|
for device in self.devices.values():
|
||||||
def total_costs_euro(self) -> float:
|
device.post_setup()
|
||||||
"""The total costs in euros."""
|
|
||||||
if self.kosten_euro_pro_stunde is None:
|
|
||||||
return 0
|
|
||||||
return np.nansum(self.kosten_euro_pro_stunde)
|
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
|
||||||
@property
|
|
||||||
def total_losses_wh(self) -> float:
|
|
||||||
"""The total losses in watt-hours over the entire period."""
|
|
||||||
if self.verluste_wh_pro_stunde is None:
|
|
||||||
return 0
|
|
||||||
return np.nansum(self.verluste_wh_pro_stunde)
|
|
||||||
|
|
||||||
# Devices
|
|
||||||
# TODO: Make devices class a container of device simulation providers.
|
|
||||||
# Device simulations to be used are then enabled in the configuration.
|
|
||||||
battery: ClassVar[Battery] = Battery(provider_id="GenericBattery")
|
|
||||||
ev: ClassVar[Battery] = Battery(provider_id="GenericBEV")
|
|
||||||
home_appliance: ClassVar[HomeAppliance] = HomeAppliance(provider_id="GenericDishWasher")
|
|
||||||
inverter: ClassVar[Inverter] = Inverter(
|
|
||||||
self_consumption_predictor=SelfConsumptionProbabilityInterpolator,
|
|
||||||
battery=battery,
|
|
||||||
provider_id="GenericInverter",
|
|
||||||
)
|
|
||||||
|
|
||||||
def update_data(self) -> None:
|
|
||||||
"""Update device simulation data."""
|
|
||||||
# Assure devices are set up
|
|
||||||
self.battery.setup()
|
|
||||||
self.ev.setup()
|
|
||||||
self.home_appliance.setup()
|
|
||||||
self.inverter.setup()
|
|
||||||
|
|
||||||
# Pre-allocate arrays for the results, optimized for speed
|
|
||||||
self.last_wh_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.grid_export_wh_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.grid_import_wh_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.kosten_euro_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.einnahmen_euro_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.akku_soc_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.eauto_soc_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.verluste_wh_pro_stunde = np.full((self.total_hours), np.nan)
|
|
||||||
self.home_appliance_wh_per_hour = np.full((self.total_hours), np.nan)
|
|
||||||
|
|
||||||
# Set initial state
|
|
||||||
simulation_step = to_duration("1 hour")
|
|
||||||
if self.battery:
|
|
||||||
self.akku_soc_pro_stunde[0] = self.battery.current_soc_percentage()
|
|
||||||
if self.ev:
|
|
||||||
self.eauto_soc_pro_stunde[0] = self.ev.current_soc_percentage()
|
|
||||||
|
|
||||||
# Get predictions for full device simulation time range
|
|
||||||
# gesamtlast[stunde]
|
|
||||||
load_total_mean = self.prediction.key_to_array(
|
|
||||||
"load_total_mean",
|
|
||||||
start_datetime=self.start_datetime,
|
|
||||||
end_datetime=self.end_datetime,
|
|
||||||
interval=simulation_step,
|
|
||||||
)
|
|
||||||
# pv_prognose_wh[stunde]
|
|
||||||
pvforecast_ac_power = self.prediction.key_to_array(
|
|
||||||
"pvforecast_ac_power",
|
|
||||||
start_datetime=self.start_datetime,
|
|
||||||
end_datetime=self.end_datetime,
|
|
||||||
interval=simulation_step,
|
|
||||||
)
|
|
||||||
# strompreis_euro_pro_wh[stunde]
|
|
||||||
elecprice_marketprice_wh = self.prediction.key_to_array(
|
|
||||||
"elecprice_marketprice_wh",
|
|
||||||
start_datetime=self.start_datetime,
|
|
||||||
end_datetime=self.end_datetime,
|
|
||||||
interval=simulation_step,
|
|
||||||
)
|
|
||||||
# einspeiseverguetung_euro_pro_wh_arr[stunde]
|
|
||||||
# TODO: Create prediction for einspeiseverguetung_euro_pro_wh_arr
|
|
||||||
einspeiseverguetung_euro_pro_wh_arr = np.full((self.total_hours), 0.078)
|
|
||||||
|
|
||||||
for stunde_since_now in range(0, self.total_hours):
|
|
||||||
hour = self.start_datetime.hour + stunde_since_now
|
|
||||||
|
|
||||||
# Accumulate loads and PV generation
|
|
||||||
consumption = load_total_mean[stunde_since_now]
|
|
||||||
self.verluste_wh_pro_stunde[stunde_since_now] = 0.0
|
|
||||||
|
|
||||||
# Home appliances
|
|
||||||
if self.home_appliance:
|
|
||||||
ha_load = self.home_appliance.get_load_for_hour(hour)
|
|
||||||
consumption += ha_load
|
|
||||||
self.home_appliance_wh_per_hour[stunde_since_now] = ha_load
|
|
||||||
|
|
||||||
# E-Auto handling
|
|
||||||
if self.ev:
|
|
||||||
if self.ev_charge_hours[hour] > 0:
|
|
||||||
geladene_menge_eauto, verluste_eauto = self.ev.charge_energy(
|
|
||||||
None, hour, relative_power=self.ev_charge_hours[hour]
|
|
||||||
)
|
|
||||||
consumption += geladene_menge_eauto
|
|
||||||
self.verluste_wh_pro_stunde[stunde_since_now] += verluste_eauto
|
|
||||||
self.eauto_soc_pro_stunde[stunde_since_now] = self.ev.current_soc_percentage()
|
|
||||||
|
|
||||||
# Process inverter logic
|
|
||||||
grid_export, grid_import, losses, self_consumption = (0.0, 0.0, 0.0, 0.0)
|
|
||||||
if self.battery:
|
|
||||||
self.battery.set_charge_allowed_for_hour(self.dc_charge_hours[hour], hour)
|
|
||||||
if self.inverter:
|
|
||||||
generation = pvforecast_ac_power[hour]
|
|
||||||
grid_export, grid_import, losses, self_consumption = self.inverter.process_energy(
|
|
||||||
generation, consumption, hour
|
|
||||||
)
|
|
||||||
|
|
||||||
# AC PV Battery Charge
|
|
||||||
if self.battery and self.ac_charge_hours[hour] > 0.0:
|
|
||||||
self.battery.set_charge_allowed_for_hour(1, hour)
|
|
||||||
geladene_menge, verluste_wh = self.battery.charge_energy(
|
|
||||||
None, hour, relative_power=self.ac_charge_hours[hour]
|
|
||||||
)
|
|
||||||
# print(stunde, " ", geladene_menge, " ",self.ac_charge_hours[stunde]," ",self.battery.current_soc_percentage())
|
|
||||||
consumption += geladene_menge
|
|
||||||
grid_import += geladene_menge
|
|
||||||
self.verluste_wh_pro_stunde[stunde_since_now] += verluste_wh
|
|
||||||
|
|
||||||
self.grid_export_wh_pro_stunde[stunde_since_now] = grid_export
|
|
||||||
self.grid_import_wh_pro_stunde[stunde_since_now] = grid_import
|
|
||||||
self.verluste_wh_pro_stunde[stunde_since_now] += losses
|
|
||||||
self.last_wh_pro_stunde[stunde_since_now] = consumption
|
|
||||||
|
|
||||||
# Financial calculations
|
|
||||||
self.kosten_euro_pro_stunde[stunde_since_now] = (
|
|
||||||
grid_import * self.strompreis_euro_pro_wh[hour]
|
|
||||||
)
|
|
||||||
self.einnahmen_euro_pro_stunde[stunde_since_now] = (
|
|
||||||
grid_export * self.einspeiseverguetung_euro_pro_wh_arr[hour]
|
|
||||||
)
|
|
||||||
|
|
||||||
# battery SOC tracking
|
|
||||||
if self.battery:
|
|
||||||
self.akku_soc_pro_stunde[stunde_since_now] = self.battery.current_soc_percentage()
|
|
||||||
else:
|
|
||||||
self.akku_soc_pro_stunde[stunde_since_now] = 0.0
|
|
||||||
|
|
||||||
def report_dict(self) -> Dict[str, Any]:
|
|
||||||
"""Provides devices simulation output as a dictionary."""
|
|
||||||
out: Dict[str, Optional[Union[np.ndarray, float]]] = {
|
|
||||||
"Last_Wh_pro_Stunde": self.last_wh_pro_stunde,
|
|
||||||
"grid_export_Wh_pro_Stunde": self.grid_export_wh_pro_stunde,
|
|
||||||
"grid_import_Wh_pro_Stunde": self.grid_import_wh_pro_stunde,
|
|
||||||
"Kosten_Euro_pro_Stunde": self.kosten_euro_pro_stunde,
|
|
||||||
"akku_soc_pro_stunde": self.akku_soc_pro_stunde,
|
|
||||||
"Einnahmen_Euro_pro_Stunde": self.einnahmen_euro_pro_stunde,
|
|
||||||
"Gesamtbilanz_Euro": self.total_balance_euro,
|
|
||||||
"EAuto_SoC_pro_Stunde": self.eauto_soc_pro_stunde,
|
|
||||||
"Gesamteinnahmen_Euro": self.total_revenues_euro,
|
|
||||||
"Gesamtkosten_Euro": self.total_costs_euro,
|
|
||||||
"Verluste_Pro_Stunde": self.verluste_wh_pro_stunde,
|
|
||||||
"Gesamt_Verluste": self.total_losses_wh,
|
|
||||||
"Home_appliance_wh_per_hour": self.home_appliance_wh_per_hour,
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
# Initialize the Devices simulation, it is a singleton.
|
# Initialize the Devices simulation, it is a singleton.
|
||||||
|
@@ -1,22 +1,45 @@
|
|||||||
"""Abstract and base classes for devices."""
|
"""Abstract and base classes for devices."""
|
||||||
|
|
||||||
from typing import Optional
|
from enum import Enum
|
||||||
|
from typing import Optional, Type
|
||||||
|
|
||||||
from pendulum import DateTime
|
from pendulum import DateTime
|
||||||
from pydantic import ConfigDict, computed_field
|
from pydantic import Field, computed_field
|
||||||
|
|
||||||
from akkudoktoreos.core.coreabc import (
|
from akkudoktoreos.core.coreabc import (
|
||||||
ConfigMixin,
|
ConfigMixin,
|
||||||
|
DevicesMixin,
|
||||||
EnergyManagementSystemMixin,
|
EnergyManagementSystemMixin,
|
||||||
PredictionMixin,
|
PredictionMixin,
|
||||||
)
|
)
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
from akkudoktoreos.core.pydantic import ParametersBaseModel
|
||||||
from akkudoktoreos.utils.datetimeutil import to_duration
|
from akkudoktoreos.utils.datetimeutil import to_duration
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceParameters(ParametersBaseModel):
|
||||||
|
device_id: str = Field(description="ID of device", examples="device1")
|
||||||
|
hours: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
gt=0,
|
||||||
|
description="Number of prediction hours. Defaults to global config prediction hours.",
|
||||||
|
examples=[None],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceOptimizeResult(ParametersBaseModel):
|
||||||
|
device_id: str = Field(description="ID of device", examples=["device1"])
|
||||||
|
hours: int = Field(gt=0, description="Number of hours in the simulation.", examples=[24])
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceState(Enum):
|
||||||
|
UNINITIALIZED = 0
|
||||||
|
PREPARED = 1
|
||||||
|
INITIALIZED = 2
|
||||||
|
|
||||||
|
|
||||||
class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
|
class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
|
||||||
"""A mixin to manage start, end datetimes for devices data.
|
"""A mixin to manage start, end datetimes for devices data.
|
||||||
|
|
||||||
@@ -28,16 +51,16 @@ class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
|
|||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def end_datetime(self) -> Optional[DateTime]:
|
def end_datetime(self) -> Optional[DateTime]:
|
||||||
"""Compute the end datetime based on the `start_datetime` and `prediction_hours`.
|
"""Compute the end datetime based on the `start_datetime` and `hours`.
|
||||||
|
|
||||||
Ajusts the calculated end time if DST transitions occur within the prediction window.
|
Ajusts the calculated end time if DST transitions occur within the prediction window.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
|
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
|
||||||
"""
|
"""
|
||||||
if self.ems.start_datetime and self.config.prediction_hours:
|
if self.ems.start_datetime and self.config.prediction.hours:
|
||||||
end_datetime = self.ems.start_datetime + to_duration(
|
end_datetime = self.ems.start_datetime + to_duration(
|
||||||
f"{self.config.prediction_hours} hours"
|
f"{self.config.prediction.hours} hours"
|
||||||
)
|
)
|
||||||
dst_change = end_datetime.offset_hours - self.ems.start_datetime.offset_hours
|
dst_change = end_datetime.offset_hours - self.ems.start_datetime.offset_hours
|
||||||
logger.debug(
|
logger.debug(
|
||||||
@@ -68,33 +91,92 @@ class DevicesStartEndMixin(ConfigMixin, EnergyManagementSystemMixin):
|
|||||||
return int(duration.total_hours())
|
return int(duration.total_hours())
|
||||||
|
|
||||||
|
|
||||||
class DeviceBase(DevicesStartEndMixin, PredictionMixin):
|
class DeviceBase(DevicesStartEndMixin, PredictionMixin, DevicesMixin):
|
||||||
"""Base class for device simulations.
|
"""Base class for device simulations.
|
||||||
|
|
||||||
Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
|
Enables access to EOS configuration data (attribute `config`), EOS prediction data (attribute
|
||||||
`prediction`).
|
`prediction`) and EOS device registry (attribute `devices`).
|
||||||
|
|
||||||
Note:
|
Behavior:
|
||||||
Validation on assignment of the Pydantic model is disabled to speed up simulation runs.
|
- Several initialization phases (setup, post_setup):
|
||||||
|
- setup: Initialize class attributes from DeviceParameters (pydantic input validation)
|
||||||
|
- post_setup: Set connections between devices
|
||||||
|
- NotImplemented:
|
||||||
|
- hooks during optimization
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- This class is base to concrete devices like battery, inverter, etc. that are used in optimization.
|
||||||
|
- Not a pydantic model for a low footprint during optimization.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Disable validation on assignment to speed up simulation runs.
|
def __init__(self, parameters: Optional[DeviceParameters] = None):
|
||||||
model_config = ConfigDict(
|
self.device_id: str = "<invalid>"
|
||||||
validate_assignment=False,
|
self.parameters: Optional[DeviceParameters] = None
|
||||||
)
|
self.hours = -1
|
||||||
|
if self.total_hours is not None:
|
||||||
|
self.hours = self.total_hours
|
||||||
|
|
||||||
|
self.initialized = DeviceState.UNINITIALIZED
|
||||||
|
|
||||||
|
if parameters is not None:
|
||||||
|
self.setup(parameters)
|
||||||
|
|
||||||
|
def setup(self, parameters: DeviceParameters) -> None:
|
||||||
|
if self.initialized != DeviceState.UNINITIALIZED:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.parameters = parameters
|
||||||
|
self.device_id = self.parameters.device_id
|
||||||
|
|
||||||
|
if self.parameters.hours is not None:
|
||||||
|
self.hours = self.parameters.hours
|
||||||
|
if self.hours < 0:
|
||||||
|
raise ValueError("hours is unset")
|
||||||
|
|
||||||
|
self._setup()
|
||||||
|
|
||||||
|
self.initialized = DeviceState.PREPARED
|
||||||
|
|
||||||
|
def post_setup(self) -> None:
|
||||||
|
if self.initialized.value >= DeviceState.INITIALIZED.value:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._post_setup()
|
||||||
|
self.initialized = DeviceState.INITIALIZED
|
||||||
|
|
||||||
|
def _setup(self) -> None:
|
||||||
|
"""Implement custom setup in derived device classes."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _post_setup(self) -> None:
|
||||||
|
"""Implement custom setup in derived device classes that is run when all devices are initialized."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class DevicesBase(DevicesStartEndMixin, PredictionMixin, PydanticBaseModel):
|
class DevicesBase(DevicesStartEndMixin, PredictionMixin):
|
||||||
"""Base class for handling device data.
|
"""Base class for handling device data.
|
||||||
|
|
||||||
Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
|
Enables access to EOS configuration data (attribute `config`) and EOS prediction data (attribute
|
||||||
`prediction`).
|
`prediction`).
|
||||||
|
|
||||||
Note:
|
|
||||||
Validation on assignment of the Pydantic model is disabled to speed up simulation runs.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Disable validation on assignment to speed up simulation runs.
|
def __init__(self) -> None:
|
||||||
model_config = ConfigDict(
|
super().__init__()
|
||||||
validate_assignment=False,
|
self.devices: dict[str, "DeviceBase"] = dict()
|
||||||
)
|
|
||||||
|
def get_device_by_id(self, device_id: str) -> Optional["DeviceBase"]:
|
||||||
|
return self.devices.get(device_id)
|
||||||
|
|
||||||
|
def add_device(self, device: Optional["DeviceBase"]) -> None:
|
||||||
|
if device is None:
|
||||||
|
return
|
||||||
|
assert device.device_id not in self.devices, f"{device.device_id} already registered"
|
||||||
|
self.devices[device.device_id] = device
|
||||||
|
|
||||||
|
def remove_device(self, device: Type["DeviceBase"] | str) -> bool:
|
||||||
|
if isinstance(device, DeviceBase):
|
||||||
|
device = device.device_id
|
||||||
|
return self.devices.pop(device, None) is not None # type: ignore[arg-type]
|
||||||
|
|
||||||
|
def reset(self) -> None:
|
||||||
|
self.devices = dict()
|
||||||
|
@@ -4,20 +4,24 @@ import numpy as np
|
|||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
|
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.core.pydantic import ParametersBaseModel
|
from akkudoktoreos.devices.devicesabc import DeviceBase, DeviceParameters
|
||||||
from akkudoktoreos.devices.devicesabc import DeviceBase
|
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class HomeApplianceParameters(ParametersBaseModel):
|
class HomeApplianceParameters(DeviceParameters):
|
||||||
|
"""Home Appliance Device Simulation Configuration."""
|
||||||
|
|
||||||
|
device_id: str = Field(description="ID of home appliance", examples=["dishwasher"])
|
||||||
consumption_wh: int = Field(
|
consumption_wh: int = Field(
|
||||||
gt=0,
|
gt=0,
|
||||||
description="An integer representing the energy consumption of a household device in watt-hours.",
|
description="An integer representing the energy consumption of a household device in watt-hours.",
|
||||||
|
examples=[2000],
|
||||||
)
|
)
|
||||||
duration_h: int = Field(
|
duration_h: int = Field(
|
||||||
gt=0,
|
gt=0,
|
||||||
description="An integer representing the usage duration of a household device in hours.",
|
description="An integer representing the usage duration of a household device in hours.",
|
||||||
|
examples=[3],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -25,46 +29,15 @@ class HomeAppliance(DeviceBase):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
parameters: Optional[HomeApplianceParameters] = None,
|
parameters: Optional[HomeApplianceParameters] = None,
|
||||||
hours: Optional[int] = 24,
|
|
||||||
provider_id: Optional[str] = None,
|
|
||||||
):
|
):
|
||||||
# Configuration initialisation
|
self.parameters: Optional[HomeApplianceParameters] = None
|
||||||
self.provider_id = provider_id
|
super().__init__(parameters)
|
||||||
self.prefix = "<invalid>"
|
|
||||||
if self.provider_id == "GenericDishWasher":
|
|
||||||
self.prefix = "dishwasher"
|
|
||||||
# Parameter initialisiation
|
|
||||||
self.parameters = parameters
|
|
||||||
if hours is None:
|
|
||||||
self.hours = self.total_hours
|
|
||||||
else:
|
|
||||||
self.hours = hours
|
|
||||||
|
|
||||||
self.initialised = False
|
def _setup(self) -> None:
|
||||||
# Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
|
assert self.parameters is not None
|
||||||
if self.parameters is not None:
|
|
||||||
self.setup()
|
|
||||||
|
|
||||||
def setup(self) -> None:
|
|
||||||
if self.initialised:
|
|
||||||
return
|
|
||||||
if self.provider_id is not None:
|
|
||||||
# Setup by configuration
|
|
||||||
self.hours = self.total_hours
|
|
||||||
self.consumption_wh = getattr(self.config, f"{self.prefix}_consumption")
|
|
||||||
self.duration_h = getattr(self.config, f"{self.prefix}_duration")
|
|
||||||
elif self.parameters is not None:
|
|
||||||
# Setup by parameters
|
|
||||||
self.consumption_wh = (
|
|
||||||
self.parameters.consumption_wh
|
|
||||||
) # Total energy consumption of the device in kWh
|
|
||||||
self.duration_h = self.parameters.duration_h # Duration of use in hours
|
|
||||||
else:
|
|
||||||
error_msg = "Parameters and provider ID missing. Can't instantiate."
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise ValueError(error_msg)
|
|
||||||
self.load_curve = np.zeros(self.hours) # Initialize the load curve with zeros
|
self.load_curve = np.zeros(self.hours) # Initialize the load curve with zeros
|
||||||
self.initialised = True
|
self.duration_h = self.parameters.duration_h
|
||||||
|
self.consumption_wh = self.parameters.consumption_wh
|
||||||
|
|
||||||
def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
|
def set_starting_time(self, start_hour: int, global_start_hour: int = 0) -> None:
|
||||||
"""Sets the start time of the device and generates the corresponding load curve.
|
"""Sets the start time of the device and generates the corresponding load curve.
|
||||||
|
@@ -18,9 +18,9 @@ class Heatpump:
|
|||||||
COP_COEFFICIENT = 0.1
|
COP_COEFFICIENT = 0.1
|
||||||
"""COP increase per degree"""
|
"""COP increase per degree"""
|
||||||
|
|
||||||
def __init__(self, max_heat_output: int, prediction_hours: int):
|
def __init__(self, max_heat_output: int, hours: int):
|
||||||
self.max_heat_output = max_heat_output
|
self.max_heat_output = max_heat_output
|
||||||
self.prediction_hours = prediction_hours
|
self.hours = hours
|
||||||
self.log = logging.getLogger(__name__)
|
self.log = logging.getLogger(__name__)
|
||||||
|
|
||||||
def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:
|
def __check_outside_temperature_range__(self, temp_celsius: float) -> bool:
|
||||||
@@ -117,9 +117,9 @@ class Heatpump:
|
|||||||
"""Simulate power data for 24 hours based on provided temperatures."""
|
"""Simulate power data for 24 hours based on provided temperatures."""
|
||||||
power_data: List[float] = []
|
power_data: List[float] = []
|
||||||
|
|
||||||
if len(temperatures) != self.prediction_hours:
|
if len(temperatures) != self.hours:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"The temperature array must contain exactly {self.prediction_hours} entries, "
|
f"The temperature array must contain exactly {self.hours} entries, "
|
||||||
"one for each hour of the day."
|
"one for each hour of the day."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -1,64 +1,48 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
from scipy.interpolate import RegularGridInterpolator
|
|
||||||
|
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.core.pydantic import ParametersBaseModel
|
from akkudoktoreos.devices.devicesabc import DeviceBase, DeviceParameters
|
||||||
from akkudoktoreos.devices.battery import Battery
|
from akkudoktoreos.prediction.interpolator import get_eos_load_interpolator
|
||||||
from akkudoktoreos.devices.devicesabc import DeviceBase
|
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class InverterParameters(ParametersBaseModel):
|
class InverterParameters(DeviceParameters):
|
||||||
max_power_wh: float = Field(gt=0)
|
"""Inverter Device Simulation Configuration."""
|
||||||
|
|
||||||
|
device_id: str = Field(description="ID of inverter", examples=["inverter1"])
|
||||||
|
max_power_wh: float = Field(gt=0, examples=[10000])
|
||||||
|
battery_id: Optional[str] = Field(
|
||||||
|
default=None, description="ID of battery", examples=[None, "battery1"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Inverter(DeviceBase):
|
class Inverter(DeviceBase):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
self_consumption_predictor: RegularGridInterpolator,
|
|
||||||
parameters: Optional[InverterParameters] = None,
|
parameters: Optional[InverterParameters] = None,
|
||||||
battery: Optional[Battery] = None,
|
|
||||||
provider_id: Optional[str] = None,
|
|
||||||
):
|
):
|
||||||
# Configuration initialisation
|
self.parameters: Optional[InverterParameters] = None
|
||||||
self.provider_id = provider_id
|
super().__init__(parameters)
|
||||||
self.prefix = "<invalid>"
|
|
||||||
if self.provider_id == "GenericInverter":
|
def _setup(self) -> None:
|
||||||
self.prefix = "inverter"
|
assert self.parameters is not None
|
||||||
# Parameter initialisiation
|
if self.parameters.battery_id is None:
|
||||||
self.parameters = parameters
|
|
||||||
if battery is None:
|
|
||||||
# For the moment raise exception
|
# For the moment raise exception
|
||||||
# TODO: Make battery configurable by config
|
# TODO: Make battery configurable by config
|
||||||
error_msg = "Battery for PV inverter is mandatory."
|
error_msg = "Battery for PV inverter is mandatory."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise NotImplementedError(error_msg)
|
raise NotImplementedError(error_msg)
|
||||||
self.battery = battery # Connection to a battery object
|
self.self_consumption_predictor = get_eos_load_interpolator()
|
||||||
self.self_consumption_predictor = self_consumption_predictor
|
self.max_power_wh = (
|
||||||
|
self.parameters.max_power_wh
|
||||||
|
) # Maximum power that the inverter can handle
|
||||||
|
|
||||||
self.initialised = False
|
def _post_setup(self) -> None:
|
||||||
# Run setup if parameters are given, otherwise setup() has to be called later when the config is initialised.
|
assert self.parameters is not None
|
||||||
if self.parameters is not None:
|
self.battery = self.devices.get_device_by_id(self.parameters.battery_id)
|
||||||
self.setup()
|
|
||||||
|
|
||||||
def setup(self) -> None:
|
|
||||||
if self.initialised:
|
|
||||||
return
|
|
||||||
if self.provider_id is not None:
|
|
||||||
# Setup by configuration
|
|
||||||
self.max_power_wh = getattr(self.config, f"{self.prefix}_power_max")
|
|
||||||
elif self.parameters is not None:
|
|
||||||
# Setup by parameters
|
|
||||||
self.max_power_wh = (
|
|
||||||
self.parameters.max_power_wh # Maximum power that the inverter can handle
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
error_msg = "Parameters and provider ID missing. Can't instantiate."
|
|
||||||
logger.error(error_msg)
|
|
||||||
raise ValueError(error_msg)
|
|
||||||
|
|
||||||
def process_energy(
|
def process_energy(
|
||||||
self, generation: float, consumption: float, hour: int
|
self, generation: float, consumption: float, hour: int
|
||||||
|
27
src/akkudoktoreos/devices/settings.py
Normal file
27
src/akkudoktoreos/devices/settings.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
from akkudoktoreos.devices.battery import BaseBatteryParameters
|
||||||
|
from akkudoktoreos.devices.generic import HomeApplianceParameters
|
||||||
|
from akkudoktoreos.devices.inverter import InverterParameters
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DevicesCommonSettings(SettingsBaseModel):
|
||||||
|
"""Base configuration for devices simulation settings."""
|
||||||
|
|
||||||
|
batteries: Optional[list[BaseBatteryParameters]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="List of battery/ev devices",
|
||||||
|
examples=[[{"device_id": "battery1", "capacity_wh": 8000}]],
|
||||||
|
)
|
||||||
|
inverters: Optional[list[InverterParameters]] = Field(
|
||||||
|
default=None, description="List of inverters", examples=[[]]
|
||||||
|
)
|
||||||
|
home_appliances: Optional[list[HomeApplianceParameters]] = Field(
|
||||||
|
default=None, description="List of home appliances", examples=[[]]
|
||||||
|
)
|
@@ -23,20 +23,22 @@ logger = get_logger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class MeasurementCommonSettings(SettingsBaseModel):
|
class MeasurementCommonSettings(SettingsBaseModel):
|
||||||
measurement_load0_name: Optional[str] = Field(
|
"""Measurement Configuration."""
|
||||||
default=None, description="Name of the load0 source (e.g. 'Household', 'Heat Pump')"
|
|
||||||
|
load0_name: Optional[str] = Field(
|
||||||
|
default=None, description="Name of the load0 source", examples=["Household", "Heat Pump"]
|
||||||
)
|
)
|
||||||
measurement_load1_name: Optional[str] = Field(
|
load1_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load1 source (e.g. 'Household', 'Heat Pump')"
|
default=None, description="Name of the load1 source", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load2_name: Optional[str] = Field(
|
load2_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load2 source (e.g. 'Household', 'Heat Pump')"
|
default=None, description="Name of the load2 source", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load3_name: Optional[str] = Field(
|
load3_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load3 source (e.g. 'Household', 'Heat Pump')"
|
default=None, description="Name of the load3 source", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load4_name: Optional[str] = Field(
|
load4_name: Optional[str] = Field(
|
||||||
default=None, description="Name of the load4 source (e.g. 'Household', 'Heat Pump')"
|
default=None, description="Name of the load4 source", examples=[None]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -48,42 +50,42 @@ class MeasurementDataRecord(DataRecord):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# Single loads, to be aggregated to total load
|
# Single loads, to be aggregated to total load
|
||||||
measurement_load0_mr: Optional[float] = Field(
|
load0_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load0 meter reading [kWh]"
|
default=None, ge=0, description="Load0 meter reading [kWh]", examples=[40421]
|
||||||
)
|
)
|
||||||
measurement_load1_mr: Optional[float] = Field(
|
load1_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load1 meter reading [kWh]"
|
default=None, ge=0, description="Load1 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load2_mr: Optional[float] = Field(
|
load2_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load2 meter reading [kWh]"
|
default=None, ge=0, description="Load2 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load3_mr: Optional[float] = Field(
|
load3_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load3 meter reading [kWh]"
|
default=None, ge=0, description="Load3 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
measurement_load4_mr: Optional[float] = Field(
|
load4_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Load4 meter reading [kWh]"
|
default=None, ge=0, description="Load4 meter reading [kWh]", examples=[None]
|
||||||
)
|
)
|
||||||
|
|
||||||
measurement_max_loads: ClassVar[int] = 5 # Maximum number of loads that can be set
|
max_loads: ClassVar[int] = 5 # Maximum number of loads that can be set
|
||||||
|
|
||||||
measurement_grid_export_mr: Optional[float] = Field(
|
grid_export_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Export to grid meter reading [kWh]"
|
default=None, ge=0, description="Export to grid meter reading [kWh]", examples=[1000]
|
||||||
)
|
)
|
||||||
|
|
||||||
measurement_grid_import_mr: Optional[float] = Field(
|
grid_import_mr: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Import from grid meter reading [kWh]"
|
default=None, ge=0, description="Import from grid meter reading [kWh]", examples=[1000]
|
||||||
)
|
)
|
||||||
|
|
||||||
# Computed fields
|
# Computed fields
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def measurement_loads(self) -> List[str]:
|
def loads(self) -> List[str]:
|
||||||
"""Compute a list of active loads."""
|
"""Compute a list of active loads."""
|
||||||
active_loads = []
|
active_loads = []
|
||||||
|
|
||||||
# Loop through measurement_loadx
|
# Loop through loadx
|
||||||
for i in range(self.measurement_max_loads):
|
for i in range(self.max_loads):
|
||||||
load_attr = f"measurement_load{i}_mr"
|
load_attr = f"load{i}_mr"
|
||||||
|
|
||||||
# Check if either attribute is set and add to active loads
|
# Check if either attribute is set and add to active loads
|
||||||
if getattr(self, load_attr, None):
|
if getattr(self, load_attr, None):
|
||||||
@@ -103,9 +105,14 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
|||||||
)
|
)
|
||||||
|
|
||||||
topics: ClassVar[List[str]] = [
|
topics: ClassVar[List[str]] = [
|
||||||
"measurement_load",
|
"load",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def _interval_count(
|
def _interval_count(
|
||||||
self, start_datetime: DateTime, end_datetime: DateTime, interval: Duration
|
self, start_datetime: DateTime, end_datetime: DateTime, interval: Duration
|
||||||
) -> int:
|
) -> int:
|
||||||
@@ -143,11 +150,16 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
|||||||
if topic not in self.topics:
|
if topic not in self.topics:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
topic_keys = [key for key in self.config.config_keys if key.startswith(topic)]
|
topic_keys = [
|
||||||
|
key for key in self.config.measurement.model_fields.keys() if key.startswith(topic)
|
||||||
|
]
|
||||||
key = None
|
key = None
|
||||||
if topic == "measurement_load":
|
if topic == "load":
|
||||||
for config_key in topic_keys:
|
for config_key in topic_keys:
|
||||||
if config_key.endswith("_name") and getattr(self.config, config_key) == name:
|
if (
|
||||||
|
config_key.endswith("_name")
|
||||||
|
and getattr(self.config.measurement, config_key) == name
|
||||||
|
):
|
||||||
key = topic + config_key[len(topic) : len(topic) + 1] + "_mr"
|
key = topic + config_key[len(topic) : len(topic) + 1] + "_mr"
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -243,9 +255,9 @@ class Measurement(SingletonMixin, DataImportMixin, DataSequence):
|
|||||||
end_datetime = self[-1].date_time
|
end_datetime = self[-1].date_time
|
||||||
size = self._interval_count(start_datetime, end_datetime, interval)
|
size = self._interval_count(start_datetime, end_datetime, interval)
|
||||||
load_total_array = np.zeros(size)
|
load_total_array = np.zeros(size)
|
||||||
# Loop through measurement_load<x>_mr
|
# Loop through load<x>_mr
|
||||||
for i in range(self.record_class().measurement_max_loads):
|
for i in range(self.record_class().max_loads):
|
||||||
key = f"measurement_load{i}_mr"
|
key = f"load{i}_mr"
|
||||||
# Calculate load per interval
|
# Calculate load per interval
|
||||||
load_array = self._energy_from_meter_readings(
|
load_array = self._energy_from_meter_readings(
|
||||||
key=key, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
|
key=key, start_datetime=start_datetime, end_datetime=end_datetime, interval=interval
|
||||||
|
@@ -1,7 +1,6 @@
|
|||||||
import logging
|
import logging
|
||||||
import random
|
import random
|
||||||
import time
|
import time
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@@ -25,7 +24,6 @@ from akkudoktoreos.devices.battery import (
|
|||||||
)
|
)
|
||||||
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
|
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
|
||||||
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
||||||
from akkudoktoreos.prediction.interpolator import SelfConsumptionProbabilityInterpolator
|
|
||||||
from akkudoktoreos.utils.utils import NumpyEncoder
|
from akkudoktoreos.utils.utils import NumpyEncoder
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@@ -112,8 +110,8 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
):
|
):
|
||||||
"""Initialize the optimization problem with the required parameters."""
|
"""Initialize the optimization problem with the required parameters."""
|
||||||
self.opti_param: dict[str, Any] = {}
|
self.opti_param: dict[str, Any] = {}
|
||||||
self.fixed_eauto_hours = self.config.prediction_hours - self.config.optimization_hours
|
self.fixed_eauto_hours = self.config.prediction.hours - self.config.optimization.hours
|
||||||
self.possible_charge_values = self.config.optimization_ev_available_charge_rates_percent
|
self.possible_charge_values = self.config.optimization.ev_available_charge_rates_percent
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
self.fix_seed = fixed_seed
|
self.fix_seed = fixed_seed
|
||||||
self.optimize_ev = True
|
self.optimize_ev = True
|
||||||
@@ -180,23 +178,23 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
total_states = 3 * len_ac
|
total_states = 3 * len_ac
|
||||||
|
|
||||||
# 1. Mutating the charge_discharge part
|
# 1. Mutating the charge_discharge part
|
||||||
charge_discharge_part = individual[: self.config.prediction_hours]
|
charge_discharge_part = individual[: self.config.prediction.hours]
|
||||||
(charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)
|
(charge_discharge_mutated,) = self.toolbox.mutate_charge_discharge(charge_discharge_part)
|
||||||
|
|
||||||
# Instead of a fixed clamping to 0..8 or 0..6 dynamically:
|
# Instead of a fixed clamping to 0..8 or 0..6 dynamically:
|
||||||
charge_discharge_mutated = np.clip(charge_discharge_mutated, 0, total_states - 1)
|
charge_discharge_mutated = np.clip(charge_discharge_mutated, 0, total_states - 1)
|
||||||
individual[: self.config.prediction_hours] = charge_discharge_mutated
|
individual[: self.config.prediction.hours] = charge_discharge_mutated
|
||||||
|
|
||||||
# 2. Mutating the EV charge part, if active
|
# 2. Mutating the EV charge part, if active
|
||||||
if self.optimize_ev:
|
if self.optimize_ev:
|
||||||
ev_charge_part = individual[
|
ev_charge_part = individual[
|
||||||
self.config.prediction_hours : self.config.prediction_hours * 2
|
self.config.prediction.hours : self.config.prediction.hours * 2
|
||||||
]
|
]
|
||||||
(ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)
|
(ev_charge_part_mutated,) = self.toolbox.mutate_ev_charge_index(ev_charge_part)
|
||||||
ev_charge_part_mutated[self.config.prediction_hours - self.fixed_eauto_hours :] = [
|
ev_charge_part_mutated[self.config.prediction.hours - self.fixed_eauto_hours :] = [
|
||||||
0
|
0
|
||||||
] * self.fixed_eauto_hours
|
] * self.fixed_eauto_hours
|
||||||
individual[self.config.prediction_hours : self.config.prediction_hours * 2] = (
|
individual[self.config.prediction.hours : self.config.prediction.hours * 2] = (
|
||||||
ev_charge_part_mutated
|
ev_charge_part_mutated
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -212,13 +210,13 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
def create_individual(self) -> list[int]:
|
def create_individual(self) -> list[int]:
|
||||||
# Start with discharge states for the individual
|
# Start with discharge states for the individual
|
||||||
individual_components = [
|
individual_components = [
|
||||||
self.toolbox.attr_discharge_state() for _ in range(self.config.prediction_hours)
|
self.toolbox.attr_discharge_state() for _ in range(self.config.prediction.hours)
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add EV charge index values if optimize_ev is True
|
# Add EV charge index values if optimize_ev is True
|
||||||
if self.optimize_ev:
|
if self.optimize_ev:
|
||||||
individual_components += [
|
individual_components += [
|
||||||
self.toolbox.attr_ev_charge_index() for _ in range(self.config.prediction_hours)
|
self.toolbox.attr_ev_charge_index() for _ in range(self.config.prediction.hours)
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add the start time of the household appliance if it's being optimized
|
# Add the start time of the household appliance if it's being optimized
|
||||||
@@ -251,7 +249,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
individual.extend(eautocharge_hours_index.tolist())
|
individual.extend(eautocharge_hours_index.tolist())
|
||||||
elif self.optimize_ev:
|
elif self.optimize_ev:
|
||||||
# Falls optimize_ev aktiv ist, aber keine EV-Daten vorhanden sind, fügen wir Nullen hinzu
|
# Falls optimize_ev aktiv ist, aber keine EV-Daten vorhanden sind, fügen wir Nullen hinzu
|
||||||
individual.extend([0] * self.config.prediction_hours)
|
individual.extend([0] * self.config.prediction.hours)
|
||||||
|
|
||||||
# Add dishwasher start time if applicable
|
# Add dishwasher start time if applicable
|
||||||
if self.opti_param.get("home_appliance", 0) > 0 and washingstart_int is not None:
|
if self.opti_param.get("home_appliance", 0) > 0 and washingstart_int is not None:
|
||||||
@@ -273,12 +271,13 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
3. Dishwasher start time (integer if applicable).
|
3. Dishwasher start time (integer if applicable).
|
||||||
"""
|
"""
|
||||||
# Discharge hours as a NumPy array of ints
|
# Discharge hours as a NumPy array of ints
|
||||||
discharge_hours_bin = np.array(individual[: self.config.prediction_hours], dtype=int)
|
discharge_hours_bin = np.array(individual[: self.config.prediction.hours], dtype=int)
|
||||||
|
|
||||||
# EV charge hours as a NumPy array of ints (if optimize_ev is True)
|
# EV charge hours as a NumPy array of ints (if optimize_ev is True)
|
||||||
eautocharge_hours_index = (
|
eautocharge_hours_index = (
|
||||||
|
# append ev charging states to individual
|
||||||
np.array(
|
np.array(
|
||||||
individual[self.config.prediction_hours : self.config.prediction_hours * 2],
|
individual[self.config.prediction.hours : self.config.prediction.hours * 2],
|
||||||
dtype=int,
|
dtype=int,
|
||||||
)
|
)
|
||||||
if self.optimize_ev
|
if self.optimize_ev
|
||||||
@@ -390,7 +389,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
)
|
)
|
||||||
self.ems.set_ev_charge_hours(eautocharge_hours_float)
|
self.ems.set_ev_charge_hours(eautocharge_hours_float)
|
||||||
else:
|
else:
|
||||||
self.ems.set_ev_charge_hours(np.full(self.config.prediction_hours, 0))
|
self.ems.set_ev_charge_hours(np.full(self.config.prediction.hours, 0))
|
||||||
|
|
||||||
return self.ems.simulate(self.ems.start_datetime.hour)
|
return self.ems.simulate(self.ems.start_datetime.hour)
|
||||||
|
|
||||||
@@ -452,7 +451,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
# min_length = min(battery_soc_per_hour.size, discharge_hours_bin.size)
|
# min_length = min(battery_soc_per_hour.size, discharge_hours_bin.size)
|
||||||
# battery_soc_per_hour_tail = battery_soc_per_hour[-min_length:]
|
# battery_soc_per_hour_tail = battery_soc_per_hour[-min_length:]
|
||||||
# discharge_hours_bin_tail = discharge_hours_bin[-min_length:]
|
# discharge_hours_bin_tail = discharge_hours_bin[-min_length:]
|
||||||
# len_ac = len(self.config.optimization_ev_available_charge_rates_percent)
|
# len_ac = len(self.config.optimization.ev_available_charge_rates_percent)
|
||||||
|
|
||||||
# # # Find hours where battery SoC is 0
|
# # # Find hours where battery SoC is 0
|
||||||
# # zero_soc_mask = battery_soc_per_hour_tail == 0
|
# # zero_soc_mask = battery_soc_per_hour_tail == 0
|
||||||
@@ -501,7 +500,7 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
if parameters.eauto and self.ems.ev
|
if parameters.eauto and self.ems.ev
|
||||||
else 0
|
else 0
|
||||||
)
|
)
|
||||||
* self.config.optimization_penalty,
|
* self.config.optimization.penalty,
|
||||||
)
|
)
|
||||||
|
|
||||||
return (gesamtbilanz,)
|
return (gesamtbilanz,)
|
||||||
@@ -569,30 +568,26 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
start_hour = self.ems.start_datetime.hour
|
start_hour = self.ems.start_datetime.hour
|
||||||
|
|
||||||
einspeiseverguetung_euro_pro_wh = np.full(
|
einspeiseverguetung_euro_pro_wh = np.full(
|
||||||
self.config.prediction_hours, parameters.ems.einspeiseverguetung_euro_pro_wh
|
self.config.prediction.hours, parameters.ems.einspeiseverguetung_euro_pro_wh
|
||||||
)
|
)
|
||||||
|
|
||||||
# 1h Load to Sub 1h Load Distribution -> SelfConsumptionRate
|
# TODO: Refactor device setup phase out
|
||||||
sc = SelfConsumptionProbabilityInterpolator(
|
self.devices.reset()
|
||||||
Path(__file__).parent.resolve() / ".." / "data" / "regular_grid_interpolator.pkl"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Initialize PV and EV batteries
|
# Initialize PV and EV batteries
|
||||||
akku: Optional[Battery] = None
|
akku: Optional[Battery] = None
|
||||||
if parameters.pv_akku:
|
if parameters.pv_akku:
|
||||||
akku = Battery(
|
akku = Battery(parameters.pv_akku)
|
||||||
parameters.pv_akku,
|
self.devices.add_device(akku)
|
||||||
hours=self.config.prediction_hours,
|
akku.set_charge_per_hour(np.full(self.config.prediction.hours, 1))
|
||||||
)
|
|
||||||
akku.set_charge_per_hour(np.full(self.config.prediction_hours, 1))
|
|
||||||
|
|
||||||
eauto: Optional[Battery] = None
|
eauto: Optional[Battery] = None
|
||||||
if parameters.eauto:
|
if parameters.eauto:
|
||||||
eauto = Battery(
|
eauto = Battery(
|
||||||
parameters.eauto,
|
parameters.eauto,
|
||||||
hours=self.config.prediction_hours,
|
|
||||||
)
|
)
|
||||||
eauto.set_charge_per_hour(np.full(self.config.prediction_hours, 1))
|
self.devices.add_device(eauto)
|
||||||
|
eauto.set_charge_per_hour(np.full(self.config.prediction.hours, 1))
|
||||||
self.optimize_ev = (
|
self.optimize_ev = (
|
||||||
parameters.eauto.min_soc_percentage - parameters.eauto.initial_soc_percentage >= 0
|
parameters.eauto.min_soc_percentage - parameters.eauto.initial_soc_percentage >= 0
|
||||||
)
|
)
|
||||||
@@ -603,20 +598,22 @@ class optimization_problem(ConfigMixin, DevicesMixin, EnergyManagementSystemMixi
|
|||||||
dishwasher = (
|
dishwasher = (
|
||||||
HomeAppliance(
|
HomeAppliance(
|
||||||
parameters=parameters.dishwasher,
|
parameters=parameters.dishwasher,
|
||||||
hours=self.config.prediction_hours,
|
|
||||||
)
|
)
|
||||||
if parameters.dishwasher is not None
|
if parameters.dishwasher is not None
|
||||||
else None
|
else None
|
||||||
)
|
)
|
||||||
|
self.devices.add_device(dishwasher)
|
||||||
|
|
||||||
# Initialize the inverter and energy management system
|
# Initialize the inverter and energy management system
|
||||||
inverter: Optional[Inverter] = None
|
inverter: Optional[Inverter] = None
|
||||||
if parameters.inverter:
|
if parameters.inverter:
|
||||||
inverter = Inverter(
|
inverter = Inverter(
|
||||||
sc,
|
|
||||||
parameters.inverter,
|
parameters.inverter,
|
||||||
akku,
|
|
||||||
)
|
)
|
||||||
|
self.devices.add_device(inverter)
|
||||||
|
|
||||||
|
self.devices.post_setup()
|
||||||
|
|
||||||
self.ems.set_parameters(
|
self.ems.set_parameters(
|
||||||
parameters.ems,
|
parameters.ems,
|
||||||
inverter=inverter,
|
inverter=inverter,
|
||||||
|
@@ -9,21 +9,19 @@ logger = get_logger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class OptimizationCommonSettings(SettingsBaseModel):
|
class OptimizationCommonSettings(SettingsBaseModel):
|
||||||
"""Base configuration for optimization settings.
|
"""General Optimization Configuration.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
optimization_hours (int): Number of hours for optimizations.
|
hours (int): Number of hours for optimizations.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
optimization_hours: Optional[int] = Field(
|
hours: Optional[int] = Field(
|
||||||
default=24, ge=0, description="Number of hours into the future for optimizations."
|
default=48, ge=0, description="Number of hours into the future for optimizations."
|
||||||
)
|
)
|
||||||
|
|
||||||
optimization_penalty: Optional[int] = Field(
|
penalty: Optional[int] = Field(default=10, description="Penalty factor used in optimization.")
|
||||||
default=10, description="Penalty factor used in optimization."
|
|
||||||
)
|
|
||||||
|
|
||||||
optimization_ev_available_charge_rates_percent: Optional[List[float]] = Field(
|
ev_available_charge_rates_percent: Optional[List[float]] = Field(
|
||||||
default=[
|
default=[
|
||||||
0.0,
|
0.0,
|
||||||
6.0 / 16.0,
|
6.0 / 16.0,
|
||||||
|
@@ -3,12 +3,21 @@ from typing import Optional
|
|||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
from akkudoktoreos.prediction.elecpriceimport import ElecPriceImportCommonSettings
|
||||||
|
|
||||||
|
|
||||||
class ElecPriceCommonSettings(SettingsBaseModel):
|
class ElecPriceCommonSettings(SettingsBaseModel):
|
||||||
elecprice_provider: Optional[str] = Field(
|
"""Electricity Price Prediction Configuration."""
|
||||||
default=None, description="Electricity price provider id of provider to be used."
|
|
||||||
|
provider: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Electricity price provider id of provider to be used.",
|
||||||
|
examples=["ElecPriceAkkudoktor"],
|
||||||
)
|
)
|
||||||
elecprice_charges_kwh: Optional[float] = Field(
|
charges_kwh: Optional[float] = Field(
|
||||||
default=None, ge=0, description="Electricity price charges (€/kWh)."
|
default=None, ge=0, description="Electricity price charges (€/kWh).", examples=[0.21]
|
||||||
|
)
|
||||||
|
|
||||||
|
provider_settings: Optional[ElecPriceImportCommonSettings] = Field(
|
||||||
|
default=None, description="Provider settings", examples=[None]
|
||||||
)
|
)
|
||||||
|
@@ -49,15 +49,15 @@ class ElecPriceProvider(PredictionProvider):
|
|||||||
electricity price_provider (str): Prediction provider for electricity price.
|
electricity price_provider (str): Prediction provider for electricity price.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||||
calculated based on `start_datetime` and `prediction_hours`.
|
calculated based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||||
based on `start_datetime` and `prediction_historic_hours`.
|
based on `start_datetime` and `historic_hours`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# overload
|
# overload
|
||||||
@@ -71,4 +71,4 @@ class ElecPriceProvider(PredictionProvider):
|
|||||||
return "ElecPriceProvider"
|
return "ElecPriceProvider"
|
||||||
|
|
||||||
def enabled(self) -> bool:
|
def enabled(self) -> bool:
|
||||||
return self.provider_id() == self.config.elecprice_provider
|
return self.provider_id() == self.config.elecprice.provider
|
||||||
|
@@ -54,11 +54,11 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
|||||||
of hours into the future and retains historical data.
|
of hours into the future and retains historical data.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
hours (int, optional): Number of hours in the future for the forecast.
|
||||||
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
historic_hours (int, optional): Number of past hours for retaining data.
|
||||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
||||||
|
|
||||||
Methods:
|
Methods:
|
||||||
provider_id(): Returns a unique identifier for the provider.
|
provider_id(): Returns a unique identifier for the provider.
|
||||||
@@ -108,13 +108,13 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
|||||||
# Try to take data from 5 weeks back for prediction
|
# Try to take data from 5 weeks back for prediction
|
||||||
date = to_datetime(self.start_datetime - to_duration("35 days"), as_string="YYYY-MM-DD")
|
date = to_datetime(self.start_datetime - to_duration("35 days"), as_string="YYYY-MM-DD")
|
||||||
last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
|
last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
|
||||||
url = f"{source}/prices?start={date}&end={last_date}&tz={self.config.timezone}"
|
url = f"{source}/prices?start={date}&end={last_date}&tz={self.config.general.timezone}"
|
||||||
response = requests.get(url)
|
response = requests.get(url)
|
||||||
logger.debug(f"Response from {url}: {response}")
|
logger.debug(f"Response from {url}: {response}")
|
||||||
response.raise_for_status() # Raise an error for bad responses
|
response.raise_for_status() # Raise an error for bad responses
|
||||||
akkudoktor_data = self._validate_data(response.content)
|
akkudoktor_data = self._validate_data(response.content)
|
||||||
# We are working on fresh data (no cache), report update time
|
# We are working on fresh data (no cache), report update time
|
||||||
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
|
self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
|
||||||
return akkudoktor_data
|
return akkudoktor_data
|
||||||
|
|
||||||
def _cap_outliers(self, data: np.ndarray, sigma: int = 2) -> np.ndarray:
|
def _cap_outliers(self, data: np.ndarray, sigma: int = 2) -> np.ndarray:
|
||||||
@@ -125,18 +125,16 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
|||||||
capped_data = data.clip(min=lower_bound, max=upper_bound)
|
capped_data = data.clip(min=lower_bound, max=upper_bound)
|
||||||
return capped_data
|
return capped_data
|
||||||
|
|
||||||
def _predict_ets(
|
def _predict_ets(self, history: np.ndarray, seasonal_periods: int, hours: int) -> np.ndarray:
|
||||||
self, history: np.ndarray, seasonal_periods: int, prediction_hours: int
|
|
||||||
) -> np.ndarray:
|
|
||||||
clean_history = self._cap_outliers(history)
|
clean_history = self._cap_outliers(history)
|
||||||
model = ExponentialSmoothing(
|
model = ExponentialSmoothing(
|
||||||
clean_history, seasonal="add", seasonal_periods=seasonal_periods
|
clean_history, seasonal="add", seasonal_periods=seasonal_periods
|
||||||
).fit()
|
).fit()
|
||||||
return model.forecast(prediction_hours)
|
return model.forecast(hours)
|
||||||
|
|
||||||
def _predict_median(self, history: np.ndarray, prediction_hours: int) -> np.ndarray:
|
def _predict_median(self, history: np.ndarray, hours: int) -> np.ndarray:
|
||||||
clean_history = self._cap_outliers(history)
|
clean_history = self._cap_outliers(history)
|
||||||
return np.full(prediction_hours, np.median(clean_history))
|
return np.full(hours, np.median(clean_history))
|
||||||
|
|
||||||
def _update_data(
|
def _update_data(
|
||||||
self, force_update: Optional[bool] = False
|
self, force_update: Optional[bool] = False
|
||||||
@@ -155,14 +153,14 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
|||||||
# Assumption that all lists are the same length and are ordered chronologically
|
# Assumption that all lists are the same length and are ordered chronologically
|
||||||
# in ascending order and have the same timestamps.
|
# in ascending order and have the same timestamps.
|
||||||
|
|
||||||
# Get elecprice_charges_kwh in wh
|
# Get charges_kwh in wh
|
||||||
charges_wh = (self.config.elecprice_charges_kwh or 0) / 1000
|
charges_wh = (self.config.elecprice.charges_kwh or 0) / 1000
|
||||||
|
|
||||||
highest_orig_datetime = None # newest datetime from the api after that we want to update.
|
highest_orig_datetime = None # newest datetime from the api after that we want to update.
|
||||||
series_data = pd.Series(dtype=float) # Initialize an empty series
|
series_data = pd.Series(dtype=float) # Initialize an empty series
|
||||||
|
|
||||||
for value in akkudoktor_data.values:
|
for value in akkudoktor_data.values:
|
||||||
orig_datetime = to_datetime(value.start, in_timezone=self.config.timezone)
|
orig_datetime = to_datetime(value.start, in_timezone=self.config.general.timezone)
|
||||||
if highest_orig_datetime is None or orig_datetime > highest_orig_datetime:
|
if highest_orig_datetime is None or orig_datetime > highest_orig_datetime:
|
||||||
highest_orig_datetime = orig_datetime
|
highest_orig_datetime = orig_datetime
|
||||||
|
|
||||||
@@ -183,27 +181,23 @@ class ElecPriceAkkudoktor(ElecPriceProvider):
|
|||||||
assert highest_orig_datetime # mypy fix
|
assert highest_orig_datetime # mypy fix
|
||||||
|
|
||||||
# some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
|
# some of our data is already in the future, so we need to predict less. If we got less data we increase the prediction hours
|
||||||
needed_prediction_hours = int(
|
needed_hours = int(
|
||||||
self.config.prediction_hours
|
self.config.prediction.hours
|
||||||
- ((highest_orig_datetime - self.start_datetime).total_seconds() // 3600)
|
- ((highest_orig_datetime - self.start_datetime).total_seconds() // 3600)
|
||||||
)
|
)
|
||||||
|
|
||||||
if needed_prediction_hours <= 0:
|
if needed_hours <= 0:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
f"No prediction needed. needed_prediction_hours={needed_prediction_hours}, prediction_hours={self.config.prediction_hours},highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
|
f"No prediction needed. needed_hours={needed_hours}, hours={self.config.prediction.hours},highest_orig_datetime {highest_orig_datetime}, start_datetime {self.start_datetime}"
|
||||||
) # this might keep data longer than self.start_datetime + self.config.prediction_hours in the records
|
) # this might keep data longer than self.start_datetime + self.config.prediction.hours in the records
|
||||||
return
|
return
|
||||||
|
|
||||||
if amount_datasets > 800: # we do the full ets with seasons of 1 week
|
if amount_datasets > 800: # we do the full ets with seasons of 1 week
|
||||||
prediction = self._predict_ets(
|
prediction = self._predict_ets(history, seasonal_periods=168, hours=needed_hours)
|
||||||
history, seasonal_periods=168, prediction_hours=needed_prediction_hours
|
|
||||||
)
|
|
||||||
elif amount_datasets > 168: # not enough data to do seasons of 1 week, but enough for 1 day
|
elif amount_datasets > 168: # not enough data to do seasons of 1 week, but enough for 1 day
|
||||||
prediction = self._predict_ets(
|
prediction = self._predict_ets(history, seasonal_periods=24, hours=needed_hours)
|
||||||
history, seasonal_periods=24, prediction_hours=needed_prediction_hours
|
|
||||||
)
|
|
||||||
elif amount_datasets > 0: # not enough data for ets, do median
|
elif amount_datasets > 0: # not enough data for ets, do median
|
||||||
prediction = self._predict_median(history, prediction_hours=needed_prediction_hours)
|
prediction = self._predict_median(history, hours=needed_hours)
|
||||||
else:
|
else:
|
||||||
logger.error("No data available for prediction")
|
logger.error("No data available for prediction")
|
||||||
raise ValueError("No data available")
|
raise ValueError("No data available")
|
||||||
|
@@ -22,21 +22,22 @@ logger = get_logger(__name__)
|
|||||||
class ElecPriceImportCommonSettings(SettingsBaseModel):
|
class ElecPriceImportCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for elecprice data import from file or JSON String."""
|
"""Common settings for elecprice data import from file or JSON String."""
|
||||||
|
|
||||||
elecpriceimport_file_path: Optional[Union[str, Path]] = Field(
|
import_file_path: Optional[Union[str, Path]] = Field(
|
||||||
default=None, description="Path to the file to import elecprice data from."
|
default=None,
|
||||||
|
description="Path to the file to import elecprice data from.",
|
||||||
|
examples=[None, "/path/to/prices.json"],
|
||||||
)
|
)
|
||||||
|
|
||||||
elecpriceimport_json: Optional[str] = Field(
|
import_json: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="JSON string, dictionary of electricity price forecast value lists.",
|
description="JSON string, dictionary of electricity price forecast value lists.",
|
||||||
|
examples=['{"elecprice_marketprice_wh": [0.0003384, 0.0003318, 0.0003284]}'],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("elecpriceimport_file_path", mode="after")
|
@field_validator("import_file_path", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
def validate_elecpriceimport_file_path(
|
def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||||
cls, value: Optional[Union[str, Path]]
|
|
||||||
) -> Optional[Path]:
|
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
@@ -62,7 +63,12 @@ class ElecPriceImport(ElecPriceProvider, PredictionImportProvider):
|
|||||||
return "ElecPriceImport"
|
return "ElecPriceImport"
|
||||||
|
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
if self.config.elecpriceimport_file_path is not None:
|
if self.config.elecprice.provider_settings.import_file_path:
|
||||||
self.import_from_file(self.config.elecpriceimport_file_path, key_prefix="elecprice")
|
self.import_from_file(
|
||||||
if self.config.elecpriceimport_json is not None:
|
self.config.elecprice.provider_settings.import_file_path,
|
||||||
self.import_from_json(self.config.elecpriceimport_json, key_prefix="elecprice")
|
key_prefix="elecprice",
|
||||||
|
)
|
||||||
|
if self.config.elecprice.provider_settings.import_json:
|
||||||
|
self.import_from_json(
|
||||||
|
self.config.elecprice.provider_settings.import_json, key_prefix="elecprice"
|
||||||
|
)
|
||||||
|
@@ -6,6 +6,8 @@ from pathlib import Path
|
|||||||
import numpy as np
|
import numpy as np
|
||||||
from scipy.interpolate import RegularGridInterpolator
|
from scipy.interpolate import RegularGridInterpolator
|
||||||
|
|
||||||
|
from akkudoktoreos.core.coreabc import SingletonMixin
|
||||||
|
|
||||||
|
|
||||||
class SelfConsumptionProbabilityInterpolator:
|
class SelfConsumptionProbabilityInterpolator:
|
||||||
def __init__(self, filepath: str | Path):
|
def __init__(self, filepath: str | Path):
|
||||||
@@ -67,5 +69,17 @@ class SelfConsumptionProbabilityInterpolator:
|
|||||||
# return self_consumption_rate
|
# return self_consumption_rate
|
||||||
|
|
||||||
|
|
||||||
# Test the function
|
class EOSLoadInterpolator(SelfConsumptionProbabilityInterpolator, SingletonMixin):
|
||||||
# print(calculate_self_consumption(1000, 1200))
|
def __init__(self) -> None:
|
||||||
|
if hasattr(self, "_initialized"):
|
||||||
|
return
|
||||||
|
filename = Path(__file__).parent.resolve() / ".." / "data" / "regular_grid_interpolator.pkl"
|
||||||
|
super().__init__(filename)
|
||||||
|
|
||||||
|
|
||||||
|
# Initialize the Energy Management System, it is a singleton.
|
||||||
|
eos_load_interpolator = EOSLoadInterpolator()
|
||||||
|
|
||||||
|
|
||||||
|
def get_eos_load_interpolator() -> EOSLoadInterpolator:
|
||||||
|
return eos_load_interpolator
|
||||||
|
@@ -1,18 +1,26 @@
|
|||||||
"""Load forecast module for load predictions."""
|
"""Load forecast module for load predictions."""
|
||||||
|
|
||||||
from typing import Optional
|
from typing import Optional, Union
|
||||||
|
|
||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
||||||
|
from akkudoktoreos.prediction.loadimport import LoadImportCommonSettings
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class LoadCommonSettings(SettingsBaseModel):
|
class LoadCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for loaod forecast providers."""
|
"""Load Prediction Configuration."""
|
||||||
|
|
||||||
load_provider: Optional[str] = Field(
|
provider: Optional[str] = Field(
|
||||||
default=None, description="Load provider id of provider to be used."
|
default=None,
|
||||||
|
description="Load provider id of provider to be used.",
|
||||||
|
examples=["LoadAkkudoktor"],
|
||||||
|
)
|
||||||
|
|
||||||
|
provider_settings: Optional[Union[LoadAkkudoktorCommonSettings, LoadImportCommonSettings]] = (
|
||||||
|
Field(default=None, description="Provider settings", examples=[None])
|
||||||
)
|
)
|
||||||
|
@@ -33,18 +33,18 @@ class LoadProvider(PredictionProvider):
|
|||||||
LoadProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
LoadProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||||
|
|
||||||
Configuration variables:
|
Configuration variables:
|
||||||
load_provider (str): Prediction provider for load.
|
provider (str): Prediction provider for load.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||||
calculated based on `start_datetime` and `prediction_hours`.
|
calculated based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||||
based on `start_datetime` and `prediction_historic_hours`.
|
based on `start_datetime` and `historic_hours`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# overload
|
# overload
|
||||||
@@ -58,4 +58,4 @@ class LoadProvider(PredictionProvider):
|
|||||||
return "LoadProvider"
|
return "LoadProvider"
|
||||||
|
|
||||||
def enabled(self) -> bool:
|
def enabled(self) -> bool:
|
||||||
return self.provider_id() == self.config.load_provider
|
return self.provider_id() == self.config.load.provider
|
||||||
|
@@ -17,7 +17,7 @@ class LoadAkkudoktorCommonSettings(SettingsBaseModel):
|
|||||||
"""Common settings for load data import from file."""
|
"""Common settings for load data import from file."""
|
||||||
|
|
||||||
loadakkudoktor_year_energy: Optional[float] = Field(
|
loadakkudoktor_year_energy: Optional[float] = Field(
|
||||||
default=None, description="Yearly energy consumption (kWh)."
|
default=None, description="Yearly energy consumption (kWh).", examples=[40421]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -91,7 +91,9 @@ class LoadAkkudoktor(LoadProvider):
|
|||||||
list(zip(file_data["yearly_profiles"], file_data["yearly_profiles_std"]))
|
list(zip(file_data["yearly_profiles"], file_data["yearly_profiles_std"]))
|
||||||
)
|
)
|
||||||
# Calculate values in W by relative profile data and yearly consumption given in kWh
|
# Calculate values in W by relative profile data and yearly consumption given in kWh
|
||||||
data_year_energy = profile_data * self.config.loadakkudoktor_year_energy * 1000
|
data_year_energy = (
|
||||||
|
profile_data * self.config.load.provider_settings.loadakkudoktor_year_energy * 1000
|
||||||
|
)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
error_msg = f"Error: File {load_file} not found."
|
error_msg = f"Error: File {load_file} not found."
|
||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
@@ -109,7 +111,7 @@ class LoadAkkudoktor(LoadProvider):
|
|||||||
# We provide prediction starting at start of day, to be compatible to old system.
|
# We provide prediction starting at start of day, to be compatible to old system.
|
||||||
# End date for prediction is prediction hours from now.
|
# End date for prediction is prediction hours from now.
|
||||||
date = self.start_datetime.start_of("day")
|
date = self.start_datetime.start_of("day")
|
||||||
end_date = self.start_datetime.add(hours=self.config.prediction_hours)
|
end_date = self.start_datetime.add(hours=self.config.prediction.hours)
|
||||||
while compare_datetimes(date, end_date).lt:
|
while compare_datetimes(date, end_date).lt:
|
||||||
# Extract mean (index 0) and standard deviation (index 1) for the given day and hour
|
# Extract mean (index 0) and standard deviation (index 1) for the given day and hour
|
||||||
# Day indexing starts at 0, -1 because of that
|
# Day indexing starts at 0, -1 because of that
|
||||||
@@ -127,4 +129,4 @@ class LoadAkkudoktor(LoadProvider):
|
|||||||
self.update_value(date, values)
|
self.update_value(date, values)
|
||||||
date += to_duration("1 hour")
|
date += to_duration("1 hour")
|
||||||
# We are working on fresh data (no cache), report update time
|
# We are working on fresh data (no cache), report update time
|
||||||
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
|
self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
|
||||||
|
@@ -22,15 +22,19 @@ logger = get_logger(__name__)
|
|||||||
class LoadImportCommonSettings(SettingsBaseModel):
|
class LoadImportCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for load data import from file or JSON string."""
|
"""Common settings for load data import from file or JSON string."""
|
||||||
|
|
||||||
load_import_file_path: Optional[Union[str, Path]] = Field(
|
import_file_path: Optional[Union[str, Path]] = Field(
|
||||||
default=None, description="Path to the file to import load data from."
|
default=None,
|
||||||
|
description="Path to the file to import load data from.",
|
||||||
|
examples=[None, "/path/to/yearly_load.json"],
|
||||||
)
|
)
|
||||||
load_import_json: Optional[str] = Field(
|
import_json: Optional[str] = Field(
|
||||||
default=None, description="JSON string, dictionary of load forecast value lists."
|
default=None,
|
||||||
|
description="JSON string, dictionary of load forecast value lists.",
|
||||||
|
examples=['{"load0_mean": [676.71, 876.19, 527.13]}'],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("load_import_file_path", mode="after")
|
@field_validator("import_file_path", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
def validate_loadimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||||
if value is None:
|
if value is None:
|
||||||
@@ -58,7 +62,7 @@ class LoadImport(LoadProvider, PredictionImportProvider):
|
|||||||
return "LoadImport"
|
return "LoadImport"
|
||||||
|
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
if self.config.load_import_file_path is not None:
|
if self.config.load.provider_settings.import_file_path:
|
||||||
self.import_from_file(self.config.load_import_file_path, key_prefix="load")
|
self.import_from_file(self.config.provider_settings.import_file_path, key_prefix="load")
|
||||||
if self.config.load_import_json is not None:
|
if self.config.load.provider_settings.import_json:
|
||||||
self.import_from_json(self.config.load_import_json, key_prefix="load")
|
self.import_from_json(self.config.load.provider_settings.import_json, key_prefix="load")
|
||||||
|
@@ -28,7 +28,7 @@ Attributes:
|
|||||||
|
|
||||||
from typing import List, Optional, Union
|
from typing import List, Optional, Union
|
||||||
|
|
||||||
from pydantic import Field, computed_field
|
from pydantic import Field
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.prediction.elecpriceakkudoktor import ElecPriceAkkudoktor
|
from akkudoktoreos.prediction.elecpriceakkudoktor import ElecPriceAkkudoktor
|
||||||
@@ -41,65 +41,34 @@ from akkudoktoreos.prediction.pvforecastimport import PVForecastImport
|
|||||||
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
|
from akkudoktoreos.prediction.weatherbrightsky import WeatherBrightSky
|
||||||
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
|
from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
|
||||||
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
||||||
from akkudoktoreos.utils.datetimeutil import to_timezone
|
|
||||||
|
|
||||||
|
|
||||||
class PredictionCommonSettings(SettingsBaseModel):
|
class PredictionCommonSettings(SettingsBaseModel):
|
||||||
"""Base configuration for prediction settings, including forecast duration, geographic location, and time zone.
|
"""General Prediction Configuration.
|
||||||
|
|
||||||
This class provides configuration for prediction settings, allowing users to specify
|
This class provides configuration for prediction settings, allowing users to specify
|
||||||
parameters such as the forecast duration (in hours) and location (latitude and longitude).
|
parameters such as the forecast duration (in hours).
|
||||||
Validators ensure each parameter is within a specified range. A computed property, `timezone`,
|
Validators ensure each parameter is within a specified range.
|
||||||
determines the time zone based on latitude and longitude.
|
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (Optional[int]): Number of hours into the future for predictions.
|
hours (Optional[int]): Number of hours into the future for predictions.
|
||||||
Must be non-negative.
|
Must be non-negative.
|
||||||
prediction_historic_hours (Optional[int]): Number of hours into the past for historical data.
|
historic_hours (Optional[int]): Number of hours into the past for historical data.
|
||||||
Must be non-negative.
|
Must be non-negative.
|
||||||
latitude (Optional[float]): Latitude in degrees, must be between -90 and 90.
|
|
||||||
longitude (Optional[float]): Longitude in degrees, must be between -180 and 180.
|
|
||||||
|
|
||||||
Properties:
|
|
||||||
timezone (Optional[str]): Computed time zone string based on the specified latitude
|
|
||||||
and longitude.
|
|
||||||
|
|
||||||
Validators:
|
Validators:
|
||||||
validate_prediction_hours (int): Ensures `prediction_hours` is a non-negative integer.
|
validate_hours (int): Ensures `hours` is a non-negative integer.
|
||||||
validate_prediction_historic_hours (int): Ensures `prediction_historic_hours` is a non-negative integer.
|
validate_historic_hours (int): Ensures `historic_hours` is a non-negative integer.
|
||||||
validate_latitude (float): Ensures `latitude` is within the range -90 to 90.
|
|
||||||
validate_longitude (float): Ensures `longitude` is within the range -180 to 180.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
prediction_hours: Optional[int] = Field(
|
hours: Optional[int] = Field(
|
||||||
default=48, ge=0, description="Number of hours into the future for predictions"
|
default=48, ge=0, description="Number of hours into the future for predictions"
|
||||||
)
|
)
|
||||||
prediction_historic_hours: Optional[int] = Field(
|
historic_hours: Optional[int] = Field(
|
||||||
default=48,
|
default=48,
|
||||||
ge=0,
|
ge=0,
|
||||||
description="Number of hours into the past for historical predictions data",
|
description="Number of hours into the past for historical predictions data",
|
||||||
)
|
)
|
||||||
latitude: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
ge=-90.0,
|
|
||||||
le=90.0,
|
|
||||||
description="Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)",
|
|
||||||
)
|
|
||||||
longitude: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
ge=-180.0,
|
|
||||||
le=180.0,
|
|
||||||
description="Longitude in decimal degrees, within -180 to 180 (°)",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Computed fields
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
|
||||||
@property
|
|
||||||
def timezone(self) -> Optional[str]:
|
|
||||||
"""Compute timezone based on latitude and longitude."""
|
|
||||||
if self.latitude and self.longitude:
|
|
||||||
return to_timezone(location=(self.latitude, self.longitude), as_string=True)
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class Prediction(PredictionContainer):
|
class Prediction(PredictionContainer):
|
||||||
|
@@ -114,16 +114,16 @@ class PredictionStartEndKeepMixin(PredictionBase):
|
|||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def end_datetime(self) -> Optional[DateTime]:
|
def end_datetime(self) -> Optional[DateTime]:
|
||||||
"""Compute the end datetime based on the `start_datetime` and `prediction_hours`.
|
"""Compute the end datetime based on the `start_datetime` and `hours`.
|
||||||
|
|
||||||
Ajusts the calculated end time if DST transitions occur within the prediction window.
|
Ajusts the calculated end time if DST transitions occur within the prediction window.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
|
Optional[DateTime]: The calculated end datetime, or `None` if inputs are missing.
|
||||||
"""
|
"""
|
||||||
if self.start_datetime and self.config.prediction_hours:
|
if self.start_datetime and self.config.prediction.hours:
|
||||||
end_datetime = self.start_datetime + to_duration(
|
end_datetime = self.start_datetime + to_duration(
|
||||||
f"{self.config.prediction_hours} hours"
|
f"{self.config.prediction.hours} hours"
|
||||||
)
|
)
|
||||||
dst_change = end_datetime.offset_hours - self.start_datetime.offset_hours
|
dst_change = end_datetime.offset_hours - self.start_datetime.offset_hours
|
||||||
logger.debug(f"Pre: {self.start_datetime}..{end_datetime}: DST change: {dst_change}")
|
logger.debug(f"Pre: {self.start_datetime}..{end_datetime}: DST change: {dst_change}")
|
||||||
@@ -147,10 +147,10 @@ class PredictionStartEndKeepMixin(PredictionBase):
|
|||||||
return None
|
return None
|
||||||
historic_hours = self.historic_hours_min()
|
historic_hours = self.historic_hours_min()
|
||||||
if (
|
if (
|
||||||
self.config.prediction_historic_hours
|
self.config.prediction.historic_hours
|
||||||
and self.config.prediction_historic_hours > historic_hours
|
and self.config.prediction.historic_hours > historic_hours
|
||||||
):
|
):
|
||||||
historic_hours = int(self.config.prediction_historic_hours)
|
historic_hours = int(self.config.prediction.historic_hours)
|
||||||
return self.start_datetime - to_duration(f"{historic_hours} hours")
|
return self.start_datetime - to_duration(f"{historic_hours} hours")
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
|
@@ -1,469 +1,229 @@
|
|||||||
"""PV forecast module for PV power predictions."""
|
"""PV forecast module for PV power predictions."""
|
||||||
|
|
||||||
from typing import Any, ClassVar, List, Optional
|
from typing import Any, ClassVar, List, Optional, Self
|
||||||
|
|
||||||
from pydantic import Field, computed_field
|
from pydantic import Field, computed_field, field_validator, model_validator
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
from akkudoktoreos.prediction.pvforecastimport import PVForecastImportCommonSettings
|
||||||
|
from akkudoktoreos.utils.docs import get_model_structure_from_examples
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PVForecastPlaneSetting(SettingsBaseModel):
|
||||||
|
"""PV Forecast Plane Configuration."""
|
||||||
|
|
||||||
|
# latitude: Optional[float] = Field(default=None, description="Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)")
|
||||||
|
surface_tilt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Tilt angle from horizontal plane. Ignored for two-axis tracking.",
|
||||||
|
examples=[10.0, 20.0],
|
||||||
|
)
|
||||||
|
surface_azimuth: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
||||||
|
examples=[10.0, 20.0],
|
||||||
|
)
|
||||||
|
userhorizon: Optional[List[float]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
||||||
|
examples=[[10.0, 20.0, 30.0], [5.0, 15.0, 25.0]],
|
||||||
|
)
|
||||||
|
peakpower: Optional[float] = Field(
|
||||||
|
default=None, description="Nominal power of PV system in kW.", examples=[5.0, 3.5]
|
||||||
|
)
|
||||||
|
pvtechchoice: Optional[str] = Field(
|
||||||
|
default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
||||||
|
)
|
||||||
|
mountingplace: Optional[str] = Field(
|
||||||
|
default="free",
|
||||||
|
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
||||||
|
)
|
||||||
|
loss: Optional[float] = Field(default=14.0, description="Sum of PV system losses in percent")
|
||||||
|
trackingtype: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
ge=0,
|
||||||
|
le=5,
|
||||||
|
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
||||||
|
examples=[0, 1, 2, 3, 4, 5],
|
||||||
|
)
|
||||||
|
optimal_surface_tilt: Optional[bool] = Field(
|
||||||
|
default=False,
|
||||||
|
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
||||||
|
examples=[False],
|
||||||
|
)
|
||||||
|
optimalangles: Optional[bool] = Field(
|
||||||
|
default=False,
|
||||||
|
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
||||||
|
examples=[False],
|
||||||
|
)
|
||||||
|
albedo: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Proportion of the light hitting the ground that it reflects back.",
|
||||||
|
examples=[None],
|
||||||
|
)
|
||||||
|
module_model: Optional[str] = Field(
|
||||||
|
default=None, description="Model of the PV modules of this plane.", examples=[None]
|
||||||
|
)
|
||||||
|
inverter_model: Optional[str] = Field(
|
||||||
|
default=None, description="Model of the inverter of this plane.", examples=[None]
|
||||||
|
)
|
||||||
|
inverter_paco: Optional[int] = Field(
|
||||||
|
default=None, description="AC power rating of the inverter. [W]", examples=[6000, 4000]
|
||||||
|
)
|
||||||
|
modules_per_string: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of the PV modules of the strings of this plane.",
|
||||||
|
examples=[20],
|
||||||
|
)
|
||||||
|
strings_per_inverter: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of the strings of the inverter of this plane.",
|
||||||
|
examples=[2],
|
||||||
|
)
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def validate_list_length(self) -> Self:
|
||||||
|
# Check if either attribute is set and add to active planes
|
||||||
|
if self.trackingtype == 2:
|
||||||
|
# Tilt angle from horizontal plane is ignored for two-axis tracking.
|
||||||
|
if self.surface_azimuth is None:
|
||||||
|
raise ValueError("If trackingtype is set, azimuth must be set as well.")
|
||||||
|
elif self.surface_tilt is None or self.surface_azimuth is None:
|
||||||
|
raise ValueError("surface_tilt and surface_azimuth must be set.")
|
||||||
|
return self
|
||||||
|
|
||||||
|
@field_validator("mountingplace")
|
||||||
|
def validate_mountingplace(cls, mountingplace: Optional[str]) -> Optional[str]:
|
||||||
|
if mountingplace is not None and mountingplace not in ["free", "building"]:
|
||||||
|
raise ValueError(f"Invalid mountingplace: {mountingplace}")
|
||||||
|
return mountingplace
|
||||||
|
|
||||||
|
@field_validator("pvtechchoice")
|
||||||
|
def validate_pvtechchoice(cls, pvtechchoice: Optional[str]) -> Optional[str]:
|
||||||
|
if pvtechchoice is not None and pvtechchoice not in ["crystSi", "CIS", "CdTe", "Unknown"]:
|
||||||
|
raise ValueError(f"Invalid pvtechchoice: {pvtechchoice}")
|
||||||
|
return pvtechchoice
|
||||||
|
|
||||||
|
|
||||||
class PVForecastCommonSettings(SettingsBaseModel):
|
class PVForecastCommonSettings(SettingsBaseModel):
|
||||||
|
"""PV Forecast Configuration."""
|
||||||
|
|
||||||
# General plane parameters
|
# General plane parameters
|
||||||
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html
|
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/iotools/pvgis.html
|
||||||
# Inverter Parameters
|
# Inverter Parameters
|
||||||
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/inverter.html
|
# https://pvlib-python.readthedocs.io/en/stable/_modules/pvlib/inverter.html
|
||||||
|
|
||||||
pvforecast_provider: Optional[str] = Field(
|
provider: Optional[str] = Field(
|
||||||
default=None, description="PVForecast provider id of provider to be used."
|
|
||||||
)
|
|
||||||
# pvforecast0_latitude: Optional[float] = Field(default=None, description="Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°)")
|
|
||||||
# Plane 0
|
|
||||||
pvforecast0_surface_tilt: Optional[float] = Field(
|
|
||||||
default=None, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
|
|
||||||
)
|
|
||||||
pvforecast0_surface_azimuth: Optional[float] = Field(
|
|
||||||
default=None,
|
default=None,
|
||||||
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
description="PVForecast provider id of provider to be used.",
|
||||||
)
|
examples=["PVForecastAkkudoktor"],
|
||||||
pvforecast0_userhorizon: Optional[List[float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
|
||||||
)
|
|
||||||
pvforecast0_peakpower: Optional[float] = Field(
|
|
||||||
default=None, description="Nominal power of PV system in kW."
|
|
||||||
)
|
|
||||||
pvforecast0_pvtechchoice: Optional[str] = Field(
|
|
||||||
default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
|
||||||
)
|
|
||||||
pvforecast0_mountingplace: Optional[str] = Field(
|
|
||||||
default="free",
|
|
||||||
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
|
||||||
)
|
|
||||||
pvforecast0_loss: Optional[float] = Field(
|
|
||||||
default=14.0, description="Sum of PV system losses in percent"
|
|
||||||
)
|
|
||||||
pvforecast0_trackingtype: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
|
||||||
)
|
|
||||||
pvforecast0_optimal_surface_tilt: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast0_optimalangles: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast0_albedo: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Proportion of the light hitting the ground that it reflects back.",
|
|
||||||
)
|
|
||||||
pvforecast0_module_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the PV modules of this plane."
|
|
||||||
)
|
|
||||||
pvforecast0_inverter_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the inverter of this plane."
|
|
||||||
)
|
|
||||||
pvforecast0_inverter_paco: Optional[int] = Field(
|
|
||||||
default=None, description="AC power rating of the inverter. [W]"
|
|
||||||
)
|
|
||||||
pvforecast0_modules_per_string: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the PV modules of the strings of this plane."
|
|
||||||
)
|
|
||||||
pvforecast0_strings_per_inverter: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the strings of the inverter of this plane."
|
|
||||||
)
|
|
||||||
# Plane 1
|
|
||||||
pvforecast1_surface_tilt: Optional[float] = Field(
|
|
||||||
default=None, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
|
|
||||||
)
|
|
||||||
pvforecast1_surface_azimuth: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
|
||||||
)
|
|
||||||
pvforecast1_userhorizon: Optional[List[float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
|
||||||
)
|
|
||||||
pvforecast1_peakpower: Optional[float] = Field(
|
|
||||||
default=None, description="Nominal power of PV system in kW."
|
|
||||||
)
|
|
||||||
pvforecast1_pvtechchoice: Optional[str] = Field(
|
|
||||||
default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
|
||||||
)
|
|
||||||
pvforecast1_mountingplace: Optional[str] = Field(
|
|
||||||
default="free",
|
|
||||||
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
|
||||||
)
|
|
||||||
pvforecast1_loss: Optional[float] = Field(
|
|
||||||
default=14.0, description="Sum of PV system losses in percent"
|
|
||||||
)
|
|
||||||
pvforecast1_trackingtype: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
|
||||||
)
|
|
||||||
pvforecast1_optimal_surface_tilt: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast1_optimalangles: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast1_albedo: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Proportion of the light hitting the ground that it reflects back.",
|
|
||||||
)
|
|
||||||
pvforecast1_module_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the PV modules of this plane."
|
|
||||||
)
|
|
||||||
pvforecast1_inverter_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the inverter of this plane."
|
|
||||||
)
|
|
||||||
pvforecast1_inverter_paco: Optional[int] = Field(
|
|
||||||
default=None, description="AC power rating of the inverter. [W]"
|
|
||||||
)
|
|
||||||
pvforecast1_modules_per_string: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the PV modules of the strings of this plane."
|
|
||||||
)
|
|
||||||
pvforecast1_strings_per_inverter: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the strings of the inverter of this plane."
|
|
||||||
)
|
|
||||||
# Plane 2
|
|
||||||
pvforecast2_surface_tilt: Optional[float] = Field(
|
|
||||||
default=None, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
|
|
||||||
)
|
|
||||||
pvforecast2_surface_azimuth: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
|
||||||
)
|
|
||||||
pvforecast2_userhorizon: Optional[List[float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
|
||||||
)
|
|
||||||
pvforecast2_peakpower: Optional[float] = Field(
|
|
||||||
default=None, description="Nominal power of PV system in kW."
|
|
||||||
)
|
|
||||||
pvforecast2_pvtechchoice: Optional[str] = Field(
|
|
||||||
default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
|
||||||
)
|
|
||||||
pvforecast2_mountingplace: Optional[str] = Field(
|
|
||||||
default="free",
|
|
||||||
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
|
||||||
)
|
|
||||||
pvforecast2_loss: Optional[float] = Field(
|
|
||||||
default=14.0, description="Sum of PV system losses in percent"
|
|
||||||
)
|
|
||||||
pvforecast2_trackingtype: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
|
||||||
)
|
|
||||||
pvforecast2_optimal_surface_tilt: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast2_optimalangles: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast2_albedo: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Proportion of the light hitting the ground that it reflects back.",
|
|
||||||
)
|
|
||||||
pvforecast2_module_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the PV modules of this plane."
|
|
||||||
)
|
|
||||||
pvforecast2_inverter_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the inverter of this plane."
|
|
||||||
)
|
|
||||||
pvforecast2_inverter_paco: Optional[int] = Field(
|
|
||||||
default=None, description="AC power rating of the inverter. [W]"
|
|
||||||
)
|
|
||||||
pvforecast2_modules_per_string: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the PV modules of the strings of this plane."
|
|
||||||
)
|
|
||||||
pvforecast2_strings_per_inverter: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the strings of the inverter of this plane."
|
|
||||||
)
|
|
||||||
# Plane 3
|
|
||||||
pvforecast3_surface_tilt: Optional[float] = Field(
|
|
||||||
default=None, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
|
|
||||||
)
|
|
||||||
pvforecast3_surface_azimuth: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
|
||||||
)
|
|
||||||
pvforecast3_userhorizon: Optional[List[float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
|
||||||
)
|
|
||||||
pvforecast3_peakpower: Optional[float] = Field(
|
|
||||||
default=None, description="Nominal power of PV system in kW."
|
|
||||||
)
|
|
||||||
pvforecast3_pvtechchoice: Optional[str] = Field(
|
|
||||||
default="crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
|
||||||
)
|
|
||||||
pvforecast3_mountingplace: Optional[str] = Field(
|
|
||||||
default="free",
|
|
||||||
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
|
||||||
)
|
|
||||||
pvforecast3_loss: Optional[float] = Field(
|
|
||||||
default=14.0, description="Sum of PV system losses in percent"
|
|
||||||
)
|
|
||||||
pvforecast3_trackingtype: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
|
||||||
)
|
|
||||||
pvforecast3_optimal_surface_tilt: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast3_optimalangles: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast3_albedo: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Proportion of the light hitting the ground that it reflects back.",
|
|
||||||
)
|
|
||||||
pvforecast3_module_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the PV modules of this plane."
|
|
||||||
)
|
|
||||||
pvforecast3_inverter_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the inverter of this plane."
|
|
||||||
)
|
|
||||||
pvforecast3_inverter_paco: Optional[int] = Field(
|
|
||||||
default=None, description="AC power rating of the inverter. [W]"
|
|
||||||
)
|
|
||||||
pvforecast3_modules_per_string: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the PV modules of the strings of this plane."
|
|
||||||
)
|
|
||||||
pvforecast3_strings_per_inverter: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the strings of the inverter of this plane."
|
|
||||||
)
|
|
||||||
# Plane 4
|
|
||||||
pvforecast4_surface_tilt: Optional[float] = Field(
|
|
||||||
default=None, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
|
|
||||||
)
|
|
||||||
pvforecast4_surface_azimuth: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
|
||||||
)
|
|
||||||
pvforecast4_userhorizon: Optional[List[float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
|
||||||
)
|
|
||||||
pvforecast4_peakpower: Optional[float] = Field(
|
|
||||||
default=None, description="Nominal power of PV system in kW."
|
|
||||||
)
|
|
||||||
pvforecast4_pvtechchoice: Optional[str] = Field(
|
|
||||||
"crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
|
||||||
)
|
|
||||||
pvforecast4_mountingplace: Optional[str] = Field(
|
|
||||||
default="free",
|
|
||||||
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
|
||||||
)
|
|
||||||
pvforecast4_loss: Optional[float] = Field(
|
|
||||||
default=14.0, description="Sum of PV system losses in percent"
|
|
||||||
)
|
|
||||||
pvforecast4_trackingtype: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
|
||||||
)
|
|
||||||
pvforecast4_optimal_surface_tilt: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast4_optimalangles: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast4_albedo: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Proportion of the light hitting the ground that it reflects back.",
|
|
||||||
)
|
|
||||||
pvforecast4_module_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the PV modules of this plane."
|
|
||||||
)
|
|
||||||
pvforecast4_inverter_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the inverter of this plane."
|
|
||||||
)
|
|
||||||
pvforecast4_inverter_paco: Optional[int] = Field(
|
|
||||||
default=None, description="AC power rating of the inverter. [W]"
|
|
||||||
)
|
|
||||||
pvforecast4_modules_per_string: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the PV modules of the strings of this plane."
|
|
||||||
)
|
|
||||||
pvforecast4_strings_per_inverter: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the strings of the inverter of this plane."
|
|
||||||
)
|
|
||||||
# Plane 5
|
|
||||||
pvforecast5_surface_tilt: Optional[float] = Field(
|
|
||||||
default=None, description="Tilt angle from horizontal plane. Ignored for two-axis tracking."
|
|
||||||
)
|
|
||||||
pvforecast5_surface_azimuth: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Orientation (azimuth angle) of the (fixed) plane. Clockwise from north (north=0, east=90, south=180, west=270).",
|
|
||||||
)
|
|
||||||
pvforecast5_userhorizon: Optional[List[float]] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Elevation of horizon in degrees, at equally spaced azimuth clockwise from north.",
|
|
||||||
)
|
|
||||||
pvforecast5_peakpower: Optional[float] = Field(
|
|
||||||
default=None, description="Nominal power of PV system in kW."
|
|
||||||
)
|
|
||||||
pvforecast5_pvtechchoice: Optional[str] = Field(
|
|
||||||
"crystSi", description="PV technology. One of 'crystSi', 'CIS', 'CdTe', 'Unknown'."
|
|
||||||
)
|
|
||||||
pvforecast5_mountingplace: Optional[str] = Field(
|
|
||||||
default="free",
|
|
||||||
description="Type of mounting for PV system. Options are 'free' for free-standing and 'building' for building-integrated.",
|
|
||||||
)
|
|
||||||
pvforecast5_loss: Optional[float] = Field(
|
|
||||||
default=14.0, description="Sum of PV system losses in percent"
|
|
||||||
)
|
|
||||||
pvforecast5_trackingtype: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Type of suntracking. 0=fixed, 1=single horizontal axis aligned north-south, 2=two-axis tracking, 3=vertical axis tracking, 4=single horizontal axis aligned east-west, 5=single inclined axis aligned north-south.",
|
|
||||||
)
|
|
||||||
pvforecast5_optimal_surface_tilt: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt angle. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast5_optimalangles: Optional[bool] = Field(
|
|
||||||
default=False,
|
|
||||||
description="Calculate the optimum tilt and azimuth angles. Ignored for two-axis tracking.",
|
|
||||||
)
|
|
||||||
pvforecast5_albedo: Optional[float] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Proportion of the light hitting the ground that it reflects back.",
|
|
||||||
)
|
|
||||||
pvforecast5_module_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the PV modules of this plane."
|
|
||||||
)
|
|
||||||
pvforecast5_inverter_model: Optional[str] = Field(
|
|
||||||
default=None, description="Model of the inverter of this plane."
|
|
||||||
)
|
|
||||||
pvforecast5_inverter_paco: Optional[int] = Field(
|
|
||||||
default=None, description="AC power rating of the inverter. [W]"
|
|
||||||
)
|
|
||||||
pvforecast5_modules_per_string: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the PV modules of the strings of this plane."
|
|
||||||
)
|
|
||||||
pvforecast5_strings_per_inverter: Optional[int] = Field(
|
|
||||||
default=None, description="Number of the strings of the inverter of this plane."
|
|
||||||
)
|
)
|
||||||
|
|
||||||
pvforecast_max_planes: ClassVar[int] = 6 # Maximum number of planes that can be set
|
planes: Optional[list[PVForecastPlaneSetting]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Plane configuration.",
|
||||||
|
examples=[get_model_structure_from_examples(PVForecastPlaneSetting, True)],
|
||||||
|
)
|
||||||
|
|
||||||
# Computed fields
|
max_planes: ClassVar[int] = 6 # Maximum number of planes that can be set
|
||||||
|
|
||||||
|
@field_validator("planes")
|
||||||
|
def validate_planes(
|
||||||
|
cls, planes: Optional[list[PVForecastPlaneSetting]]
|
||||||
|
) -> Optional[list[PVForecastPlaneSetting]]:
|
||||||
|
if planes is not None and len(planes) > cls.max_planes:
|
||||||
|
raise ValueError(f"Maximum number of supported planes: {cls.max_planes}.")
|
||||||
|
return planes
|
||||||
|
|
||||||
|
provider_settings: Optional[PVForecastImportCommonSettings] = Field(
|
||||||
|
default=None, description="Provider settings", examples=[None]
|
||||||
|
)
|
||||||
|
|
||||||
|
## Computed fields
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def pvforecast_planes(self) -> List[str]:
|
def planes_peakpower(self) -> List[float]:
|
||||||
"""Compute a list of active planes."""
|
|
||||||
active_planes = []
|
|
||||||
|
|
||||||
# Loop through pvforecast0 to pvforecast4
|
|
||||||
for i in range(self.pvforecast_max_planes):
|
|
||||||
plane = f"pvforecast{i}"
|
|
||||||
tackingtype_attr = f"{plane}_trackingtype"
|
|
||||||
tilt_attr = f"{plane}_surface_tilt"
|
|
||||||
azimuth_attr = f"{plane}_surface_azimuth"
|
|
||||||
|
|
||||||
# Check if either attribute is set and add to active planes
|
|
||||||
if getattr(self, tackingtype_attr, None) == 2:
|
|
||||||
# Tilt angle from horizontal plane is gnored for two-axis tracking.
|
|
||||||
if getattr(self, azimuth_attr, None) is not None:
|
|
||||||
active_planes.append(f"pvforecast{i}")
|
|
||||||
elif getattr(self, tilt_attr, None) and getattr(self, azimuth_attr, None):
|
|
||||||
active_planes.append(f"pvforecast{i}")
|
|
||||||
|
|
||||||
return active_planes
|
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
|
||||||
@property
|
|
||||||
def pvforecast_planes_peakpower(self) -> List[float]:
|
|
||||||
"""Compute a list of the peak power per active planes."""
|
"""Compute a list of the peak power per active planes."""
|
||||||
planes_peakpower = []
|
planes_peakpower = []
|
||||||
|
|
||||||
for plane in self.pvforecast_planes:
|
if self.planes:
|
||||||
peakpower_attr = f"{plane}_peakpower"
|
for plane in self.planes:
|
||||||
peakpower = getattr(self, peakpower_attr, None)
|
peakpower = plane.peakpower
|
||||||
if peakpower is None:
|
if peakpower is None:
|
||||||
# TODO calculate peak power from modules/strings
|
# TODO calculate peak power from modules/strings
|
||||||
planes_peakpower.append(float(5000))
|
planes_peakpower.append(float(5000))
|
||||||
else:
|
else:
|
||||||
planes_peakpower.append(float(peakpower))
|
planes_peakpower.append(float(peakpower))
|
||||||
|
|
||||||
return planes_peakpower
|
return planes_peakpower
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def pvforecast_planes_azimuth(self) -> List[float]:
|
def planes_azimuth(self) -> List[float]:
|
||||||
"""Compute a list of the azimuths per active planes."""
|
"""Compute a list of the azimuths per active planes."""
|
||||||
planes_azimuth = []
|
planes_azimuth = []
|
||||||
|
|
||||||
for plane in self.pvforecast_planes:
|
if self.planes:
|
||||||
azimuth_attr = f"{plane}_surface_azimuth"
|
for plane in self.planes:
|
||||||
azimuth = getattr(self, azimuth_attr, None)
|
azimuth = plane.surface_azimuth
|
||||||
if azimuth is None:
|
if azimuth is None:
|
||||||
# TODO Use default
|
# TODO Use default
|
||||||
planes_azimuth.append(float(180))
|
planes_azimuth.append(float(180))
|
||||||
else:
|
else:
|
||||||
planes_azimuth.append(float(azimuth))
|
planes_azimuth.append(float(azimuth))
|
||||||
|
|
||||||
return planes_azimuth
|
return planes_azimuth
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def pvforecast_planes_tilt(self) -> List[float]:
|
def planes_tilt(self) -> List[float]:
|
||||||
"""Compute a list of the tilts per active planes."""
|
"""Compute a list of the tilts per active planes."""
|
||||||
planes_tilt = []
|
planes_tilt = []
|
||||||
|
|
||||||
for plane in self.pvforecast_planes:
|
if self.planes:
|
||||||
tilt_attr = f"{plane}_surface_tilt"
|
for plane in self.planes:
|
||||||
tilt = getattr(self, tilt_attr, None)
|
tilt = plane.surface_tilt
|
||||||
if tilt is None:
|
if tilt is None:
|
||||||
# TODO Use default
|
# TODO Use default
|
||||||
planes_tilt.append(float(30))
|
planes_tilt.append(float(30))
|
||||||
else:
|
else:
|
||||||
planes_tilt.append(float(tilt))
|
planes_tilt.append(float(tilt))
|
||||||
|
|
||||||
return planes_tilt
|
return planes_tilt
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def pvforecast_planes_userhorizon(self) -> Any:
|
def planes_userhorizon(self) -> Any:
|
||||||
"""Compute a list of the user horizon per active planes."""
|
"""Compute a list of the user horizon per active planes."""
|
||||||
planes_userhorizon = []
|
planes_userhorizon = []
|
||||||
|
|
||||||
for plane in self.pvforecast_planes:
|
if self.planes:
|
||||||
userhorizon_attr = f"{plane}_userhorizon"
|
for plane in self.planes:
|
||||||
userhorizon = getattr(self, userhorizon_attr, None)
|
userhorizon = plane.userhorizon
|
||||||
if userhorizon is None:
|
if userhorizon is None:
|
||||||
# TODO Use default
|
# TODO Use default
|
||||||
planes_userhorizon.append([float(0), float(0)])
|
planes_userhorizon.append([float(0), float(0)])
|
||||||
else:
|
else:
|
||||||
planes_userhorizon.append(userhorizon)
|
planes_userhorizon.append(userhorizon)
|
||||||
|
|
||||||
return planes_userhorizon
|
return planes_userhorizon
|
||||||
|
|
||||||
@computed_field # type: ignore[prop-decorator]
|
@computed_field # type: ignore[prop-decorator]
|
||||||
@property
|
@property
|
||||||
def pvforecast_planes_inverter_paco(self) -> Any:
|
def planes_inverter_paco(self) -> Any:
|
||||||
"""Compute a list of the maximum power rating of the inverter per active planes."""
|
"""Compute a list of the maximum power rating of the inverter per active planes."""
|
||||||
planes_inverter_paco = []
|
planes_inverter_paco = []
|
||||||
|
|
||||||
for plane in self.pvforecast_planes:
|
if self.planes:
|
||||||
inverter_paco_attr = f"{plane}_inverter_paco"
|
for plane in self.planes:
|
||||||
inverter_paco = getattr(self, inverter_paco_attr, None)
|
inverter_paco = plane.inverter_paco
|
||||||
if inverter_paco is None:
|
if inverter_paco is None:
|
||||||
# TODO Use default - no clipping
|
# TODO Use default - no clipping
|
||||||
planes_inverter_paco.append(25000.0)
|
planes_inverter_paco.append(25000.0)
|
||||||
else:
|
else:
|
||||||
planes_inverter_paco.append(float(inverter_paco))
|
planes_inverter_paco.append(float(inverter_paco))
|
||||||
|
|
||||||
return planes_inverter_paco
|
return planes_inverter_paco
|
||||||
|
@@ -28,18 +28,18 @@ class PVForecastProvider(PredictionProvider):
|
|||||||
PVForecastProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
PVForecastProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||||
|
|
||||||
Configuration variables:
|
Configuration variables:
|
||||||
pvforecast_provider (str): Prediction provider for pvforecast.
|
provider (str): Prediction provider for pvforecast.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
start_datetime (datetime, optional): The starting datetime for predictions (inlcusive), defaults to the current datetime if unspecified.
|
start_datetime (datetime, optional): The starting datetime for predictions (inlcusive), defaults to the current datetime if unspecified.
|
||||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range (exclusive),
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range (exclusive),
|
||||||
calculated based on `start_datetime` and `prediction_hours`.
|
calculated based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data (inclusive), calculated
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data (inclusive), calculated
|
||||||
based on `start_datetime` and `prediction_historic_hours`.
|
based on `start_datetime` and `historic_hours`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# overload
|
# overload
|
||||||
@@ -54,6 +54,6 @@ class PVForecastProvider(PredictionProvider):
|
|||||||
|
|
||||||
def enabled(self) -> bool:
|
def enabled(self) -> bool:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast_provider}"
|
f"PVForecastProvider ID {self.provider_id()} vs. config {self.config.pvforecast.provider}"
|
||||||
)
|
)
|
||||||
return self.provider_id() == self.config.pvforecast_provider
|
return self.provider_id() == self.config.pvforecast.provider
|
||||||
|
@@ -14,21 +14,33 @@ Classes:
|
|||||||
Example:
|
Example:
|
||||||
# Set up the configuration with necessary fields for URL generation
|
# Set up the configuration with necessary fields for URL generation
|
||||||
settings_data = {
|
settings_data = {
|
||||||
"prediction_hours": 48,
|
"general": {
|
||||||
"prediction_historic_hours": 24,
|
"latitude": 52.52,
|
||||||
"latitude": 52.52,
|
"longitude": 13.405,
|
||||||
"longitude": 13.405,
|
},
|
||||||
"pvforecast_provider": "Akkudoktor",
|
"prediction": {
|
||||||
"pvforecast0_peakpower": 5.0,
|
"hours": 48,
|
||||||
"pvforecast0_surface_azimuth": -10,
|
"historic_hours": 24,
|
||||||
"pvforecast0_surface_tilt": 7,
|
},
|
||||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
"pvforecast": {
|
||||||
"pvforecast0_inverter_paco": 10000,
|
"provider": "PVForecastAkkudoktor",
|
||||||
"pvforecast1_peakpower": 4.8,
|
"planes": [
|
||||||
"pvforecast1_surface_azimuth": -90,
|
{
|
||||||
"pvforecast1_surface_tilt": 7,
|
"peakpower": 5.0,
|
||||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
"surface_azimuth": -10,
|
||||||
"pvforecast1_inverter_paco": 10000,
|
"surface_tilt": 7,
|
||||||
|
"userhorizon": [20, 27, 22, 20],
|
||||||
|
"inverter_paco": 10000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"peakpower": 4.8,
|
||||||
|
"surface_azimuth": -90,
|
||||||
|
"surface_tilt": 7,
|
||||||
|
"userhorizon": [30, 30, 30, 50],
|
||||||
|
"inverter_paco": 10000,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# Create the config instance from the provided data
|
# Create the config instance from the provided data
|
||||||
@@ -47,12 +59,12 @@ Example:
|
|||||||
print(forecast.report_ac_power_and_measurement())
|
print(forecast.report_ac_power_and_measurement())
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int): Number of hours into the future to forecast. Default is 48.
|
hours (int): Number of hours into the future to forecast. Default is 48.
|
||||||
prediction_historic_hours (int): Number of past hours to retain for analysis. Default is 24.
|
historic_hours (int): Number of past hours to retain for analysis. Default is 24.
|
||||||
latitude (float): Latitude for the forecast location.
|
latitude (float): Latitude for the forecast location.
|
||||||
longitude (float): Longitude for the forecast location.
|
longitude (float): Longitude for the forecast location.
|
||||||
start_datetime (datetime): Start time for the forecast, defaulting to current datetime.
|
start_datetime (datetime): Start time for the forecast, defaulting to current datetime.
|
||||||
end_datetime (datetime): Computed end datetime based on `start_datetime` and `prediction_hours`.
|
end_datetime (datetime): Computed end datetime based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime): Computed threshold datetime for retaining historical data.
|
keep_datetime (datetime): Computed threshold datetime for retaining historical data.
|
||||||
|
|
||||||
Methods:
|
Methods:
|
||||||
@@ -159,13 +171,13 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
of hours into the future and retains historical data.
|
of hours into the future and retains historical data.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
hours (int, optional): Number of hours in the future for the forecast.
|
||||||
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
historic_hours (int, optional): Number of past hours for retaining data.
|
||||||
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
||||||
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
||||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
||||||
|
|
||||||
Methods:
|
Methods:
|
||||||
provider_id(): Returns a unique identifier for the provider.
|
provider_id(): Returns a unique identifier for the provider.
|
||||||
@@ -203,19 +215,19 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
"""Build akkudoktor.net API request URL."""
|
"""Build akkudoktor.net API request URL."""
|
||||||
base_url = "https://api.akkudoktor.net/forecast"
|
base_url = "https://api.akkudoktor.net/forecast"
|
||||||
query_params = [
|
query_params = [
|
||||||
f"lat={self.config.latitude}",
|
f"lat={self.config.general.latitude}",
|
||||||
f"lon={self.config.longitude}",
|
f"lon={self.config.general.longitude}",
|
||||||
]
|
]
|
||||||
|
|
||||||
for i in range(len(self.config.pvforecast_planes)):
|
for i in range(len(self.config.pvforecast.planes)):
|
||||||
query_params.append(f"power={int(self.config.pvforecast_planes_peakpower[i] * 1000)}")
|
query_params.append(f"power={int(self.config.pvforecast.planes_peakpower[i] * 1000)}")
|
||||||
query_params.append(f"azimuth={int(self.config.pvforecast_planes_azimuth[i])}")
|
query_params.append(f"azimuth={int(self.config.pvforecast.planes_azimuth[i])}")
|
||||||
query_params.append(f"tilt={int(self.config.pvforecast_planes_tilt[i])}")
|
query_params.append(f"tilt={int(self.config.pvforecast.planes_tilt[i])}")
|
||||||
query_params.append(
|
query_params.append(
|
||||||
f"powerInverter={int(self.config.pvforecast_planes_inverter_paco[i])}"
|
f"powerInverter={int(self.config.pvforecast.planes_inverter_paco[i])}"
|
||||||
)
|
)
|
||||||
horizon_values = ",".join(
|
horizon_values = ",".join(
|
||||||
str(int(h)) for h in self.config.pvforecast_planes_userhorizon[i]
|
str(int(h)) for h in self.config.pvforecast.planes_userhorizon[i]
|
||||||
)
|
)
|
||||||
query_params.append(f"horizont={horizon_values}")
|
query_params.append(f"horizont={horizon_values}")
|
||||||
|
|
||||||
@@ -226,7 +238,7 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
"cellCoEff=-0.36",
|
"cellCoEff=-0.36",
|
||||||
"inverterEfficiency=0.8",
|
"inverterEfficiency=0.8",
|
||||||
"albedo=0.25",
|
"albedo=0.25",
|
||||||
f"timezone={self.config.timezone}",
|
f"timezone={self.config.general.timezone}",
|
||||||
"hourly=relativehumidity_2m%2Cwindspeed_10m",
|
"hourly=relativehumidity_2m%2Cwindspeed_10m",
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
@@ -255,7 +267,7 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
logger.debug(f"Response from {self._url()}: {response}")
|
logger.debug(f"Response from {self._url()}: {response}")
|
||||||
akkudoktor_data = self._validate_data(response.content)
|
akkudoktor_data = self._validate_data(response.content)
|
||||||
# We are working on fresh data (no cache), report update time
|
# We are working on fresh data (no cache), report update time
|
||||||
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
|
self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
|
||||||
return akkudoktor_data
|
return akkudoktor_data
|
||||||
|
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
@@ -265,7 +277,7 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
`PVForecastAkkudoktorDataRecord`.
|
`PVForecastAkkudoktorDataRecord`.
|
||||||
"""
|
"""
|
||||||
# Assure we have something to request PV power for.
|
# Assure we have something to request PV power for.
|
||||||
if not self.config.pvforecast_planes:
|
if not self.config.pvforecast.planes:
|
||||||
# No planes for PV
|
# No planes for PV
|
||||||
error_msg = "Requested PV forecast, but no planes configured."
|
error_msg = "Requested PV forecast, but no planes configured."
|
||||||
logger.error(f"Configuration error: {error_msg}")
|
logger.error(f"Configuration error: {error_msg}")
|
||||||
@@ -275,17 +287,17 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
akkudoktor_data = self._request_forecast(force_update=force_update) # type: ignore
|
akkudoktor_data = self._request_forecast(force_update=force_update) # type: ignore
|
||||||
|
|
||||||
# Timezone of the PV system
|
# Timezone of the PV system
|
||||||
if self.config.timezone != akkudoktor_data.meta.timezone:
|
if self.config.general.timezone != akkudoktor_data.meta.timezone:
|
||||||
error_msg = f"Configured timezone '{self.config.timezone}' does not match Akkudoktor timezone '{akkudoktor_data.meta.timezone}'."
|
error_msg = f"Configured timezone '{self.config.general.timezone}' does not match Akkudoktor timezone '{akkudoktor_data.meta.timezone}'."
|
||||||
logger.error(f"Akkudoktor schema change: {error_msg}")
|
logger.error(f"Akkudoktor schema change: {error_msg}")
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
|
|
||||||
# Assumption that all lists are the same length and are ordered chronologically
|
# Assumption that all lists are the same length and are ordered chronologically
|
||||||
# in ascending order and have the same timestamps.
|
# in ascending order and have the same timestamps.
|
||||||
if len(akkudoktor_data.values[0]) < self.config.prediction_hours:
|
if len(akkudoktor_data.values[0]) < self.config.prediction.hours:
|
||||||
# Expect one value set per prediction hour
|
# Expect one value set per prediction hour
|
||||||
error_msg = (
|
error_msg = (
|
||||||
f"The forecast must cover at least {self.config.prediction_hours} hours, "
|
f"The forecast must cover at least {self.config.prediction.hours} hours, "
|
||||||
f"but only {len(akkudoktor_data.values[0])} data sets are given in forecast data."
|
f"but only {len(akkudoktor_data.values[0])} data sets are given in forecast data."
|
||||||
)
|
)
|
||||||
logger.error(f"Akkudoktor schema change: {error_msg}")
|
logger.error(f"Akkudoktor schema change: {error_msg}")
|
||||||
@@ -296,7 +308,7 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
# Iterate over forecast data points
|
# Iterate over forecast data points
|
||||||
for forecast_values in zip(*akkudoktor_data.values):
|
for forecast_values in zip(*akkudoktor_data.values):
|
||||||
original_datetime = forecast_values[0].datetime
|
original_datetime = forecast_values[0].datetime
|
||||||
dt = to_datetime(original_datetime, in_timezone=self.config.timezone)
|
dt = to_datetime(original_datetime, in_timezone=self.config.general.timezone)
|
||||||
|
|
||||||
# Skip outdated forecast data
|
# Skip outdated forecast data
|
||||||
if compare_datetimes(dt, self.start_datetime.start_of("day")).lt:
|
if compare_datetimes(dt, self.start_datetime.start_of("day")).lt:
|
||||||
@@ -314,9 +326,9 @@ class PVForecastAkkudoktor(PVForecastProvider):
|
|||||||
|
|
||||||
self.update_value(dt, data)
|
self.update_value(dt, data)
|
||||||
|
|
||||||
if len(self) < self.config.prediction_hours:
|
if len(self) < self.config.prediction.hours:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"The forecast must cover at least {self.config.prediction_hours} hours, "
|
f"The forecast must cover at least {self.config.prediction.hours} hours, "
|
||||||
f"but only {len(self)} hours starting from {self.start_datetime} "
|
f"but only {len(self)} hours starting from {self.start_datetime} "
|
||||||
f"were predicted."
|
f"were predicted."
|
||||||
)
|
)
|
||||||
@@ -365,31 +377,47 @@ if __name__ == "__main__":
|
|||||||
"""
|
"""
|
||||||
# Set up the configuration with necessary fields for URL generation
|
# Set up the configuration with necessary fields for URL generation
|
||||||
settings_data = {
|
settings_data = {
|
||||||
"prediction_hours": 48,
|
"general": {
|
||||||
"prediction_historic_hours": 24,
|
"latitude": 52.52,
|
||||||
"latitude": 52.52,
|
"longitude": 13.405,
|
||||||
"longitude": 13.405,
|
},
|
||||||
"pvforecast_provider": "PVForecastAkkudoktor",
|
"prediction": {
|
||||||
"pvforecast0_peakpower": 5.0,
|
"hours": 48,
|
||||||
"pvforecast0_surface_azimuth": -10,
|
"historic_hours": 24,
|
||||||
"pvforecast0_surface_tilt": 7,
|
},
|
||||||
"pvforecast0_userhorizon": [20, 27, 22, 20],
|
"pvforecast": {
|
||||||
"pvforecast0_inverter_paco": 10000,
|
"provider": "PVForecastAkkudoktor",
|
||||||
"pvforecast1_peakpower": 4.8,
|
"planes": [
|
||||||
"pvforecast1_surface_azimuth": -90,
|
{
|
||||||
"pvforecast1_surface_tilt": 7,
|
"peakpower": 5.0,
|
||||||
"pvforecast1_userhorizon": [30, 30, 30, 50],
|
"surface_azimuth": -10,
|
||||||
"pvforecast1_inverter_paco": 10000,
|
"surface_tilt": 7,
|
||||||
"pvforecast2_peakpower": 1.4,
|
"userhorizon": [20, 27, 22, 20],
|
||||||
"pvforecast2_surface_azimuth": -40,
|
"inverter_paco": 10000,
|
||||||
"pvforecast2_surface_tilt": 60,
|
},
|
||||||
"pvforecast2_userhorizon": [60, 30, 0, 30],
|
{
|
||||||
"pvforecast2_inverter_paco": 2000,
|
"peakpower": 4.8,
|
||||||
"pvforecast3_peakpower": 1.6,
|
"surface_azimuth": -90,
|
||||||
"pvforecast3_surface_azimuth": 5,
|
"surface_tilt": 7,
|
||||||
"pvforecast3_surface_tilt": 45,
|
"userhorizon": [30, 30, 30, 50],
|
||||||
"pvforecast3_userhorizon": [45, 25, 30, 60],
|
"inverter_paco": 10000,
|
||||||
"pvforecast3_inverter_paco": 1400,
|
},
|
||||||
|
{
|
||||||
|
"peakpower": 1.4,
|
||||||
|
"surface_azimuth": -40,
|
||||||
|
"surface_tilt": 60,
|
||||||
|
"userhorizon": [60, 30, 0, 30],
|
||||||
|
"inverter_paco": 2000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"peakpower": 1.6,
|
||||||
|
"surface_azimuth": 5,
|
||||||
|
"surface_tilt": 45,
|
||||||
|
"userhorizon": [45, 25, 30, 60],
|
||||||
|
"inverter_paco": 1400,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
# Initialize the forecast object with the generated configuration
|
# Initialize the forecast object with the generated configuration
|
||||||
|
@@ -22,21 +22,22 @@ logger = get_logger(__name__)
|
|||||||
class PVForecastImportCommonSettings(SettingsBaseModel):
|
class PVForecastImportCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for pvforecast data import from file or JSON string."""
|
"""Common settings for pvforecast data import from file or JSON string."""
|
||||||
|
|
||||||
pvforecastimport_file_path: Optional[Union[str, Path]] = Field(
|
import_file_path: Optional[Union[str, Path]] = Field(
|
||||||
default=None, description="Path to the file to import PV forecast data from."
|
default=None,
|
||||||
|
description="Path to the file to import PV forecast data from.",
|
||||||
|
examples=[None, "/path/to/pvforecast.json"],
|
||||||
)
|
)
|
||||||
|
|
||||||
pvforecastimport_json: Optional[str] = Field(
|
import_json: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="JSON string, dictionary of PV forecast value lists.",
|
description="JSON string, dictionary of PV forecast value lists.",
|
||||||
|
examples=['{"pvforecast_ac_power": [0, 8.05, 352.91]}'],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("pvforecastimport_file_path", mode="after")
|
@field_validator("import_file_path", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
def validate_pvforecastimport_file_path(
|
def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||||
cls, value: Optional[Union[str, Path]]
|
|
||||||
) -> Optional[Path]:
|
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
@@ -62,7 +63,13 @@ class PVForecastImport(PVForecastProvider, PredictionImportProvider):
|
|||||||
return "PVForecastImport"
|
return "PVForecastImport"
|
||||||
|
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
if self.config.pvforecastimport_file_path is not None:
|
if self.config.pvforecast.provider_settings.import_file_path is not None:
|
||||||
self.import_from_file(self.config.pvforecastimport_file_path, key_prefix="pvforecast")
|
self.import_from_file(
|
||||||
if self.config.pvforecastimport_json is not None:
|
self.config.pvforecast.provider_settings.import_file_path,
|
||||||
self.import_from_json(self.config.pvforecastimport_json, key_prefix="pvforecast")
|
key_prefix="pvforecast",
|
||||||
|
)
|
||||||
|
if self.config.pvforecast.provider_settings.import_json is not None:
|
||||||
|
self.import_from_json(
|
||||||
|
self.config.pvforecast.provider_settings.import_json,
|
||||||
|
key_prefix="pvforecast",
|
||||||
|
)
|
||||||
|
@@ -5,9 +5,18 @@ from typing import Optional
|
|||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
|
|
||||||
from akkudoktoreos.config.configabc import SettingsBaseModel
|
from akkudoktoreos.config.configabc import SettingsBaseModel
|
||||||
|
from akkudoktoreos.prediction.weatherimport import WeatherImportCommonSettings
|
||||||
|
|
||||||
|
|
||||||
class WeatherCommonSettings(SettingsBaseModel):
|
class WeatherCommonSettings(SettingsBaseModel):
|
||||||
weather_provider: Optional[str] = Field(
|
"""Weather Forecast Configuration."""
|
||||||
default=None, description="Weather provider id of provider to be used."
|
|
||||||
|
provider: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Weather provider id of provider to be used.",
|
||||||
|
examples=["WeatherImport"],
|
||||||
|
)
|
||||||
|
|
||||||
|
provider_settings: Optional[WeatherImportCommonSettings] = Field(
|
||||||
|
default=None, description="Provider settings", examples=[None]
|
||||||
)
|
)
|
||||||
|
@@ -101,18 +101,18 @@ class WeatherProvider(PredictionProvider):
|
|||||||
WeatherProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
WeatherProvider is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||||
|
|
||||||
Configuration variables:
|
Configuration variables:
|
||||||
weather_provider (str): Prediction provider for weather.
|
provider (str): Prediction provider for weather.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||||
calculated based on `start_datetime` and `prediction_hours`.
|
calculated based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||||
based on `start_datetime` and `prediction_historic_hours`.
|
based on `start_datetime` and `historic_hours`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# overload
|
# overload
|
||||||
@@ -126,7 +126,7 @@ class WeatherProvider(PredictionProvider):
|
|||||||
return "WeatherProvider"
|
return "WeatherProvider"
|
||||||
|
|
||||||
def enabled(self) -> bool:
|
def enabled(self) -> bool:
|
||||||
return self.provider_id() == self.config.weather_provider
|
return self.provider_id() == self.config.weather.provider
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def estimate_irradiance_from_cloud_cover(
|
def estimate_irradiance_from_cloud_cover(
|
||||||
|
@@ -62,13 +62,13 @@ class WeatherBrightSky(WeatherProvider):
|
|||||||
of hours into the future and retains historical data.
|
of hours into the future and retains historical data.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): Number of hours in the future for the forecast.
|
hours (int, optional): Number of hours in the future for the forecast.
|
||||||
prediction_historic_hours (int, optional): Number of past hours for retaining data.
|
historic_hours (int, optional): Number of past hours for retaining data.
|
||||||
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
latitude (float, optional): The latitude in degrees, validated to be between -90 and 90.
|
||||||
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
longitude (float, optional): The longitude in degrees, validated to be between -180 and 180.
|
||||||
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
start_datetime (datetime, optional): Start datetime for forecasts, defaults to the current datetime.
|
||||||
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `prediction_hours`.
|
end_datetime (datetime, computed): The forecast's end datetime, computed based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `prediction_historic_hours`.
|
keep_datetime (datetime, computed): The datetime to retain historical data, computed from `start_datetime` and `historic_hours`.
|
||||||
|
|
||||||
Methods:
|
Methods:
|
||||||
provider_id(): Returns a unique identifier for the provider.
|
provider_id(): Returns a unique identifier for the provider.
|
||||||
@@ -99,7 +99,7 @@ class WeatherBrightSky(WeatherProvider):
|
|||||||
date = to_datetime(self.start_datetime, as_string="YYYY-MM-DD")
|
date = to_datetime(self.start_datetime, as_string="YYYY-MM-DD")
|
||||||
last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
|
last_date = to_datetime(self.end_datetime, as_string="YYYY-MM-DD")
|
||||||
response = requests.get(
|
response = requests.get(
|
||||||
f"{source}/weather?lat={self.config.latitude}&lon={self.config.longitude}&date={date}&last_date={last_date}&tz={self.config.timezone}"
|
f"{source}/weather?lat={self.config.general.latitude}&lon={self.config.general.longitude}&date={date}&last_date={last_date}&tz={self.config.general.timezone}"
|
||||||
)
|
)
|
||||||
response.raise_for_status() # Raise an error for bad responses
|
response.raise_for_status() # Raise an error for bad responses
|
||||||
logger.debug(f"Response from {source}: {response}")
|
logger.debug(f"Response from {source}: {response}")
|
||||||
@@ -109,7 +109,7 @@ class WeatherBrightSky(WeatherProvider):
|
|||||||
logger.error(error_msg)
|
logger.error(error_msg)
|
||||||
raise ValueError(error_msg)
|
raise ValueError(error_msg)
|
||||||
# We are working on fresh data (no cache), report update time
|
# We are working on fresh data (no cache), report update time
|
||||||
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
|
self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
|
||||||
return brightsky_data
|
return brightsky_data
|
||||||
|
|
||||||
def _description_to_series(self, description: str) -> pd.Series:
|
def _description_to_series(self, description: str) -> pd.Series:
|
||||||
@@ -200,7 +200,7 @@ class WeatherBrightSky(WeatherProvider):
|
|||||||
description = "Total Clouds (% Sky Obscured)"
|
description = "Total Clouds (% Sky Obscured)"
|
||||||
cloud_cover = self._description_to_series(description)
|
cloud_cover = self._description_to_series(description)
|
||||||
ghi, dni, dhi = self.estimate_irradiance_from_cloud_cover(
|
ghi, dni, dhi = self.estimate_irradiance_from_cloud_cover(
|
||||||
self.config.latitude, self.config.longitude, cloud_cover
|
self.config.general.latitude, self.config.general.longitude, cloud_cover
|
||||||
)
|
)
|
||||||
|
|
||||||
description = "Global Horizontal Irradiance (W/m2)"
|
description = "Global Horizontal Irradiance (W/m2)"
|
||||||
|
@@ -68,15 +68,15 @@ class WeatherClearOutside(WeatherProvider):
|
|||||||
WeatherClearOutside is a thread-safe singleton, ensuring only one instance of this class is created.
|
WeatherClearOutside is a thread-safe singleton, ensuring only one instance of this class is created.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
prediction_hours (int, optional): The number of hours into the future for which predictions are generated.
|
hours (int, optional): The number of hours into the future for which predictions are generated.
|
||||||
prediction_historic_hours (int, optional): The number of past hours for which historical data is retained.
|
historic_hours (int, optional): The number of past hours for which historical data is retained.
|
||||||
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
latitude (float, optional): The latitude in degrees, must be within -90 to 90.
|
||||||
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
longitude (float, optional): The longitude in degrees, must be within -180 to 180.
|
||||||
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
start_datetime (datetime, optional): The starting datetime for predictions, defaults to the current datetime if unspecified.
|
||||||
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
end_datetime (datetime, computed): The datetime representing the end of the prediction range,
|
||||||
calculated based on `start_datetime` and `prediction_hours`.
|
calculated based on `start_datetime` and `hours`.
|
||||||
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
keep_datetime (datetime, computed): The earliest datetime for retaining historical data, calculated
|
||||||
based on `start_datetime` and `prediction_historic_hours`.
|
based on `start_datetime` and `historic_hours`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -91,13 +91,13 @@ class WeatherClearOutside(WeatherProvider):
|
|||||||
response: Weather forecast request reponse from ClearOutside.
|
response: Weather forecast request reponse from ClearOutside.
|
||||||
"""
|
"""
|
||||||
source = "https://clearoutside.com/forecast"
|
source = "https://clearoutside.com/forecast"
|
||||||
latitude = round(self.config.latitude, 2)
|
latitude = round(self.config.general.latitude, 2)
|
||||||
longitude = round(self.config.longitude, 2)
|
longitude = round(self.config.general.longitude, 2)
|
||||||
response = requests.get(f"{source}/{latitude}/{longitude}?desktop=true")
|
response = requests.get(f"{source}/{latitude}/{longitude}?desktop=true")
|
||||||
response.raise_for_status() # Raise an error for bad responses
|
response.raise_for_status() # Raise an error for bad responses
|
||||||
logger.debug(f"Response from {source}: {response}")
|
logger.debug(f"Response from {source}: {response}")
|
||||||
# We are working on fresh data (no cache), report update time
|
# We are working on fresh data (no cache), report update time
|
||||||
self.update_datetime = to_datetime(in_timezone=self.config.timezone)
|
self.update_datetime = to_datetime(in_timezone=self.config.general.timezone)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def _update_data(self, force_update: Optional[bool] = None) -> None:
|
def _update_data(self, force_update: Optional[bool] = None) -> None:
|
||||||
@@ -307,7 +307,7 @@ class WeatherClearOutside(WeatherProvider):
|
|||||||
data=clearout_data["Total Clouds (% Sky Obscured)"], index=clearout_data["DateTime"]
|
data=clearout_data["Total Clouds (% Sky Obscured)"], index=clearout_data["DateTime"]
|
||||||
)
|
)
|
||||||
ghi, dni, dhi = self.estimate_irradiance_from_cloud_cover(
|
ghi, dni, dhi = self.estimate_irradiance_from_cloud_cover(
|
||||||
self.config.latitude, self.config.longitude, cloud_cover
|
self.config.general.latitude, self.config.general.longitude, cloud_cover
|
||||||
)
|
)
|
||||||
|
|
||||||
# Add GHI, DNI, DHI to clearout data
|
# Add GHI, DNI, DHI to clearout data
|
||||||
|
@@ -22,18 +22,22 @@ logger = get_logger(__name__)
|
|||||||
class WeatherImportCommonSettings(SettingsBaseModel):
|
class WeatherImportCommonSettings(SettingsBaseModel):
|
||||||
"""Common settings for weather data import from file or JSON string."""
|
"""Common settings for weather data import from file or JSON string."""
|
||||||
|
|
||||||
weatherimport_file_path: Optional[Union[str, Path]] = Field(
|
import_file_path: Optional[Union[str, Path]] = Field(
|
||||||
default=None, description="Path to the file to import weather data from."
|
default=None,
|
||||||
|
description="Path to the file to import weather data from.",
|
||||||
|
examples=[None, "/path/to/weather_data.json"],
|
||||||
)
|
)
|
||||||
|
|
||||||
weatherimport_json: Optional[str] = Field(
|
import_json: Optional[str] = Field(
|
||||||
default=None, description="JSON string, dictionary of weather forecast value lists."
|
default=None,
|
||||||
|
description="JSON string, dictionary of weather forecast value lists.",
|
||||||
|
examples=['{"weather_temp_air": [18.3, 17.8, 16.9]}'],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Validators
|
# Validators
|
||||||
@field_validator("weatherimport_file_path", mode="after")
|
@field_validator("import_file_path", mode="after")
|
||||||
@classmethod
|
@classmethod
|
||||||
def validate_weatherimport_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
def validate_import_file_path(cls, value: Optional[Union[str, Path]]) -> Optional[Path]:
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
@@ -59,7 +63,11 @@ class WeatherImport(WeatherProvider, PredictionImportProvider):
|
|||||||
return "WeatherImport"
|
return "WeatherImport"
|
||||||
|
|
||||||
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
def _update_data(self, force_update: Optional[bool] = False) -> None:
|
||||||
if self.config.weatherimport_file_path is not None:
|
if self.config.weather.provider_settings.import_file_path:
|
||||||
self.import_from_file(self.config.weatherimport_file_path, key_prefix="weather")
|
self.import_from_file(
|
||||||
if self.config.weatherimport_json is not None:
|
self.config.weather.provider_settings.import_file_path, key_prefix="weather"
|
||||||
self.import_from_json(self.config.weatherimport_json, key_prefix="weather")
|
)
|
||||||
|
if self.config.weather.provider_settings.import_json:
|
||||||
|
self.import_from_json(
|
||||||
|
self.config.weather.provider_settings.import_json, key_prefix="weather"
|
||||||
|
)
|
||||||
|
@@ -29,7 +29,11 @@ from akkudoktoreos.optimization.genetic import (
|
|||||||
OptimizeResponse,
|
OptimizeResponse,
|
||||||
optimization_problem,
|
optimization_problem,
|
||||||
)
|
)
|
||||||
from akkudoktoreos.prediction.prediction import get_prediction
|
from akkudoktoreos.prediction.elecprice import ElecPriceCommonSettings
|
||||||
|
from akkudoktoreos.prediction.load import LoadCommonSettings
|
||||||
|
from akkudoktoreos.prediction.loadakkudoktor import LoadAkkudoktorCommonSettings
|
||||||
|
from akkudoktoreos.prediction.prediction import PredictionCommonSettings, get_prediction
|
||||||
|
from akkudoktoreos.prediction.pvforecast import PVForecastCommonSettings
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
from akkudoktoreos.utils.datetimeutil import to_datetime, to_duration
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@@ -149,16 +153,16 @@ def start_eosdash() -> subprocess.Popen:
|
|||||||
|
|
||||||
if args is None:
|
if args is None:
|
||||||
# No command line arguments
|
# No command line arguments
|
||||||
host = config_eos.server_eosdash_host
|
host = config_eos.server.eosdash_host
|
||||||
port = config_eos.server_eosdash_port
|
port = config_eos.server.eosdash_port
|
||||||
eos_host = config_eos.server_eos_host
|
eos_host = config_eos.server.host
|
||||||
eos_port = config_eos.server_eos_port
|
eos_port = config_eos.server.port
|
||||||
log_level = "info"
|
log_level = "info"
|
||||||
access_log = False
|
access_log = False
|
||||||
reload = False
|
reload = False
|
||||||
else:
|
else:
|
||||||
host = args.host
|
host = args.host
|
||||||
port = config_eos.server_eosdash_port if config_eos.server_eosdash_port else (args.port + 1)
|
port = config_eos.server.eosdash_port if config_eos.server.eosdash_port else (args.port + 1)
|
||||||
eos_host = args.host
|
eos_host = args.host
|
||||||
eos_port = args.port
|
eos_port = args.port
|
||||||
log_level = args.log_level
|
log_level = args.log_level
|
||||||
@@ -201,7 +205,7 @@ def start_eosdash() -> subprocess.Popen:
|
|||||||
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||||
"""Lifespan manager for the app."""
|
"""Lifespan manager for the app."""
|
||||||
# On startup
|
# On startup
|
||||||
if config_eos.server_eos_startup_eosdash:
|
if config_eos.server.startup_eosdash:
|
||||||
try:
|
try:
|
||||||
eosdash_process = start_eosdash()
|
eosdash_process = start_eosdash()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -223,12 +227,9 @@ app = FastAPI(
|
|||||||
"url": "https://www.apache.org/licenses/LICENSE-2.0.html",
|
"url": "https://www.apache.org/licenses/LICENSE-2.0.html",
|
||||||
},
|
},
|
||||||
lifespan=lifespan,
|
lifespan=lifespan,
|
||||||
|
root_path=str(Path(__file__).parent),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
# That's the problem
|
|
||||||
opt_class = optimization_problem(verbose=bool(config_eos.server_eos_verbose))
|
|
||||||
|
|
||||||
server_dir = Path(__file__).parent.resolve()
|
server_dir = Path(__file__).parent.resolve()
|
||||||
|
|
||||||
|
|
||||||
@@ -236,66 +237,24 @@ class PdfResponse(FileResponse):
|
|||||||
media_type = "application/pdf"
|
media_type = "application/pdf"
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/config/value")
|
@app.put("/v1/config/reset", tags=["config"])
|
||||||
def fastapi_config_value_put(
|
|
||||||
key: Annotated[str, Query(description="configuration key")],
|
|
||||||
value: Annotated[Any, Query(description="configuration value")],
|
|
||||||
) -> ConfigEOS:
|
|
||||||
"""Set the configuration option in the settings.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
key (str): configuration key
|
|
||||||
value (Any): configuration value
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
configuration (ConfigEOS): The current configuration after the write.
|
|
||||||
"""
|
|
||||||
if key not in config_eos.config_keys:
|
|
||||||
raise HTTPException(status_code=404, detail=f"Key '{key}' is not available.")
|
|
||||||
if key in config_eos.config_keys_read_only:
|
|
||||||
raise HTTPException(status_code=404, detail=f"Key '{key}' is read only.")
|
|
||||||
try:
|
|
||||||
setattr(config_eos, key, value)
|
|
||||||
except Exception as e:
|
|
||||||
raise HTTPException(status_code=400, detail=f"Error on update of configuration: {e}")
|
|
||||||
return config_eos
|
|
||||||
|
|
||||||
|
|
||||||
@app.post("/v1/config/update")
|
|
||||||
def fastapi_config_update_post() -> ConfigEOS:
|
def fastapi_config_update_post() -> ConfigEOS:
|
||||||
"""Update the configuration from the EOS configuration file.
|
"""Reset the configuration to the EOS configuration file.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
configuration (ConfigEOS): The current configuration after update.
|
configuration (ConfigEOS): The current configuration after update.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
_, config_file_path = config_eos.from_config_file()
|
config_eos.reset_settings()
|
||||||
except:
|
except Exception as e:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404,
|
status_code=404,
|
||||||
detail=f"Cannot update configuration from file '{config_file_path}'.",
|
detail=f"Cannot update configuration from file '{config_eos.config_file_path}': {e}",
|
||||||
)
|
)
|
||||||
return config_eos
|
return config_eos
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/config/file")
|
@app.put("/v1/config/file", tags=["config"])
|
||||||
def fastapi_config_file_get() -> SettingsEOS:
|
|
||||||
"""Get the settings as defined by the EOS configuration file.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
settings (SettingsEOS): The settings defined by the EOS configuration file.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
settings, config_file_path = config_eos.settings_from_config_file()
|
|
||||||
except:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=404,
|
|
||||||
detail=f"Cannot read configuration from file '{config_file_path}'.",
|
|
||||||
)
|
|
||||||
return settings
|
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/config/file")
|
|
||||||
def fastapi_config_file_put() -> ConfigEOS:
|
def fastapi_config_file_put() -> ConfigEOS:
|
||||||
"""Save the current configuration to the EOS configuration file.
|
"""Save the current configuration to the EOS configuration file.
|
||||||
|
|
||||||
@@ -312,7 +271,7 @@ def fastapi_config_file_put() -> ConfigEOS:
|
|||||||
return config_eos
|
return config_eos
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/config")
|
@app.get("/v1/config", tags=["config"])
|
||||||
def fastapi_config_get() -> ConfigEOS:
|
def fastapi_config_get() -> ConfigEOS:
|
||||||
"""Get the current configuration.
|
"""Get the current configuration.
|
||||||
|
|
||||||
@@ -322,15 +281,13 @@ def fastapi_config_get() -> ConfigEOS:
|
|||||||
return config_eos
|
return config_eos
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/config")
|
@app.put("/v1/config", tags=["config"])
|
||||||
def fastapi_config_put(
|
def fastapi_config_put(settings: SettingsEOS) -> ConfigEOS:
|
||||||
settings: Annotated[SettingsEOS, Query(description="settings")],
|
"""Update the current config with the provided settings.
|
||||||
) -> ConfigEOS:
|
|
||||||
"""Write the provided settings into the current settings.
|
|
||||||
|
|
||||||
The existing settings are completely overwritten. Note that for any setting
|
Note that for any setting value that is None or unset, the configuration will fall back to
|
||||||
value that is None, the configuration will fall back to values from other sources such as
|
values from other sources such as environment variables, the EOS configuration file, or default
|
||||||
environment variables, the EOS configuration file, or default values.
|
values.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
settings (SettingsEOS): The settings to write into the current settings.
|
settings (SettingsEOS): The settings to write into the current settings.
|
||||||
@@ -339,19 +296,19 @@ def fastapi_config_put(
|
|||||||
configuration (ConfigEOS): The current configuration after the write.
|
configuration (ConfigEOS): The current configuration after the write.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
config_eos.merge_settings(settings, force=True)
|
config_eos.merge_settings(settings)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(status_code=400, detail=f"Error on update of configuration: {e}")
|
raise HTTPException(status_code=400, detail=f"Error on update of configuration: {e}")
|
||||||
return config_eos
|
return config_eos
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/measurement/keys")
|
@app.get("/v1/measurement/keys", tags=["measurement"])
|
||||||
def fastapi_measurement_keys_get() -> list[str]:
|
def fastapi_measurement_keys_get() -> list[str]:
|
||||||
"""Get a list of available measurement keys."""
|
"""Get a list of available measurement keys."""
|
||||||
return sorted(measurement_eos.record_keys)
|
return sorted(measurement_eos.record_keys)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/measurement/load-mr/series/by-name")
|
@app.get("/v1/measurement/load-mr/series/by-name", tags=["measurement"])
|
||||||
def fastapi_measurement_load_mr_series_by_name_get(
|
def fastapi_measurement_load_mr_series_by_name_get(
|
||||||
name: Annotated[str, Query(description="Load name.")],
|
name: Annotated[str, Query(description="Load name.")],
|
||||||
) -> PydanticDateTimeSeries:
|
) -> PydanticDateTimeSeries:
|
||||||
@@ -367,7 +324,7 @@ def fastapi_measurement_load_mr_series_by_name_get(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/measurement/load-mr/value/by-name")
|
@app.put("/v1/measurement/load-mr/value/by-name", tags=["measurement"])
|
||||||
def fastapi_measurement_load_mr_value_by_name_put(
|
def fastapi_measurement_load_mr_value_by_name_put(
|
||||||
datetime: Annotated[str, Query(description="Datetime.")],
|
datetime: Annotated[str, Query(description="Datetime.")],
|
||||||
name: Annotated[str, Query(description="Load name.")],
|
name: Annotated[str, Query(description="Load name.")],
|
||||||
@@ -386,7 +343,7 @@ def fastapi_measurement_load_mr_value_by_name_put(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/measurement/load-mr/series/by-name")
|
@app.put("/v1/measurement/load-mr/series/by-name", tags=["measurement"])
|
||||||
def fastapi_measurement_load_mr_series_by_name_put(
|
def fastapi_measurement_load_mr_series_by_name_put(
|
||||||
name: Annotated[str, Query(description="Load name.")], series: PydanticDateTimeSeries
|
name: Annotated[str, Query(description="Load name.")], series: PydanticDateTimeSeries
|
||||||
) -> PydanticDateTimeSeries:
|
) -> PydanticDateTimeSeries:
|
||||||
@@ -404,7 +361,7 @@ def fastapi_measurement_load_mr_series_by_name_put(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/measurement/series")
|
@app.get("/v1/measurement/series", tags=["measurement"])
|
||||||
def fastapi_measurement_series_get(
|
def fastapi_measurement_series_get(
|
||||||
key: Annotated[str, Query(description="Prediction key.")],
|
key: Annotated[str, Query(description="Prediction key.")],
|
||||||
) -> PydanticDateTimeSeries:
|
) -> PydanticDateTimeSeries:
|
||||||
@@ -415,7 +372,7 @@ def fastapi_measurement_series_get(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/measurement/value")
|
@app.put("/v1/measurement/value", tags=["measurement"])
|
||||||
def fastapi_measurement_value_put(
|
def fastapi_measurement_value_put(
|
||||||
datetime: Annotated[str, Query(description="Datetime.")],
|
datetime: Annotated[str, Query(description="Datetime.")],
|
||||||
key: Annotated[str, Query(description="Prediction key.")],
|
key: Annotated[str, Query(description="Prediction key.")],
|
||||||
@@ -429,7 +386,7 @@ def fastapi_measurement_value_put(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/measurement/series")
|
@app.put("/v1/measurement/series", tags=["measurement"])
|
||||||
def fastapi_measurement_series_put(
|
def fastapi_measurement_series_put(
|
||||||
key: Annotated[str, Query(description="Prediction key.")], series: PydanticDateTimeSeries
|
key: Annotated[str, Query(description="Prediction key.")], series: PydanticDateTimeSeries
|
||||||
) -> PydanticDateTimeSeries:
|
) -> PydanticDateTimeSeries:
|
||||||
@@ -442,27 +399,47 @@ def fastapi_measurement_series_put(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/measurement/dataframe")
|
@app.put("/v1/measurement/dataframe", tags=["measurement"])
|
||||||
def fastapi_measurement_dataframe_put(data: PydanticDateTimeDataFrame) -> None:
|
def fastapi_measurement_dataframe_put(data: PydanticDateTimeDataFrame) -> None:
|
||||||
"""Merge the measurement data given as dataframe into EOS measurements."""
|
"""Merge the measurement data given as dataframe into EOS measurements."""
|
||||||
dataframe = data.to_dataframe()
|
dataframe = data.to_dataframe()
|
||||||
measurement_eos.import_from_dataframe(dataframe)
|
measurement_eos.import_from_dataframe(dataframe)
|
||||||
|
|
||||||
|
|
||||||
@app.put("/v1/measurement/data")
|
@app.put("/v1/measurement/data", tags=["measurement"])
|
||||||
def fastapi_measurement_data_put(data: PydanticDateTimeData) -> None:
|
def fastapi_measurement_data_put(data: PydanticDateTimeData) -> None:
|
||||||
"""Merge the measurement data given as datetime data into EOS measurements."""
|
"""Merge the measurement data given as datetime data into EOS measurements."""
|
||||||
datetimedata = data.to_dict()
|
datetimedata = data.to_dict()
|
||||||
measurement_eos.import_from_dict(datetimedata)
|
measurement_eos.import_from_dict(datetimedata)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/prediction/keys")
|
@app.get("/v1/prediction/providers", tags=["prediction"])
|
||||||
|
def fastapi_prediction_providers_get(enabled: Optional[bool] = None) -> list[str]:
|
||||||
|
"""Get a list of available prediction providers.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
enabled (bool): Return enabled/disabled providers. If unset, return all providers.
|
||||||
|
"""
|
||||||
|
if enabled is not None:
|
||||||
|
enabled_status = [enabled]
|
||||||
|
else:
|
||||||
|
enabled_status = [True, False]
|
||||||
|
return sorted(
|
||||||
|
[
|
||||||
|
provider.provider_id()
|
||||||
|
for provider in prediction_eos.providers
|
||||||
|
if provider.enabled() in enabled_status
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/v1/prediction/keys", tags=["prediction"])
|
||||||
def fastapi_prediction_keys_get() -> list[str]:
|
def fastapi_prediction_keys_get() -> list[str]:
|
||||||
"""Get a list of available prediction keys."""
|
"""Get a list of available prediction keys."""
|
||||||
return sorted(prediction_eos.record_keys)
|
return sorted(prediction_eos.record_keys)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/prediction/series")
|
@app.get("/v1/prediction/series", tags=["prediction"])
|
||||||
def fastapi_prediction_series_get(
|
def fastapi_prediction_series_get(
|
||||||
key: Annotated[str, Query(description="Prediction key.")],
|
key: Annotated[str, Query(description="Prediction key.")],
|
||||||
start_datetime: Annotated[
|
start_datetime: Annotated[
|
||||||
@@ -499,7 +476,7 @@ def fastapi_prediction_series_get(
|
|||||||
return PydanticDateTimeSeries.from_series(pdseries)
|
return PydanticDateTimeSeries.from_series(pdseries)
|
||||||
|
|
||||||
|
|
||||||
@app.get("/v1/prediction/list")
|
@app.get("/v1/prediction/list", tags=["prediction"])
|
||||||
def fastapi_prediction_list_get(
|
def fastapi_prediction_list_get(
|
||||||
key: Annotated[str, Query(description="Prediction key.")],
|
key: Annotated[str, Query(description="Prediction key.")],
|
||||||
start_datetime: Annotated[
|
start_datetime: Annotated[
|
||||||
@@ -549,7 +526,7 @@ def fastapi_prediction_list_get(
|
|||||||
return prediction_list
|
return prediction_list
|
||||||
|
|
||||||
|
|
||||||
@app.post("/v1/prediction/update")
|
@app.post("/v1/prediction/update", tags=["prediction"])
|
||||||
def fastapi_prediction_update(force_update: bool = False, force_enable: bool = False) -> Response:
|
def fastapi_prediction_update(force_update: bool = False, force_enable: bool = False) -> Response:
|
||||||
"""Update predictions for all providers.
|
"""Update predictions for all providers.
|
||||||
|
|
||||||
@@ -562,11 +539,12 @@ def fastapi_prediction_update(force_update: bool = False, force_enable: bool = F
|
|||||||
try:
|
try:
|
||||||
prediction_eos.update_data(force_update=force_update, force_enable=force_enable)
|
prediction_eos.update_data(force_update=force_update, force_enable=force_enable)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(status_code=400, detail=f"Error on update of provider: {e}")
|
raise e
|
||||||
|
# raise HTTPException(status_code=400, detail=f"Error on update of provider: {e}")
|
||||||
return Response()
|
return Response()
|
||||||
|
|
||||||
|
|
||||||
@app.post("/v1/prediction/update/{provider_id}")
|
@app.post("/v1/prediction/update/{provider_id}", tags=["prediction"])
|
||||||
def fastapi_prediction_update_provider(
|
def fastapi_prediction_update_provider(
|
||||||
provider_id: str, force_update: Optional[bool] = False, force_enable: Optional[bool] = False
|
provider_id: str, force_update: Optional[bool] = False, force_enable: Optional[bool] = False
|
||||||
) -> Response:
|
) -> Response:
|
||||||
@@ -590,7 +568,7 @@ def fastapi_prediction_update_provider(
|
|||||||
return Response()
|
return Response()
|
||||||
|
|
||||||
|
|
||||||
@app.get("/strompreis")
|
@app.get("/strompreis", tags=["prediction"])
|
||||||
def fastapi_strompreis() -> list[float]:
|
def fastapi_strompreis() -> list[float]:
|
||||||
"""Deprecated: Electricity Market Price Prediction per Wh (€/Wh).
|
"""Deprecated: Electricity Market Price Prediction per Wh (€/Wh).
|
||||||
|
|
||||||
@@ -602,14 +580,16 @@ def fastapi_strompreis() -> list[float]:
|
|||||||
Electricity price charges are added.
|
Electricity price charges are added.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set ElecPriceAkkudoktor as elecprice_provider, then update data with
|
Set ElecPriceAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=elecprice_marketprice_wh' or
|
'/v1/prediction/list?key=elecprice_marketprice_wh' or
|
||||||
'/v1/prediction/list?key=elecprice_marketprice_kwh' instead.
|
'/v1/prediction/list?key=elecprice_marketprice_kwh' instead.
|
||||||
"""
|
"""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
elecprice_provider="ElecPriceAkkudoktor",
|
elecprice=ElecPriceCommonSettings(
|
||||||
|
provider="ElecPriceAkkudoktor",
|
||||||
|
)
|
||||||
)
|
)
|
||||||
config_eos.merge_settings(settings=settings)
|
config_eos.merge_settings(settings=settings)
|
||||||
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
||||||
@@ -642,7 +622,7 @@ class GesamtlastRequest(PydanticBaseModel):
|
|||||||
hours: int
|
hours: int
|
||||||
|
|
||||||
|
|
||||||
@app.post("/gesamtlast")
|
@app.post("/gesamtlast", tags=["prediction"])
|
||||||
def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
|
def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
|
||||||
"""Deprecated: Total Load Prediction with adjustment.
|
"""Deprecated: Total Load Prediction with adjustment.
|
||||||
|
|
||||||
@@ -659,16 +639,22 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
|
|||||||
'/v1/measurement/value'
|
'/v1/measurement/value'
|
||||||
"""
|
"""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
prediction_hours=request.hours,
|
prediction=PredictionCommonSettings(
|
||||||
load_provider="LoadAkkudoktor",
|
hours=request.hours,
|
||||||
loadakkudoktor_year_energy=request.year_energy,
|
),
|
||||||
|
load=LoadCommonSettings(
|
||||||
|
provider="LoadAkkudoktor",
|
||||||
|
provider_settings=LoadAkkudoktorCommonSettings(
|
||||||
|
loadakkudoktor_year_energy=request.year_energy,
|
||||||
|
),
|
||||||
|
),
|
||||||
)
|
)
|
||||||
config_eos.merge_settings(settings=settings)
|
config_eos.merge_settings(settings=settings)
|
||||||
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
||||||
|
|
||||||
# Insert measured data into EOS measurement
|
# Insert measured data into EOS measurement
|
||||||
# Convert from energy per interval to dummy energy meter readings
|
# Convert from energy per interval to dummy energy meter readings
|
||||||
measurement_key = "measurement_load0_mr"
|
measurement_key = "load0_mr"
|
||||||
measurement_eos.key_delete_by_datetime(key=measurement_key) # delete all load0_mr measurements
|
measurement_eos.key_delete_by_datetime(key=measurement_key) # delete all load0_mr measurements
|
||||||
energy = {}
|
energy = {}
|
||||||
try:
|
try:
|
||||||
@@ -717,7 +703,7 @@ def fastapi_gesamtlast(request: GesamtlastRequest) -> list[float]:
|
|||||||
return prediction_list
|
return prediction_list
|
||||||
|
|
||||||
|
|
||||||
@app.get("/gesamtlast_simple")
|
@app.get("/gesamtlast_simple", tags=["prediction"])
|
||||||
def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
|
def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
|
||||||
"""Deprecated: Total Load Prediction.
|
"""Deprecated: Total Load Prediction.
|
||||||
|
|
||||||
@@ -731,14 +717,18 @@ def fastapi_gesamtlast_simple(year_energy: float) -> list[float]:
|
|||||||
year_energy (float): Yearly energy consumption in Wh.
|
year_energy (float): Yearly energy consumption in Wh.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set LoadAkkudoktor as load_provider, then update data with
|
Set LoadAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=load_mean' instead.
|
'/v1/prediction/list?key=load_mean' instead.
|
||||||
"""
|
"""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
load_provider="LoadAkkudoktor",
|
load=LoadCommonSettings(
|
||||||
loadakkudoktor_year_energy=year_energy / 1000, # Convert to kWh
|
provider="LoadAkkudoktor",
|
||||||
|
provider_settings=LoadAkkudoktorCommonSettings(
|
||||||
|
loadakkudoktor_year_energy=year_energy / 1000, # Convert to kWh
|
||||||
|
),
|
||||||
|
)
|
||||||
)
|
)
|
||||||
config_eos.merge_settings(settings=settings)
|
config_eos.merge_settings(settings=settings)
|
||||||
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
||||||
@@ -769,7 +759,7 @@ class ForecastResponse(PydanticBaseModel):
|
|||||||
pvpower: list[float]
|
pvpower: list[float]
|
||||||
|
|
||||||
|
|
||||||
@app.get("/pvforecast")
|
@app.get("/pvforecast", tags=["prediction"])
|
||||||
def fastapi_pvforecast() -> ForecastResponse:
|
def fastapi_pvforecast() -> ForecastResponse:
|
||||||
"""Deprecated: PV Forecast Prediction.
|
"""Deprecated: PV Forecast Prediction.
|
||||||
|
|
||||||
@@ -780,21 +770,25 @@ def fastapi_pvforecast() -> ForecastResponse:
|
|||||||
filled with the first available forecast value.
|
filled with the first available forecast value.
|
||||||
|
|
||||||
Note:
|
Note:
|
||||||
Set PVForecastAkkudoktor as pvforecast_provider, then update data with
|
Set PVForecastAkkudoktor as provider, then update data with
|
||||||
'/v1/prediction/update'
|
'/v1/prediction/update'
|
||||||
and then request data with
|
and then request data with
|
||||||
'/v1/prediction/list?key=pvforecast_ac_power' and
|
'/v1/prediction/list?key=pvforecast_ac_power' and
|
||||||
'/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.
|
'/v1/prediction/list?key=pvforecastakkudoktor_temp_air' instead.
|
||||||
"""
|
"""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(pvforecast=PVForecastCommonSettings(provider="PVForecastAkkudoktor"))
|
||||||
elecprice_provider="PVForecastAkkudoktor",
|
|
||||||
)
|
|
||||||
config_eos.merge_settings(settings=settings)
|
config_eos.merge_settings(settings=settings)
|
||||||
|
|
||||||
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
ems_eos.set_start_datetime() # Set energy management start datetime to current hour.
|
||||||
|
|
||||||
# Create PV forecast
|
# Create PV forecast
|
||||||
prediction_eos.update_data(force_update=True)
|
try:
|
||||||
|
prediction_eos.update_data(force_update=True)
|
||||||
|
except ValueError as e:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=404,
|
||||||
|
detail=f"Can not get the PV forecast: {e}",
|
||||||
|
)
|
||||||
|
|
||||||
# Get the forcast starting at start of day
|
# Get the forcast starting at start of day
|
||||||
start_datetime = to_datetime().start_of("day")
|
start_datetime = to_datetime().start_of("day")
|
||||||
@@ -820,30 +814,35 @@ def fastapi_pvforecast() -> ForecastResponse:
|
|||||||
return ForecastResponse(temperature=temp_air, pvpower=ac_power)
|
return ForecastResponse(temperature=temp_air, pvpower=ac_power)
|
||||||
|
|
||||||
|
|
||||||
@app.post("/optimize")
|
@app.post("/optimize", tags=["optimize"])
|
||||||
def fastapi_optimize(
|
def fastapi_optimize(
|
||||||
parameters: OptimizationParameters,
|
parameters: OptimizationParameters,
|
||||||
start_hour: Annotated[
|
start_hour: Annotated[
|
||||||
Optional[int], Query(description="Defaults to current hour of the day.")
|
Optional[int], Query(description="Defaults to current hour of the day.")
|
||||||
] = None,
|
] = None,
|
||||||
|
ngen: Optional[int] = None,
|
||||||
) -> OptimizeResponse:
|
) -> OptimizeResponse:
|
||||||
if start_hour is None:
|
if start_hour is None:
|
||||||
start_hour = to_datetime().hour
|
start_hour = to_datetime().hour
|
||||||
|
extra_args: dict[str, Any] = dict()
|
||||||
|
if ngen is not None:
|
||||||
|
extra_args["ngen"] = ngen
|
||||||
|
|
||||||
# TODO: Remove when config and prediction update is done by EMS.
|
# TODO: Remove when config and prediction update is done by EMS.
|
||||||
config_eos.update()
|
config_eos.update()
|
||||||
prediction_eos.update_data()
|
prediction_eos.update_data()
|
||||||
|
|
||||||
# Perform optimization simulation
|
# Perform optimization simulation
|
||||||
result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour)
|
opt_class = optimization_problem(verbose=bool(config_eos.server.verbose))
|
||||||
|
result = opt_class.optimierung_ems(parameters=parameters, start_hour=start_hour, **extra_args)
|
||||||
# print(result)
|
# print(result)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
@app.get("/visualization_results.pdf", response_class=PdfResponse)
|
@app.get("/visualization_results.pdf", response_class=PdfResponse, tags=["optimize"])
|
||||||
def get_pdf() -> PdfResponse:
|
def get_pdf() -> PdfResponse:
|
||||||
# Endpoint to serve the generated PDF with visualization results
|
# Endpoint to serve the generated PDF with visualization results
|
||||||
output_path = config_eos.data_output_path
|
output_path = config_eos.general.data_output_path
|
||||||
if output_path is None or not output_path.is_dir():
|
if output_path is None or not output_path.is_dir():
|
||||||
raise HTTPException(status_code=404, detail=f"Output path does not exist: {output_path}.")
|
raise HTTPException(status_code=404, detail=f"Output path does not exist: {output_path}.")
|
||||||
file_path = output_path / "visualization_results.pdf"
|
file_path = output_path / "visualization_results.pdf"
|
||||||
@@ -859,35 +858,34 @@ def site_map() -> RedirectResponse:
|
|||||||
|
|
||||||
# Keep the proxy last to handle all requests that are not taken by the Rest API.
|
# Keep the proxy last to handle all requests that are not taken by the Rest API.
|
||||||
|
|
||||||
|
if config_eos.server.startup_eosdash:
|
||||||
|
|
||||||
@app.delete("/{path:path}", include_in_schema=False)
|
@app.delete("/{path:path}", include_in_schema=False)
|
||||||
async def proxy_delete(request: Request, path: str) -> Response:
|
async def proxy_delete(request: Request, path: str) -> Response:
|
||||||
return await proxy(request, path)
|
return await proxy(request, path)
|
||||||
|
|
||||||
|
@app.get("/{path:path}", include_in_schema=False)
|
||||||
|
async def proxy_get(request: Request, path: str) -> Response:
|
||||||
|
return await proxy(request, path)
|
||||||
|
|
||||||
@app.get("/{path:path}", include_in_schema=False)
|
@app.post("/{path:path}", include_in_schema=False)
|
||||||
async def proxy_get(request: Request, path: str) -> Response:
|
async def proxy_post(request: Request, path: str) -> Response:
|
||||||
return await proxy(request, path)
|
return await proxy(request, path)
|
||||||
|
|
||||||
|
@app.put("/{path:path}", include_in_schema=False)
|
||||||
|
async def proxy_put(request: Request, path: str) -> Response:
|
||||||
|
return await proxy(request, path)
|
||||||
|
else:
|
||||||
|
|
||||||
@app.post("/{path:path}", include_in_schema=False)
|
@app.get("/", include_in_schema=False)
|
||||||
async def proxy_post(request: Request, path: str) -> Response:
|
def root() -> RedirectResponse:
|
||||||
return await proxy(request, path)
|
return RedirectResponse(url="/docs")
|
||||||
|
|
||||||
|
|
||||||
@app.put("/{path:path}", include_in_schema=False)
|
|
||||||
async def proxy_put(request: Request, path: str) -> Response:
|
|
||||||
return await proxy(request, path)
|
|
||||||
|
|
||||||
|
|
||||||
async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse | HTMLResponse]:
|
async def proxy(request: Request, path: str) -> Union[Response | RedirectResponse | HTMLResponse]:
|
||||||
# Make hostname Windows friendly
|
if config_eos.server.eosdash_host and config_eos.server.eosdash_port:
|
||||||
host = str(config_eos.server_eosdash_host)
|
|
||||||
if host == "0.0.0.0" and os.name == "nt":
|
|
||||||
host = "localhost"
|
|
||||||
if host and config_eos.server_eosdash_port:
|
|
||||||
# Proxy to EOSdash server
|
# Proxy to EOSdash server
|
||||||
url = f"http://{host}:{config_eos.server_eosdash_port}/{path}"
|
url = f"http://{config_eos.server.eosdash_host}:{config_eos.server.eosdash_port}/{path}"
|
||||||
headers = dict(request.headers)
|
headers = dict(request.headers)
|
||||||
|
|
||||||
data = await request.body()
|
data = await request.body()
|
||||||
@@ -909,9 +907,9 @@ async def proxy(request: Request, path: str) -> Union[Response | RedirectRespons
|
|||||||
error_message=f"""<pre>
|
error_message=f"""<pre>
|
||||||
EOSdash server not reachable: '{url}'
|
EOSdash server not reachable: '{url}'
|
||||||
Did you start the EOSdash server
|
Did you start the EOSdash server
|
||||||
or set 'server_eos_startup_eosdash'?
|
or set 'startup_eosdash'?
|
||||||
If there is no application server intended please
|
If there is no application server intended please
|
||||||
set 'server_eosdash_host' or 'server_eosdash_port' to None.
|
set 'eosdash_host' or 'eosdash_port' to None.
|
||||||
</pre>
|
</pre>
|
||||||
""",
|
""",
|
||||||
error_details=f"{e}",
|
error_details=f"{e}",
|
||||||
@@ -975,8 +973,8 @@ def main() -> None:
|
|||||||
it starts the EOS server with the specified configurations.
|
it starts the EOS server with the specified configurations.
|
||||||
|
|
||||||
Command-line Arguments:
|
Command-line Arguments:
|
||||||
--host (str): Host for the EOS server (default: value from config_eos).
|
--host (str): Host for the EOS server (default: value from config).
|
||||||
--port (int): Port for the EOS server (default: value from config_eos).
|
--port (int): Port for the EOS server (default: value from config).
|
||||||
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
||||||
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
||||||
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
||||||
@@ -987,14 +985,14 @@ def main() -> None:
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--host",
|
"--host",
|
||||||
type=str,
|
type=str,
|
||||||
default=str(config_eos.server_eos_host),
|
default=str(config_eos.server.host),
|
||||||
help="Host for the EOS server (default: value from config_eos)",
|
help="Host for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--port",
|
"--port",
|
||||||
type=int,
|
type=int,
|
||||||
default=config_eos.server_eos_port,
|
default=config_eos.server.port,
|
||||||
help="Port for the EOS server (default: value from config_eos)",
|
help="Port for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Optional arguments for log_level, access_log, and reload
|
# Optional arguments for log_level, access_log, and reload
|
||||||
@@ -1022,7 +1020,7 @@ def main() -> None:
|
|||||||
try:
|
try:
|
||||||
run_eos(args.host, args.port, args.log_level, args.access_log, args.reload)
|
run_eos(args.host, args.port, args.log_level, args.access_log, args.reload)
|
||||||
except:
|
except:
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@@ -1,11 +1,17 @@
|
|||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
|
from functools import reduce
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
import uvicorn
|
import uvicorn
|
||||||
from fasthtml.common import H1, FastHTML, Table, Td, Th, Thead, Titled, Tr
|
from fasthtml.common import H1, Table, Td, Th, Thead, Titled, Tr, fast_app
|
||||||
|
from pydantic.fields import ComputedFieldInfo, FieldInfo
|
||||||
|
from pydantic_core import PydanticUndefined
|
||||||
|
|
||||||
from akkudoktoreos.config.config import get_config
|
from akkudoktoreos.config.config import get_config
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
@@ -14,18 +20,84 @@ config_eos = get_config()
|
|||||||
# Command line arguments
|
# Command line arguments
|
||||||
args = None
|
args = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_default_value(field_info: Union[FieldInfo, ComputedFieldInfo], regular_field: bool) -> Any:
|
||||||
|
default_value = ""
|
||||||
|
if regular_field:
|
||||||
|
if (val := field_info.default) is not PydanticUndefined:
|
||||||
|
default_value = val
|
||||||
|
else:
|
||||||
|
default_value = "N/A"
|
||||||
|
return default_value
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_nested_types(field_type: Any, parent_types: list[str]) -> list[tuple[Any, list[str]]]:
|
||||||
|
resolved_types: list[tuple[Any, list[str]]] = []
|
||||||
|
|
||||||
|
origin = getattr(field_type, "__origin__", field_type)
|
||||||
|
if origin is Union:
|
||||||
|
for arg in getattr(field_type, "__args__", []):
|
||||||
|
if arg is not type(None):
|
||||||
|
resolved_types.extend(resolve_nested_types(arg, parent_types))
|
||||||
|
else:
|
||||||
|
resolved_types.append((field_type, parent_types))
|
||||||
|
|
||||||
|
return resolved_types
|
||||||
|
|
||||||
|
|
||||||
configs = []
|
configs = []
|
||||||
for field_name in config_eos.model_fields:
|
inner_types: set[type[PydanticBaseModel]] = set()
|
||||||
config = {}
|
for field_name, field_info in list(config_eos.model_fields.items()) + list(
|
||||||
config["name"] = field_name
|
config_eos.model_computed_fields.items()
|
||||||
config["value"] = getattr(config_eos, field_name)
|
):
|
||||||
config["default"] = config_eos.model_fields[field_name].default
|
|
||||||
config["description"] = config_eos.model_fields[field_name].description
|
def extract_nested_models(
|
||||||
configs.append(config)
|
subfield_info: Union[ComputedFieldInfo, FieldInfo], parent_types: list[str]
|
||||||
|
) -> None:
|
||||||
|
regular_field = isinstance(subfield_info, FieldInfo)
|
||||||
|
subtype = subfield_info.annotation if regular_field else subfield_info.return_type
|
||||||
|
|
||||||
|
if subtype in inner_types:
|
||||||
|
return
|
||||||
|
|
||||||
|
nested_types = resolve_nested_types(subtype, [])
|
||||||
|
found_basic = False
|
||||||
|
for nested_type, nested_parent_types in nested_types:
|
||||||
|
if not isinstance(nested_type, type) or not issubclass(nested_type, PydanticBaseModel):
|
||||||
|
if found_basic:
|
||||||
|
continue
|
||||||
|
|
||||||
|
config = {}
|
||||||
|
config["name"] = ".".join(parent_types)
|
||||||
|
try:
|
||||||
|
config["value"] = reduce(getattr, [config_eos] + parent_types)
|
||||||
|
except AttributeError:
|
||||||
|
# Parent value(s) are not set in current config
|
||||||
|
config["value"] = ""
|
||||||
|
config["default"] = get_default_value(subfield_info, regular_field)
|
||||||
|
config["description"] = (
|
||||||
|
subfield_info.description if subfield_info.description else ""
|
||||||
|
)
|
||||||
|
configs.append(config)
|
||||||
|
found_basic = True
|
||||||
|
else:
|
||||||
|
new_parent_types = parent_types + nested_parent_types
|
||||||
|
inner_types.add(nested_type)
|
||||||
|
for nested_field_name, nested_field_info in list(
|
||||||
|
nested_type.model_fields.items()
|
||||||
|
) + list(nested_type.model_computed_fields.items()):
|
||||||
|
extract_nested_models(
|
||||||
|
nested_field_info,
|
||||||
|
new_parent_types + [nested_field_name],
|
||||||
|
)
|
||||||
|
|
||||||
|
extract_nested_models(field_info, [field_name])
|
||||||
|
configs = sorted(configs, key=lambda x: x["name"])
|
||||||
|
|
||||||
|
|
||||||
app = FastHTML(secret_key=os.getenv("EOS_SERVER__EOSDASH_SESSKEY"))
|
app, rt = fast_app(
|
||||||
rt = app.route
|
secret_key=os.getenv("EOS_SERVER__EOSDASH_SESSKEY"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def config_table() -> Table:
|
def config_table() -> Table:
|
||||||
@@ -96,10 +168,10 @@ def main() -> None:
|
|||||||
it starts the EOSdash server with the specified configurations.
|
it starts the EOSdash server with the specified configurations.
|
||||||
|
|
||||||
Command-line Arguments:
|
Command-line Arguments:
|
||||||
--host (str): Host for the EOSdash server (default: value from config_eos).
|
--host (str): Host for the EOSdash server (default: value from config).
|
||||||
--port (int): Port for the EOSdash server (default: value from config_eos).
|
--port (int): Port for the EOSdash server (default: value from config).
|
||||||
--eos-host (str): Host for the EOS server (default: value from config_eos).
|
--eos-host (str): Host for the EOS server (default: value from config).
|
||||||
--eos-port (int): Port for the EOS server (default: value from config_eos).
|
--eos-port (int): Port for the EOS server (default: value from config).
|
||||||
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
--log_level (str): Log level for the server. Options: "critical", "error", "warning", "info", "debug", "trace" (default: "info").
|
||||||
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
--access_log (bool): Enable or disable access log. Options: True or False (default: False).
|
||||||
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
--reload (bool): Enable or disable auto-reload. Useful for development. Options: True or False (default: False).
|
||||||
@@ -110,28 +182,28 @@ def main() -> None:
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--host",
|
"--host",
|
||||||
type=str,
|
type=str,
|
||||||
default=str(config_eos.server_eosdash_host),
|
default=str(config_eos.server.eosdash_host),
|
||||||
help="Host for the EOSdash server (default: value from config_eos)",
|
help="Host for the EOSdash server (default: value from config)",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--port",
|
"--port",
|
||||||
type=int,
|
type=int,
|
||||||
default=config_eos.server_eosdash_port,
|
default=config_eos.server.eosdash_port,
|
||||||
help="Port for the EOSdash server (default: value from config_eos)",
|
help="Port for the EOSdash server (default: value from config)",
|
||||||
)
|
)
|
||||||
|
|
||||||
# EOS Host and port arguments with defaults from config_eos
|
# EOS Host and port arguments with defaults from config_eos
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--eos-host",
|
"--eos-host",
|
||||||
type=str,
|
type=str,
|
||||||
default=str(config_eos.server_eos_host),
|
default=str(config_eos.server.host),
|
||||||
help="Host for the EOS server (default: value from config_eos)",
|
help="Host for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--eos-port",
|
"--eos-port",
|
||||||
type=int,
|
type=int,
|
||||||
default=config_eos.server_eos_port,
|
default=config_eos.server.port,
|
||||||
help="Port for the EOS server (default: value from config_eos)",
|
help="Port for the EOS server (default: value from config)",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Optional arguments for log_level, access_log, and reload
|
# Optional arguments for log_level, access_log, and reload
|
||||||
@@ -159,7 +231,7 @@ def main() -> None:
|
|||||||
try:
|
try:
|
||||||
run_eosdash(args.host, args.port, args.log_level, args.access_log, args.reload)
|
run_eosdash(args.host, args.port, args.log_level, args.access_log, args.reload)
|
||||||
except:
|
except:
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@@ -1,6 +1,5 @@
|
|||||||
"""Server Module."""
|
"""Server Module."""
|
||||||
|
|
||||||
import os
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import Field, IPvAnyAddress, field_validator
|
from pydantic import Field, IPvAnyAddress, field_validator
|
||||||
@@ -11,35 +10,25 @@ from akkudoktoreos.core.logging import get_logger
|
|||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def get_default_host() -> str:
|
|
||||||
if os.name == "nt":
|
|
||||||
return "127.0.0.1"
|
|
||||||
return "0.0.0.0"
|
|
||||||
|
|
||||||
|
|
||||||
class ServerCommonSettings(SettingsBaseModel):
|
class ServerCommonSettings(SettingsBaseModel):
|
||||||
"""Common server settings.
|
"""Server Configuration.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
To be added
|
To be added
|
||||||
"""
|
"""
|
||||||
|
|
||||||
server_eos_host: Optional[IPvAnyAddress] = Field(
|
host: Optional[IPvAnyAddress] = Field(default="0.0.0.0", description="EOS server IP address.")
|
||||||
default=get_default_host(), description="EOS server IP address."
|
port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
|
||||||
)
|
verbose: Optional[bool] = Field(default=False, description="Enable debug output")
|
||||||
server_eos_port: Optional[int] = Field(default=8503, description="EOS server IP port number.")
|
startup_eosdash: Optional[bool] = Field(
|
||||||
server_eos_verbose: Optional[bool] = Field(default=False, description="Enable debug output")
|
|
||||||
server_eos_startup_eosdash: Optional[bool] = Field(
|
|
||||||
default=True, description="EOS server to start EOSdash server."
|
default=True, description="EOS server to start EOSdash server."
|
||||||
)
|
)
|
||||||
server_eosdash_host: Optional[IPvAnyAddress] = Field(
|
eosdash_host: Optional[IPvAnyAddress] = Field(
|
||||||
default=get_default_host(), description="EOSdash server IP address."
|
default="0.0.0.0", description="EOSdash server IP address."
|
||||||
)
|
|
||||||
server_eosdash_port: Optional[int] = Field(
|
|
||||||
default=8504, description="EOSdash server IP port number."
|
|
||||||
)
|
)
|
||||||
|
eosdash_port: Optional[int] = Field(default=8504, description="EOSdash server IP port number.")
|
||||||
|
|
||||||
@field_validator("server_eos_port", "server_eosdash_port")
|
@field_validator("port", "eosdash_port")
|
||||||
def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
|
def validate_server_port(cls, value: Optional[int]) -> Optional[int]:
|
||||||
if value is not None and not (1024 <= value <= 49151):
|
if value is not None and not (1024 <= value <= 49151):
|
||||||
raise ValueError("Server port number must be between 1024 and 49151.")
|
raise ValueError("Server port number must be between 1024 and 49151.")
|
||||||
|
@@ -329,9 +329,9 @@ class CacheFileStore(ConfigMixin, metaclass=CacheFileStoreMeta):
|
|||||||
# File already available
|
# File already available
|
||||||
cache_file_obj = cache_item.cache_file
|
cache_file_obj = cache_item.cache_file
|
||||||
else:
|
else:
|
||||||
self.config.data_cache_path.mkdir(parents=True, exist_ok=True)
|
self.config.general.data_cache_path.mkdir(parents=True, exist_ok=True)
|
||||||
cache_file_obj = tempfile.NamedTemporaryFile(
|
cache_file_obj = tempfile.NamedTemporaryFile(
|
||||||
mode=mode, delete=delete, suffix=suffix, dir=self.config.data_cache_path
|
mode=mode, delete=delete, suffix=suffix, dir=self.config.general.data_cache_path
|
||||||
)
|
)
|
||||||
self._store[cache_file_key] = CacheFileRecord(
|
self._store[cache_file_key] = CacheFileRecord(
|
||||||
cache_file=cache_file_obj,
|
cache_file=cache_file_obj,
|
||||||
|
42
src/akkudoktoreos/utils/docs.py
Normal file
42
src/akkudoktoreos/utils/docs.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from pydantic.fields import FieldInfo
|
||||||
|
|
||||||
|
from akkudoktoreos.core.pydantic import PydanticBaseModel
|
||||||
|
|
||||||
|
|
||||||
|
def get_example_or_default(field_name: str, field_info: FieldInfo, example_ix: int) -> Any:
|
||||||
|
"""Generate a default value for a field, considering constraints."""
|
||||||
|
if field_info.examples is not None:
|
||||||
|
try:
|
||||||
|
return field_info.examples[example_ix]
|
||||||
|
except IndexError:
|
||||||
|
return field_info.examples[-1]
|
||||||
|
|
||||||
|
if field_info.default is not None:
|
||||||
|
return field_info.default
|
||||||
|
|
||||||
|
raise NotImplementedError(f"No default or example provided '{field_name}': {field_info}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_model_structure_from_examples(
|
||||||
|
model_class: type[PydanticBaseModel], multiple: bool
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Create a model instance with default or example values, respecting constraints."""
|
||||||
|
example_max_length = 1
|
||||||
|
|
||||||
|
# Get first field with examples (non-default) to get example_max_length
|
||||||
|
if multiple:
|
||||||
|
for _, field_info in model_class.model_fields.items():
|
||||||
|
if field_info.examples is not None:
|
||||||
|
example_max_length = len(field_info.examples)
|
||||||
|
break
|
||||||
|
|
||||||
|
example_data: list[dict[str, Any]] = [{} for _ in range(example_max_length)]
|
||||||
|
|
||||||
|
for field_name, field_info in model_class.model_fields.items():
|
||||||
|
for example_ix in range(example_max_length):
|
||||||
|
example_data[example_ix][field_name] = get_example_or_default(
|
||||||
|
field_name, field_info, example_ix
|
||||||
|
)
|
||||||
|
return example_data
|
@@ -10,6 +10,8 @@ logger = get_logger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class UtilsCommonSettings(SettingsBaseModel):
|
class UtilsCommonSettings(SettingsBaseModel):
|
||||||
|
"""Utils Configuration."""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@@ -47,6 +49,6 @@ class NumpyEncoder(json.JSONEncoder):
|
|||||||
# # Example usage
|
# # Example usage
|
||||||
# start_date = datetime.datetime(2024, 3, 31) # Date of the DST change
|
# start_date = datetime.datetime(2024, 3, 31) # Date of the DST change
|
||||||
# if ist_dst_wechsel(start_date):
|
# if ist_dst_wechsel(start_date):
|
||||||
# prediction_hours = 23 # Adjust to 23 hours for DST change days
|
# hours = 23 # Adjust to 23 hours for DST change days
|
||||||
# else:
|
# else:
|
||||||
# prediction_hours = 24 # Default value for days without DST change
|
# hours = 24 # Default value for days without DST change
|
||||||
|
@@ -13,7 +13,6 @@ import pendulum
|
|||||||
from matplotlib.backends.backend_pdf import PdfPages
|
from matplotlib.backends.backend_pdf import PdfPages
|
||||||
|
|
||||||
from akkudoktoreos.core.coreabc import ConfigMixin
|
from akkudoktoreos.core.coreabc import ConfigMixin
|
||||||
from akkudoktoreos.core.ems import EnergieManagementSystem
|
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
from akkudoktoreos.optimization.genetic import OptimizationParameters
|
from akkudoktoreos.optimization.genetic import OptimizationParameters
|
||||||
from akkudoktoreos.utils.datetimeutil import to_datetime
|
from akkudoktoreos.utils.datetimeutil import to_datetime
|
||||||
@@ -25,7 +24,12 @@ matplotlib.use(
|
|||||||
|
|
||||||
|
|
||||||
class VisualizationReport(ConfigMixin):
|
class VisualizationReport(ConfigMixin):
|
||||||
def __init__(self, filename: str = "visualization_results.pdf", version: str = "0.0.1") -> None:
|
def __init__(
|
||||||
|
self,
|
||||||
|
filename: str = "visualization_results.pdf",
|
||||||
|
version: str = "0.0.1",
|
||||||
|
create_img: bool = True,
|
||||||
|
) -> None:
|
||||||
# Initialize the report with a given filename and empty groups
|
# Initialize the report with a given filename and empty groups
|
||||||
self.filename = filename
|
self.filename = filename
|
||||||
self.groups: list[list[Callable[[], None]]] = [] # Store groups of charts
|
self.groups: list[list[Callable[[], None]]] = [] # Store groups of charts
|
||||||
@@ -35,12 +39,23 @@ class VisualizationReport(ConfigMixin):
|
|||||||
self.pdf_pages = PdfPages(filename, metadata={}) # Initialize PdfPages without metadata
|
self.pdf_pages = PdfPages(filename, metadata={}) # Initialize PdfPages without metadata
|
||||||
self.version = version # overwrite version as test for constant output of pdf for test
|
self.version = version # overwrite version as test for constant output of pdf for test
|
||||||
self.current_time = to_datetime(
|
self.current_time = to_datetime(
|
||||||
as_string="YYYY-MM-DD HH:mm:ss", in_timezone=self.config.timezone
|
as_string="YYYY-MM-DD HH:mm:ss", in_timezone=self.config.general.timezone
|
||||||
)
|
)
|
||||||
|
self.create_img = create_img
|
||||||
|
|
||||||
def add_chart_to_group(self, chart_func: Callable[[], None]) -> None:
|
def add_chart_to_group(self, chart_func: Callable[[], None], title: str | None) -> None:
|
||||||
"""Add a chart function to the current group."""
|
"""Add a chart function to the current group and save it as a PNG and SVG."""
|
||||||
self.current_group.append(chart_func)
|
self.current_group.append(chart_func)
|
||||||
|
if self.create_img and title:
|
||||||
|
server_output_dir = self.config.general.data_cache_path
|
||||||
|
server_output_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
fig, ax = plt.subplots()
|
||||||
|
chart_func()
|
||||||
|
plt.tight_layout() # Adjust the layout to ensure titles are not cut off
|
||||||
|
sanitized_title = "".join(c if c.isalnum() else "_" for c in title)
|
||||||
|
chart_filename_base = os.path.join(server_output_dir, f"chart_{sanitized_title}")
|
||||||
|
fig.savefig(f"{chart_filename_base}.svg")
|
||||||
|
plt.close(fig)
|
||||||
|
|
||||||
def finalize_group(self) -> None:
|
def finalize_group(self) -> None:
|
||||||
"""Finalize the current group and prepare for a new group."""
|
"""Finalize the current group and prepare for a new group."""
|
||||||
@@ -52,7 +67,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
|
|
||||||
def _initialize_pdf(self) -> None:
|
def _initialize_pdf(self) -> None:
|
||||||
"""Create the output directory if it doesn't exist and initialize the PDF."""
|
"""Create the output directory if it doesn't exist and initialize the PDF."""
|
||||||
output_dir = self.config.data_output_path
|
output_dir = self.config.general.data_output_path
|
||||||
|
|
||||||
# If self.filename is already a valid path, use it; otherwise, combine it with output_dir
|
# If self.filename is already a valid path, use it; otherwise, combine it with output_dir
|
||||||
if os.path.isabs(self.filename):
|
if os.path.isabs(self.filename):
|
||||||
@@ -148,16 +163,14 @@ class VisualizationReport(ConfigMixin):
|
|||||||
|
|
||||||
# Format the time axis
|
# Format the time axis
|
||||||
plt.gca().xaxis.set_major_formatter(
|
plt.gca().xaxis.set_major_formatter(
|
||||||
mdates.DateFormatter("%Y-%m-%d", tz=self.config.timezone)
|
mdates.DateFormatter("%Y-%m-%d")
|
||||||
) # Show date and time
|
) # Show date and time
|
||||||
plt.gca().xaxis.set_major_locator(
|
plt.gca().xaxis.set_major_locator(
|
||||||
mdates.DayLocator(interval=1, tz=self.config.timezone)
|
mdates.DayLocator(interval=1, tz=None)
|
||||||
) # Major ticks every day
|
) # Major ticks every day
|
||||||
plt.gca().xaxis.set_minor_locator(
|
plt.gca().xaxis.set_minor_locator(mdates.HourLocator(interval=3, tz=None))
|
||||||
mdates.HourLocator(interval=2, tz=self.config.timezone)
|
|
||||||
)
|
|
||||||
# Minor ticks every 6 hours
|
# Minor ticks every 6 hours
|
||||||
plt.gca().xaxis.set_minor_formatter(mdates.DateFormatter("%H", tz=self.config.timezone))
|
plt.gca().xaxis.set_minor_formatter(mdates.DateFormatter("%H"))
|
||||||
# plt.gcf().autofmt_xdate(rotation=45, which="major")
|
# plt.gcf().autofmt_xdate(rotation=45, which="major")
|
||||||
# Auto-format the x-axis for readability
|
# Auto-format the x-axis for readability
|
||||||
|
|
||||||
@@ -176,8 +189,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
plt.grid(True)
|
plt.grid(True)
|
||||||
|
|
||||||
# Add vertical line for the current date if within the axis range
|
# Add vertical line for the current date if within the axis range
|
||||||
current_time = pendulum.now(self.config.timezone)
|
current_time = pendulum.now(self.config.general.timezone)
|
||||||
# current_time = pendulum.now().add(hours=1)
|
|
||||||
if timestamps[0].subtract(hours=2) <= current_time <= timestamps[-1]:
|
if timestamps[0].subtract(hours=2) <= current_time <= timestamps[-1]:
|
||||||
plt.axvline(current_time, color="r", linestyle="--", label="Now")
|
plt.axvline(current_time, color="r", linestyle="--", label="Now")
|
||||||
plt.text(current_time, plt.ylim()[1], "Now", color="r", ha="center", va="bottom")
|
plt.text(current_time, plt.ylim()[1], "Now", color="r", ha="center", va="bottom")
|
||||||
@@ -191,17 +203,15 @@ class VisualizationReport(ConfigMixin):
|
|||||||
hours_since_start = [(t - timestamps[0]).total_seconds() / 3600 for t in timestamps]
|
hours_since_start = [(t - timestamps[0]).total_seconds() / 3600 for t in timestamps]
|
||||||
# ax2.set_xticks(timestamps[::48]) # Set ticks every 12 hours
|
# ax2.set_xticks(timestamps[::48]) # Set ticks every 12 hours
|
||||||
# ax2.set_xticklabels([f"{int(h)}" for h in hours_since_start[::48]])
|
# ax2.set_xticklabels([f"{int(h)}" for h in hours_since_start[::48]])
|
||||||
# ax2.set_xticks(timestamps[:: len(timestamps) // 24]) # Select 10 evenly spaced ticks
|
ax2.set_xticks(timestamps[:: len(timestamps) // 24]) # Select 10 evenly spaced ticks
|
||||||
ax2.set_xticks(timestamps[:: len(timestamps) // 12]) # Select 10 evenly spaced ticks
|
ax2.set_xticklabels([f"{int(h)}" for h in hours_since_start[:: len(timestamps) // 24]])
|
||||||
# ax2.set_xticklabels([f"{int(h)}" for h in hours_since_start[:: len(timestamps) // 24]])
|
|
||||||
ax2.set_xticklabels([f"{int(h)}" for h in hours_since_start[:: len(timestamps) // 12]])
|
|
||||||
if x2label:
|
if x2label:
|
||||||
ax2.set_xlabel(x2label)
|
ax2.set_xlabel(x2label)
|
||||||
|
|
||||||
# Ensure ax1 and ax2 are aligned
|
# Ensure ax1 and ax2 are aligned
|
||||||
# assert ax1.get_xlim() == ax2.get_xlim(), "ax1 and ax2 are not aligned"
|
# assert ax1.get_xlim() == ax2.get_xlim(), "ax1 and ax2 are not aligned"
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Add chart function to current group
|
self.add_chart_to_group(chart, title) # Add chart function to current group
|
||||||
|
|
||||||
def create_line_chart(
|
def create_line_chart(
|
||||||
self,
|
self,
|
||||||
@@ -262,7 +272,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
plt.grid(True) # Show grid
|
plt.grid(True) # Show grid
|
||||||
plt.xlim(x[0] - 0.5, x[-1] + 0.5) # Adjust x-limits
|
plt.xlim(x[0] - 0.5, x[-1] + 0.5) # Adjust x-limits
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Add chart function to current group
|
self.add_chart_to_group(chart, title) # Add chart function to current group
|
||||||
|
|
||||||
def create_scatter_plot(
|
def create_scatter_plot(
|
||||||
self,
|
self,
|
||||||
@@ -284,7 +294,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
plt.colorbar(scatter, label="Constraint") # Add colorbar if color data is provided
|
plt.colorbar(scatter, label="Constraint") # Add colorbar if color data is provided
|
||||||
plt.grid(True) # Show grid
|
plt.grid(True) # Show grid
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Add chart function to current group
|
self.add_chart_to_group(chart, title) # Add chart function to current group
|
||||||
|
|
||||||
def create_bar_chart(
|
def create_bar_chart(
|
||||||
self,
|
self,
|
||||||
@@ -334,7 +344,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
plt.grid(True, zorder=0) # Show grid in the background
|
plt.grid(True, zorder=0) # Show grid in the background
|
||||||
plt.xlim(-0.5, len(labels) - 0.5) # Set x-axis limits
|
plt.xlim(-0.5, len(labels) - 0.5) # Set x-axis limits
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Add chart function to current group
|
self.add_chart_to_group(chart, title) # Add chart function to current group
|
||||||
|
|
||||||
def create_violin_plot(
|
def create_violin_plot(
|
||||||
self, data_list: list[np.ndarray], labels: list[str], title: str, xlabel: str, ylabel: str
|
self, data_list: list[np.ndarray], labels: list[str], title: str, xlabel: str, ylabel: str
|
||||||
@@ -349,7 +359,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
plt.ylabel(ylabel) # Set y-axis label
|
plt.ylabel(ylabel) # Set y-axis label
|
||||||
plt.grid(True) # Show grid
|
plt.grid(True) # Show grid
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Add chart function to current group
|
self.add_chart_to_group(chart, title) # Add chart function to current group
|
||||||
|
|
||||||
def add_text_page(self, text: str, title: Optional[str] = None, fontsize: int = 12) -> None:
|
def add_text_page(self, text: str, title: Optional[str] = None, fontsize: int = 12) -> None:
|
||||||
"""Add a page with text content to the PDF."""
|
"""Add a page with text content to the PDF."""
|
||||||
@@ -368,7 +378,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
self.pdf_pages.savefig(fig) # Save the figure as a page in the PDF
|
self.pdf_pages.savefig(fig) # Save the figure as a page in the PDF
|
||||||
plt.close(fig) # Close the figure to free up memory
|
plt.close(fig) # Close the figure to free up memory
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Treat the text page as a "chart" in the group
|
self.add_chart_to_group(chart, title) # Treat the text page as a "chart" in the group
|
||||||
|
|
||||||
def add_json_page(
|
def add_json_page(
|
||||||
self, json_obj: dict, title: Optional[str] = None, fontsize: int = 12
|
self, json_obj: dict, title: Optional[str] = None, fontsize: int = 12
|
||||||
@@ -406,7 +416,7 @@ class VisualizationReport(ConfigMixin):
|
|||||||
self.pdf_pages.savefig(fig) # Save the figure as a page in the PDF
|
self.pdf_pages.savefig(fig) # Save the figure as a page in the PDF
|
||||||
plt.close(fig) # Close the figure to free up memory
|
plt.close(fig) # Close the figure to free up memory
|
||||||
|
|
||||||
self.add_chart_to_group(chart) # Treat the JSON page as a "chart" in the group
|
self.add_chart_to_group(chart, title) # Treat the JSON page as a "chart" in the group
|
||||||
|
|
||||||
def generate_pdf(self) -> None:
|
def generate_pdf(self) -> None:
|
||||||
"""Generate the PDF report with all the added chart groups."""
|
"""Generate the PDF report with all the added chart groups."""
|
||||||
@@ -422,17 +432,15 @@ def prepare_visualize(
|
|||||||
parameters: OptimizationParameters,
|
parameters: OptimizationParameters,
|
||||||
results: dict,
|
results: dict,
|
||||||
filename: str = "visualization_results.pdf",
|
filename: str = "visualization_results.pdf",
|
||||||
start_hour: int = 0,
|
start_hour: Optional[int] = 0,
|
||||||
) -> None:
|
) -> None:
|
||||||
report = VisualizationReport(filename)
|
report = VisualizationReport(filename)
|
||||||
# next_full_hour_date = pendulum.now(report.config.timezone).start_of("day").add(hours=start_hour)
|
next_full_hour_date = pendulum.now(report.config.general.timezone).start_of("hour").add(hours=1)
|
||||||
# next_full_hour_date = to_datetime().set(minute=0, second=0, microsecond=0)
|
|
||||||
next_full_hour_date = EnergieManagementSystem.set_start_datetime()
|
|
||||||
# Group 1:
|
# Group 1:
|
||||||
report.create_line_chart_date(
|
report.create_line_chart_date(
|
||||||
next_full_hour_date,
|
next_full_hour_date, # start_date
|
||||||
[
|
[
|
||||||
parameters.ems.gesamtlast[start_hour:],
|
parameters.ems.gesamtlast,
|
||||||
],
|
],
|
||||||
title="Load Profile",
|
title="Load Profile",
|
||||||
# xlabel="Hours", # not enough space
|
# xlabel="Hours", # not enough space
|
||||||
@@ -440,9 +448,9 @@ def prepare_visualize(
|
|||||||
labels=["Total Load (Wh)"],
|
labels=["Total Load (Wh)"],
|
||||||
)
|
)
|
||||||
report.create_line_chart_date(
|
report.create_line_chart_date(
|
||||||
next_full_hour_date,
|
next_full_hour_date, # start_date
|
||||||
[
|
[
|
||||||
parameters.ems.pv_prognose_wh[start_hour:],
|
parameters.ems.pv_prognose_wh,
|
||||||
],
|
],
|
||||||
title="PV Forecast",
|
title="PV Forecast",
|
||||||
# xlabel="Hours", # not enough space
|
# xlabel="Hours", # not enough space
|
||||||
@@ -450,15 +458,8 @@ def prepare_visualize(
|
|||||||
)
|
)
|
||||||
|
|
||||||
report.create_line_chart_date(
|
report.create_line_chart_date(
|
||||||
next_full_hour_date,
|
next_full_hour_date, # start_date
|
||||||
[
|
[np.full(len(parameters.ems.gesamtlast), parameters.ems.einspeiseverguetung_euro_pro_wh)],
|
||||||
np.full(
|
|
||||||
len(parameters.ems.gesamtlast) - start_hour,
|
|
||||||
parameters.ems.einspeiseverguetung_euro_pro_wh[start_hour:]
|
|
||||||
if isinstance(parameters.ems.einspeiseverguetung_euro_pro_wh, list)
|
|
||||||
else parameters.ems.einspeiseverguetung_euro_pro_wh,
|
|
||||||
)
|
|
||||||
],
|
|
||||||
title="Remuneration",
|
title="Remuneration",
|
||||||
# xlabel="Hours", # not enough space
|
# xlabel="Hours", # not enough space
|
||||||
ylabel="€/Wh",
|
ylabel="€/Wh",
|
||||||
@@ -466,9 +467,9 @@ def prepare_visualize(
|
|||||||
)
|
)
|
||||||
if parameters.temperature_forecast:
|
if parameters.temperature_forecast:
|
||||||
report.create_line_chart_date(
|
report.create_line_chart_date(
|
||||||
next_full_hour_date,
|
next_full_hour_date, # start_date
|
||||||
[
|
[
|
||||||
parameters.temperature_forecast[start_hour:],
|
parameters.temperature_forecast,
|
||||||
],
|
],
|
||||||
title="Temperature Forecast",
|
title="Temperature Forecast",
|
||||||
# xlabel="Hours", # not enough space
|
# xlabel="Hours", # not enough space
|
||||||
@@ -517,35 +518,21 @@ def prepare_visualize(
|
|||||||
)
|
)
|
||||||
report.create_line_chart_date(
|
report.create_line_chart_date(
|
||||||
next_full_hour_date, # start_date
|
next_full_hour_date, # start_date
|
||||||
[parameters.ems.strompreis_euro_pro_wh[start_hour:]],
|
[parameters.ems.strompreis_euro_pro_wh],
|
||||||
# title="Electricity Price", # not enough space
|
title="Electricity Price",
|
||||||
# xlabel="Date", # not enough space
|
# xlabel="Date", # not enough space
|
||||||
ylabel="Electricity Price (€/Wh)",
|
ylabel="Electricity Price (€/Wh)",
|
||||||
x2label=None, # not enough space
|
x2label=None, # not enough space
|
||||||
)
|
)
|
||||||
|
|
||||||
labels = list(
|
|
||||||
item
|
|
||||||
for sublist in zip(
|
|
||||||
list(str(i) for i in range(0, 23, 2)), list(str(" ") for i in range(0, 23, 2))
|
|
||||||
)
|
|
||||||
for item in sublist
|
|
||||||
)
|
|
||||||
labels = labels[start_hour:] + labels
|
|
||||||
|
|
||||||
report.create_bar_chart(
|
report.create_bar_chart(
|
||||||
labels,
|
list(str(i) for i in range(len(results["ac_charge"]))),
|
||||||
[
|
[results["ac_charge"], results["dc_charge"], results["discharge_allowed"]],
|
||||||
results["ac_charge"][start_hour:],
|
|
||||||
results["dc_charge"][start_hour:],
|
|
||||||
results["discharge_allowed"][start_hour:],
|
|
||||||
],
|
|
||||||
title="AC/DC Charging and Discharge Overview",
|
title="AC/DC Charging and Discharge Overview",
|
||||||
ylabel="Relative Power (0-1) / Discharge (0 or 1)",
|
ylabel="Relative Power (0-1) / Discharge (0 or 1)",
|
||||||
label_names=["AC Charging (relative)", "DC Charging (relative)", "Discharge Allowed"],
|
label_names=["AC Charging (relative)", "DC Charging (relative)", "Discharge Allowed"],
|
||||||
colors=["blue", "green", "red"],
|
colors=["blue", "green", "red"],
|
||||||
bottom=3,
|
bottom=3,
|
||||||
xlabels=labels,
|
|
||||||
)
|
)
|
||||||
report.finalize_group()
|
report.finalize_group()
|
||||||
|
|
||||||
@@ -567,7 +554,7 @@ def prepare_visualize(
|
|||||||
report.create_scatter_plot(
|
report.create_scatter_plot(
|
||||||
extra_data["verluste"],
|
extra_data["verluste"],
|
||||||
extra_data["bilanz"],
|
extra_data["bilanz"],
|
||||||
title="",
|
title="Scatter Plot",
|
||||||
xlabel="losses",
|
xlabel="losses",
|
||||||
ylabel="balance",
|
ylabel="balance",
|
||||||
c=extra_data["nebenbedingung"],
|
c=extra_data["nebenbedingung"],
|
||||||
|
@@ -64,6 +64,25 @@ def config_mixin(config_eos):
|
|||||||
yield config_mixin_patch
|
yield config_mixin_patch
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def devices_eos(config_mixin):
|
||||||
|
from akkudoktoreos.devices.devices import get_devices
|
||||||
|
|
||||||
|
devices = get_devices()
|
||||||
|
print("devices_eos reset!")
|
||||||
|
devices.reset()
|
||||||
|
return devices
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def devices_mixin(devices_eos):
|
||||||
|
with patch(
|
||||||
|
"akkudoktoreos.core.coreabc.DevicesMixin.devices", new_callable=PropertyMock
|
||||||
|
) as devices_mixin_patch:
|
||||||
|
devices_mixin_patch.return_value = devices_eos
|
||||||
|
yield devices_mixin_patch
|
||||||
|
|
||||||
|
|
||||||
# Test if test has side effect of writing to system (user) config file
|
# Test if test has side effect of writing to system (user) config file
|
||||||
# Before activating, make sure that no user config file exists (e.g. ~/.config/net.akkudoktoreos.eos/EOS.config.json)
|
# Before activating, make sure that no user config file exists (e.g. ~/.config/net.akkudoktoreos.eos/EOS.config.json)
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
@@ -114,20 +133,24 @@ def config_eos(
|
|||||||
monkeypatch,
|
monkeypatch,
|
||||||
) -> ConfigEOS:
|
) -> ConfigEOS:
|
||||||
"""Fixture to reset EOS config to default values."""
|
"""Fixture to reset EOS config to default values."""
|
||||||
monkeypatch.setenv("data_cache_subpath", str(config_default_dirs[-1] / "data/cache"))
|
monkeypatch.setenv(
|
||||||
monkeypatch.setenv("data_output_subpath", str(config_default_dirs[-1] / "data/output"))
|
"EOS_CONFIG__DATA_CACHE_SUBPATH", str(config_default_dirs[-1] / "data/cache")
|
||||||
|
)
|
||||||
|
monkeypatch.setenv(
|
||||||
|
"EOS_CONFIG__DATA_OUTPUT_SUBPATH", str(config_default_dirs[-1] / "data/output")
|
||||||
|
)
|
||||||
config_file = config_default_dirs[0] / ConfigEOS.CONFIG_FILE_NAME
|
config_file = config_default_dirs[0] / ConfigEOS.CONFIG_FILE_NAME
|
||||||
config_file_cwd = config_default_dirs[1] / ConfigEOS.CONFIG_FILE_NAME
|
config_file_cwd = config_default_dirs[1] / ConfigEOS.CONFIG_FILE_NAME
|
||||||
assert not config_file.exists()
|
assert not config_file.exists()
|
||||||
assert not config_file_cwd.exists()
|
assert not config_file_cwd.exists()
|
||||||
config_eos = get_config()
|
config_eos = get_config()
|
||||||
config_eos.reset_settings()
|
config_eos.reset_settings()
|
||||||
assert config_file == config_eos.config_file_path
|
assert config_file == config_eos.general.config_file_path
|
||||||
assert config_file.exists()
|
assert config_file.exists()
|
||||||
assert not config_file_cwd.exists()
|
assert not config_file_cwd.exists()
|
||||||
assert config_default_dirs[-1] / "data" == config_eos.data_folder_path
|
assert config_default_dirs[-1] / "data" == config_eos.general.data_folder_path
|
||||||
assert config_default_dirs[-1] / "data/cache" == config_eos.data_cache_path
|
assert config_default_dirs[-1] / "data/cache" == config_eos.general.data_cache_path
|
||||||
assert config_default_dirs[-1] / "data/output" == config_eos.data_output_path
|
assert config_default_dirs[-1] / "data/output" == config_eos.general.data_output_path
|
||||||
return config_eos
|
return config_eos
|
||||||
|
|
||||||
|
|
||||||
@@ -166,6 +189,7 @@ def server(xprocess, config_eos, config_default_dirs):
|
|||||||
# Set environment before any subprocess run, to keep custom config dir
|
# Set environment before any subprocess run, to keep custom config dir
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
env["EOS_DIR"] = str(config_default_dirs[-1])
|
env["EOS_DIR"] = str(config_default_dirs[-1])
|
||||||
|
project_dir = config_eos.package_root_path
|
||||||
|
|
||||||
# assure server to be installed
|
# assure server to be installed
|
||||||
try:
|
try:
|
||||||
@@ -175,9 +199,9 @@ def server(xprocess, config_eos, config_default_dirs):
|
|||||||
env=env,
|
env=env,
|
||||||
stdout=subprocess.PIPE,
|
stdout=subprocess.PIPE,
|
||||||
stderr=subprocess.PIPE,
|
stderr=subprocess.PIPE,
|
||||||
|
cwd=project_dir,
|
||||||
)
|
)
|
||||||
except subprocess.CalledProcessError:
|
except subprocess.CalledProcessError:
|
||||||
project_dir = config_eos.package_root_path
|
|
||||||
subprocess.run(
|
subprocess.run(
|
||||||
[sys.executable, "-m", "pip", "install", "-e", project_dir],
|
[sys.executable, "-m", "pip", "install", "-e", project_dir],
|
||||||
check=True,
|
check=True,
|
||||||
|
@@ -7,13 +7,15 @@ from akkudoktoreos.devices.battery import Battery, SolarPanelBatteryParameters
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def setup_pv_battery():
|
def setup_pv_battery():
|
||||||
params = SolarPanelBatteryParameters(
|
params = SolarPanelBatteryParameters(
|
||||||
|
device_id="battery1",
|
||||||
capacity_wh=10000,
|
capacity_wh=10000,
|
||||||
initial_soc_percentage=50,
|
initial_soc_percentage=50,
|
||||||
min_soc_percentage=20,
|
min_soc_percentage=20,
|
||||||
max_soc_percentage=80,
|
max_soc_percentage=80,
|
||||||
max_charge_power_w=8000,
|
max_charge_power_w=8000,
|
||||||
|
hours=24,
|
||||||
)
|
)
|
||||||
battery = Battery(params, hours=24)
|
battery = Battery(params)
|
||||||
battery.reset()
|
battery.reset()
|
||||||
return battery
|
return battery
|
||||||
|
|
||||||
@@ -113,7 +115,6 @@ def test_soc_limits(setup_pv_battery):
|
|||||||
|
|
||||||
def test_max_charge_power_w(setup_pv_battery):
|
def test_max_charge_power_w(setup_pv_battery):
|
||||||
battery = setup_pv_battery
|
battery = setup_pv_battery
|
||||||
battery.setup()
|
|
||||||
assert (
|
assert (
|
||||||
battery.parameters.max_charge_power_w == 8000
|
battery.parameters.max_charge_power_w == 8000
|
||||||
), "Default max charge power should be 5000W, We ask for 8000W here"
|
), "Default max charge power should be 5000W, We ask for 8000W here"
|
||||||
@@ -121,7 +122,6 @@ def test_max_charge_power_w(setup_pv_battery):
|
|||||||
|
|
||||||
def test_charge_energy_within_limits(setup_pv_battery):
|
def test_charge_energy_within_limits(setup_pv_battery):
|
||||||
battery = setup_pv_battery
|
battery = setup_pv_battery
|
||||||
battery.setup()
|
|
||||||
initial_soc_wh = battery.soc_wh
|
initial_soc_wh = battery.soc_wh
|
||||||
|
|
||||||
charged_wh, losses_wh = battery.charge_energy(wh=4000, hour=1)
|
charged_wh, losses_wh = battery.charge_energy(wh=4000, hour=1)
|
||||||
@@ -134,7 +134,6 @@ def test_charge_energy_within_limits(setup_pv_battery):
|
|||||||
|
|
||||||
def test_charge_energy_exceeds_capacity(setup_pv_battery):
|
def test_charge_energy_exceeds_capacity(setup_pv_battery):
|
||||||
battery = setup_pv_battery
|
battery = setup_pv_battery
|
||||||
battery.setup()
|
|
||||||
initial_soc_wh = battery.soc_wh
|
initial_soc_wh = battery.soc_wh
|
||||||
|
|
||||||
# Try to overcharge beyond max capacity
|
# Try to overcharge beyond max capacity
|
||||||
@@ -149,7 +148,6 @@ def test_charge_energy_exceeds_capacity(setup_pv_battery):
|
|||||||
|
|
||||||
def test_charge_energy_not_allowed_hour(setup_pv_battery):
|
def test_charge_energy_not_allowed_hour(setup_pv_battery):
|
||||||
battery = setup_pv_battery
|
battery = setup_pv_battery
|
||||||
battery.setup()
|
|
||||||
|
|
||||||
# Disable charging for all hours
|
# Disable charging for all hours
|
||||||
battery.set_charge_per_hour(np.zeros(battery.hours))
|
battery.set_charge_per_hour(np.zeros(battery.hours))
|
||||||
@@ -165,7 +163,6 @@ def test_charge_energy_not_allowed_hour(setup_pv_battery):
|
|||||||
|
|
||||||
def test_charge_energy_relative_power(setup_pv_battery):
|
def test_charge_energy_relative_power(setup_pv_battery):
|
||||||
battery = setup_pv_battery
|
battery = setup_pv_battery
|
||||||
battery.setup()
|
|
||||||
|
|
||||||
relative_power = 0.5 # 50% of max charge power
|
relative_power = 0.5 # 50% of max charge power
|
||||||
charged_wh, losses_wh = battery.charge_energy(wh=None, hour=4, relative_power=relative_power)
|
charged_wh, losses_wh = battery.charge_energy(wh=None, hour=4, relative_power=relative_power)
|
||||||
@@ -183,13 +180,15 @@ def setup_car_battery():
|
|||||||
from akkudoktoreos.devices.battery import ElectricVehicleParameters
|
from akkudoktoreos.devices.battery import ElectricVehicleParameters
|
||||||
|
|
||||||
params = ElectricVehicleParameters(
|
params = ElectricVehicleParameters(
|
||||||
|
device_id="ev1",
|
||||||
capacity_wh=40000,
|
capacity_wh=40000,
|
||||||
initial_soc_percentage=60,
|
initial_soc_percentage=60,
|
||||||
min_soc_percentage=10,
|
min_soc_percentage=10,
|
||||||
max_soc_percentage=90,
|
max_soc_percentage=90,
|
||||||
max_charge_power_w=7000,
|
max_charge_power_w=7000,
|
||||||
|
hours=24,
|
||||||
)
|
)
|
||||||
battery = Battery(params, hours=24)
|
battery = Battery(params)
|
||||||
battery.reset()
|
battery.reset()
|
||||||
return battery
|
return battery
|
||||||
|
|
||||||
|
@@ -1,5 +1,3 @@
|
|||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -16,58 +14,58 @@ from akkudoktoreos.devices.battery import (
|
|||||||
)
|
)
|
||||||
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
|
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
|
||||||
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
||||||
from akkudoktoreos.prediction.interpolator import SelfConsumptionProbabilityInterpolator
|
|
||||||
|
|
||||||
start_hour = 1
|
start_hour = 1
|
||||||
|
|
||||||
|
|
||||||
# Example initialization of necessary components
|
# Example initialization of necessary components
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def create_ems_instance(config_eos) -> EnergieManagementSystem:
|
def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
||||||
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
||||||
# Assure configuration holds the correct values
|
# Assure configuration holds the correct values
|
||||||
config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
|
config_eos.merge_settings_from_dict(
|
||||||
assert config_eos.prediction_hours is not None
|
{"prediction": {"hours": 48}, "optimization": {"hours": 24}}
|
||||||
|
)
|
||||||
|
assert config_eos.prediction.hours == 48
|
||||||
|
|
||||||
# Initialize the battery and the inverter
|
# Initialize the battery and the inverter
|
||||||
akku = Battery(
|
akku = Battery(
|
||||||
SolarPanelBatteryParameters(
|
SolarPanelBatteryParameters(
|
||||||
capacity_wh=5000, initial_soc_percentage=80, min_soc_percentage=10
|
device_id="battery1",
|
||||||
),
|
capacity_wh=5000,
|
||||||
hours=config_eos.prediction_hours,
|
initial_soc_percentage=80,
|
||||||
|
min_soc_percentage=10,
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# 1h Load to Sub 1h Load Distribution -> SelfConsumptionRate
|
|
||||||
sc = SelfConsumptionProbabilityInterpolator(
|
|
||||||
Path(__file__).parent.resolve()
|
|
||||||
/ ".."
|
|
||||||
/ "src"
|
|
||||||
/ "akkudoktoreos"
|
|
||||||
/ "data"
|
|
||||||
/ "regular_grid_interpolator.pkl"
|
|
||||||
)
|
|
||||||
|
|
||||||
akku.reset()
|
akku.reset()
|
||||||
inverter = Inverter(sc, InverterParameters(max_power_wh=10000), akku)
|
devices_eos.add_device(akku)
|
||||||
|
|
||||||
|
inverter = Inverter(
|
||||||
|
InverterParameters(device_id="inverter1", max_power_wh=10000, battery_id=akku.device_id)
|
||||||
|
)
|
||||||
|
devices_eos.add_device(inverter)
|
||||||
|
|
||||||
# Household device (currently not used, set to None)
|
# Household device (currently not used, set to None)
|
||||||
home_appliance = HomeAppliance(
|
home_appliance = HomeAppliance(
|
||||||
HomeApplianceParameters(
|
HomeApplianceParameters(
|
||||||
|
device_id="dishwasher1",
|
||||||
consumption_wh=2000,
|
consumption_wh=2000,
|
||||||
duration_h=2,
|
duration_h=2,
|
||||||
),
|
),
|
||||||
hours=config_eos.prediction_hours,
|
|
||||||
)
|
)
|
||||||
home_appliance.set_starting_time(2)
|
home_appliance.set_starting_time(2)
|
||||||
|
devices_eos.add_device(home_appliance)
|
||||||
|
|
||||||
# Example initialization of electric car battery
|
# Example initialization of electric car battery
|
||||||
eauto = Battery(
|
eauto = Battery(
|
||||||
ElectricVehicleParameters(
|
ElectricVehicleParameters(
|
||||||
capacity_wh=26400, initial_soc_percentage=10, min_soc_percentage=10
|
device_id="ev1", capacity_wh=26400, initial_soc_percentage=10, min_soc_percentage=10
|
||||||
),
|
),
|
||||||
hours=config_eos.prediction_hours,
|
|
||||||
)
|
)
|
||||||
eauto.set_charge_per_hour(np.full(config_eos.prediction_hours, 1))
|
eauto.set_charge_per_hour(np.full(config_eos.prediction.hours, 1))
|
||||||
|
devices_eos.add_device(eauto)
|
||||||
|
|
||||||
|
devices_eos.post_setup()
|
||||||
|
|
||||||
# Parameters based on previous example data
|
# Parameters based on previous example data
|
||||||
pv_prognose_wh = [
|
pv_prognose_wh = [
|
||||||
|
@@ -1,5 +1,3 @@
|
|||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -16,64 +14,61 @@ from akkudoktoreos.devices.battery import (
|
|||||||
)
|
)
|
||||||
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
|
from akkudoktoreos.devices.generic import HomeAppliance, HomeApplianceParameters
|
||||||
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
||||||
from akkudoktoreos.prediction.interpolator import SelfConsumptionProbabilityInterpolator
|
|
||||||
|
|
||||||
start_hour = 0
|
start_hour = 0
|
||||||
|
|
||||||
|
|
||||||
# Example initialization of necessary components
|
# Example initialization of necessary components
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def create_ems_instance(config_eos) -> EnergieManagementSystem:
|
def create_ems_instance(devices_eos, config_eos) -> EnergieManagementSystem:
|
||||||
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
"""Fixture to create an EnergieManagementSystem instance with given test parameters."""
|
||||||
# Assure configuration holds the correct values
|
# Assure configuration holds the correct values
|
||||||
config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 24})
|
config_eos.merge_settings_from_dict(
|
||||||
assert config_eos.prediction_hours is not None
|
{"prediction": {"hours": 48}, "optimization": {"hours": 24}}
|
||||||
|
)
|
||||||
|
assert config_eos.prediction.hours == 48
|
||||||
|
|
||||||
# Initialize the battery and the inverter
|
# Initialize the battery and the inverter
|
||||||
akku = Battery(
|
akku = Battery(
|
||||||
SolarPanelBatteryParameters(
|
SolarPanelBatteryParameters(
|
||||||
capacity_wh=5000, initial_soc_percentage=80, min_soc_percentage=10
|
device_id="pv1", capacity_wh=5000, initial_soc_percentage=80, min_soc_percentage=10
|
||||||
),
|
)
|
||||||
hours=config_eos.prediction_hours,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# 1h Load to Sub 1h Load Distribution -> SelfConsumptionRate
|
|
||||||
sc = SelfConsumptionProbabilityInterpolator(
|
|
||||||
Path(__file__).parent.resolve()
|
|
||||||
/ ".."
|
|
||||||
/ "src"
|
|
||||||
/ "akkudoktoreos"
|
|
||||||
/ "data"
|
|
||||||
/ "regular_grid_interpolator.pkl"
|
|
||||||
)
|
|
||||||
|
|
||||||
akku.reset()
|
akku.reset()
|
||||||
inverter = Inverter(sc, InverterParameters(max_power_wh=10000), akku)
|
devices_eos.add_device(akku)
|
||||||
|
|
||||||
|
inverter = Inverter(
|
||||||
|
InverterParameters(device_id="iv1", max_power_wh=10000, battery_id=akku.device_id)
|
||||||
|
)
|
||||||
|
devices_eos.add_device(inverter)
|
||||||
|
|
||||||
# Household device (currently not used, set to None)
|
# Household device (currently not used, set to None)
|
||||||
home_appliance = HomeAppliance(
|
home_appliance = HomeAppliance(
|
||||||
HomeApplianceParameters(
|
HomeApplianceParameters(
|
||||||
|
device_id="dishwasher1",
|
||||||
consumption_wh=2000,
|
consumption_wh=2000,
|
||||||
duration_h=2,
|
duration_h=2,
|
||||||
),
|
)
|
||||||
hours=config_eos.prediction_hours,
|
|
||||||
)
|
)
|
||||||
home_appliance.set_starting_time(2)
|
home_appliance.set_starting_time(2)
|
||||||
|
devices_eos.add_device(home_appliance)
|
||||||
|
|
||||||
# Example initialization of electric car battery
|
# Example initialization of electric car battery
|
||||||
eauto = Battery(
|
eauto = Battery(
|
||||||
ElectricVehicleParameters(
|
ElectricVehicleParameters(
|
||||||
capacity_wh=26400, initial_soc_percentage=100, min_soc_percentage=100
|
device_id="ev1", capacity_wh=26400, initial_soc_percentage=100, min_soc_percentage=100
|
||||||
),
|
),
|
||||||
hours=config_eos.prediction_hours,
|
|
||||||
)
|
)
|
||||||
|
devices_eos.add_device(eauto)
|
||||||
|
|
||||||
|
devices_eos.post_setup()
|
||||||
|
|
||||||
# Parameters based on previous example data
|
# Parameters based on previous example data
|
||||||
pv_prognose_wh = [0.0] * config_eos.prediction_hours
|
pv_prognose_wh = [0.0] * config_eos.prediction.hours
|
||||||
pv_prognose_wh[10] = 5000.0
|
pv_prognose_wh[10] = 5000.0
|
||||||
pv_prognose_wh[11] = 5000.0
|
pv_prognose_wh[11] = 5000.0
|
||||||
|
|
||||||
strompreis_euro_pro_wh = [0.001] * config_eos.prediction_hours
|
strompreis_euro_pro_wh = [0.001] * config_eos.prediction.hours
|
||||||
strompreis_euro_pro_wh[0:10] = [0.00001] * 10
|
strompreis_euro_pro_wh[0:10] = [0.00001] * 10
|
||||||
strompreis_euro_pro_wh[11:15] = [0.00005] * 4
|
strompreis_euro_pro_wh[11:15] = [0.00005] * 4
|
||||||
strompreis_euro_pro_wh[20] = 0.00001
|
strompreis_euro_pro_wh[20] = 0.00001
|
||||||
@@ -147,10 +142,10 @@ def create_ems_instance(config_eos) -> EnergieManagementSystem:
|
|||||||
home_appliance=home_appliance,
|
home_appliance=home_appliance,
|
||||||
)
|
)
|
||||||
|
|
||||||
ac = np.full(config_eos.prediction_hours, 0.0)
|
ac = np.full(config_eos.prediction.hours, 0.0)
|
||||||
ac[20] = 1
|
ac[20] = 1
|
||||||
ems.set_akku_ac_charge_hours(ac)
|
ems.set_akku_ac_charge_hours(ac)
|
||||||
dc = np.full(config_eos.prediction_hours, 0.0)
|
dc = np.full(config_eos.prediction.hours, 0.0)
|
||||||
dc[11] = 1
|
dc[11] = 1
|
||||||
ems.set_akku_dc_charge_hours(dc)
|
ems.set_akku_dc_charge_hours(dc)
|
||||||
|
|
||||||
@@ -274,7 +269,7 @@ def test_set_parameters(create_ems_instance):
|
|||||||
def test_set_akku_discharge_hours(create_ems_instance):
|
def test_set_akku_discharge_hours(create_ems_instance):
|
||||||
"""Test the set_akku_discharge_hours method of EnergieManagementSystem."""
|
"""Test the set_akku_discharge_hours method of EnergieManagementSystem."""
|
||||||
ems = create_ems_instance
|
ems = create_ems_instance
|
||||||
discharge_hours = np.full(ems.config.prediction_hours, 1.0)
|
discharge_hours = np.full(ems.config.prediction.hours, 1.0)
|
||||||
ems.set_akku_discharge_hours(discharge_hours)
|
ems.set_akku_discharge_hours(discharge_hours)
|
||||||
assert np.array_equal(
|
assert np.array_equal(
|
||||||
ems.battery.discharge_array, discharge_hours
|
ems.battery.discharge_array, discharge_hours
|
||||||
@@ -284,7 +279,7 @@ def test_set_akku_discharge_hours(create_ems_instance):
|
|||||||
def test_set_akku_ac_charge_hours(create_ems_instance):
|
def test_set_akku_ac_charge_hours(create_ems_instance):
|
||||||
"""Test the set_akku_ac_charge_hours method of EnergieManagementSystem."""
|
"""Test the set_akku_ac_charge_hours method of EnergieManagementSystem."""
|
||||||
ems = create_ems_instance
|
ems = create_ems_instance
|
||||||
ac_charge_hours = np.full(ems.config.prediction_hours, 1.0)
|
ac_charge_hours = np.full(ems.config.prediction.hours, 1.0)
|
||||||
ems.set_akku_ac_charge_hours(ac_charge_hours)
|
ems.set_akku_ac_charge_hours(ac_charge_hours)
|
||||||
assert np.array_equal(
|
assert np.array_equal(
|
||||||
ems.ac_charge_hours, ac_charge_hours
|
ems.ac_charge_hours, ac_charge_hours
|
||||||
@@ -294,7 +289,7 @@ def test_set_akku_ac_charge_hours(create_ems_instance):
|
|||||||
def test_set_akku_dc_charge_hours(create_ems_instance):
|
def test_set_akku_dc_charge_hours(create_ems_instance):
|
||||||
"""Test the set_akku_dc_charge_hours method of EnergieManagementSystem."""
|
"""Test the set_akku_dc_charge_hours method of EnergieManagementSystem."""
|
||||||
ems = create_ems_instance
|
ems = create_ems_instance
|
||||||
dc_charge_hours = np.full(ems.config.prediction_hours, 1.0)
|
dc_charge_hours = np.full(ems.config.prediction.hours, 1.0)
|
||||||
ems.set_akku_dc_charge_hours(dc_charge_hours)
|
ems.set_akku_dc_charge_hours(dc_charge_hours)
|
||||||
assert np.array_equal(
|
assert np.array_equal(
|
||||||
ems.dc_charge_hours, dc_charge_hours
|
ems.dc_charge_hours, dc_charge_hours
|
||||||
@@ -304,7 +299,7 @@ def test_set_akku_dc_charge_hours(create_ems_instance):
|
|||||||
def test_set_ev_charge_hours(create_ems_instance):
|
def test_set_ev_charge_hours(create_ems_instance):
|
||||||
"""Test the set_ev_charge_hours method of EnergieManagementSystem."""
|
"""Test the set_ev_charge_hours method of EnergieManagementSystem."""
|
||||||
ems = create_ems_instance
|
ems = create_ems_instance
|
||||||
ev_charge_hours = np.full(ems.config.prediction_hours, 1.0)
|
ev_charge_hours = np.full(ems.config.prediction.hours, 1.0)
|
||||||
ems.set_ev_charge_hours(ev_charge_hours)
|
ems.set_ev_charge_hours(ev_charge_hours)
|
||||||
assert np.array_equal(
|
assert np.array_equal(
|
||||||
ems.ev_charge_hours, ev_charge_hours
|
ems.ev_charge_hours, ev_charge_hours
|
||||||
|
@@ -49,7 +49,9 @@ def test_optimize(
|
|||||||
):
|
):
|
||||||
"""Test optimierung_ems."""
|
"""Test optimierung_ems."""
|
||||||
# Assure configuration holds the correct values
|
# Assure configuration holds the correct values
|
||||||
config_eos.merge_settings_from_dict({"prediction_hours": 48, "optimization_hours": 48})
|
config_eos.merge_settings_from_dict(
|
||||||
|
{"prediction": {"hours": 48}, "optimization": {"hours": 48}}
|
||||||
|
)
|
||||||
|
|
||||||
# Load input and output data
|
# Load input and output data
|
||||||
file = DIR_TESTDATA / fn_in
|
file = DIR_TESTDATA / fn_in
|
||||||
|
@@ -3,8 +3,9 @@ from pathlib import Path
|
|||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
from pydantic import ValidationError
|
||||||
|
|
||||||
from akkudoktoreos.config.config import ConfigEOS
|
from akkudoktoreos.config.config import ConfigEOS, GeneralSettings
|
||||||
from akkudoktoreos.core.logging import get_logger
|
from akkudoktoreos.core.logging import get_logger
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@@ -38,22 +39,26 @@ def test_config_constants(config_eos):
|
|||||||
|
|
||||||
def test_computed_paths(config_eos):
|
def test_computed_paths(config_eos):
|
||||||
"""Test computed paths for output and cache."""
|
"""Test computed paths for output and cache."""
|
||||||
config_eos.merge_settings_from_dict(
|
# Don't actually try to create the data folder
|
||||||
{
|
with patch("pathlib.Path.mkdir"):
|
||||||
"data_folder_path": "/base/data",
|
config_eos.merge_settings_from_dict(
|
||||||
"data_output_subpath": "output",
|
{
|
||||||
"data_cache_subpath": "cache",
|
"general": {
|
||||||
}
|
"data_folder_path": "/base/data",
|
||||||
)
|
"data_output_subpath": "extra/output",
|
||||||
assert config_eos.data_output_path == Path("/base/data/output")
|
"data_cache_subpath": "somewhere/cache",
|
||||||
assert config_eos.data_cache_path == Path("/base/data/cache")
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
assert config_eos.general.data_output_path == Path("/base/data/extra/output")
|
||||||
|
assert config_eos.general.data_cache_path == Path("/base/data/somewhere/cache")
|
||||||
# reset settings so the config_eos fixture can verify the default paths
|
# reset settings so the config_eos fixture can verify the default paths
|
||||||
config_eos.reset_settings()
|
config_eos.reset_settings()
|
||||||
|
|
||||||
|
|
||||||
def test_singleton_behavior(config_eos, config_default_dirs):
|
def test_singleton_behavior(config_eos, config_default_dirs):
|
||||||
"""Test that ConfigEOS behaves as a singleton."""
|
"""Test that ConfigEOS behaves as a singleton."""
|
||||||
initial_cfg_file = config_eos.config_file_path
|
initial_cfg_file = config_eos.general.config_file_path
|
||||||
with patch(
|
with patch(
|
||||||
"akkudoktoreos.config.config.user_config_dir", return_value=str(config_default_dirs[0])
|
"akkudoktoreos.config.config.user_config_dir", return_value=str(config_default_dirs[0])
|
||||||
):
|
):
|
||||||
@@ -61,7 +66,7 @@ def test_singleton_behavior(config_eos, config_default_dirs):
|
|||||||
instance2 = ConfigEOS()
|
instance2 = ConfigEOS()
|
||||||
assert instance1 is config_eos
|
assert instance1 is config_eos
|
||||||
assert instance1 is instance2
|
assert instance1 is instance2
|
||||||
assert instance1.config_file_path == initial_cfg_file
|
assert instance1.general.config_file_path == initial_cfg_file
|
||||||
|
|
||||||
|
|
||||||
def test_default_config_path(config_eos, config_default_dirs):
|
def test_default_config_path(config_eos, config_default_dirs):
|
||||||
@@ -82,13 +87,13 @@ def test_config_file_priority(config_default_dirs):
|
|||||||
config_file = Path(config_default_dir_cwd) / ConfigEOS.CONFIG_FILE_NAME
|
config_file = Path(config_default_dir_cwd) / ConfigEOS.CONFIG_FILE_NAME
|
||||||
config_file.write_text("{}")
|
config_file.write_text("{}")
|
||||||
config_eos = get_config()
|
config_eos = get_config()
|
||||||
assert config_eos.config_file_path == config_file
|
assert config_eos.general.config_file_path == config_file
|
||||||
|
|
||||||
config_file = Path(config_default_dir_user) / ConfigEOS.CONFIG_FILE_NAME
|
config_file = Path(config_default_dir_user) / ConfigEOS.CONFIG_FILE_NAME
|
||||||
config_file.parent.mkdir()
|
config_file.parent.mkdir()
|
||||||
config_file.write_text("{}")
|
config_file.write_text("{}")
|
||||||
config_eos = get_config()
|
config_eos.update()
|
||||||
assert config_eos.config_file_path == config_file
|
assert config_eos.general.config_file_path == config_file
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.config.config.user_config_dir")
|
@patch("akkudoktoreos.config.config.user_config_dir")
|
||||||
@@ -141,5 +146,69 @@ def test_config_copy(config_eos, monkeypatch):
|
|||||||
assert not temp_config_file_path.exists()
|
assert not temp_config_file_path.exists()
|
||||||
with patch("akkudoktoreos.config.config.user_config_dir", return_value=temp_dir):
|
with patch("akkudoktoreos.config.config.user_config_dir", return_value=temp_dir):
|
||||||
assert config_eos._get_config_file_path() == (temp_config_file_path, False)
|
assert config_eos._get_config_file_path() == (temp_config_file_path, False)
|
||||||
config_eos.from_config_file()
|
config_eos.update()
|
||||||
assert temp_config_file_path.exists()
|
assert temp_config_file_path.exists()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"latitude, longitude, expected_timezone",
|
||||||
|
[
|
||||||
|
(40.7128, -74.0060, "America/New_York"), # Valid latitude/longitude
|
||||||
|
(None, None, None), # No location
|
||||||
|
(51.5074, -0.1278, "Europe/London"), # Another valid location
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_config_common_settings_valid(latitude, longitude, expected_timezone):
|
||||||
|
"""Test valid settings for GeneralSettings."""
|
||||||
|
general_settings = GeneralSettings(
|
||||||
|
latitude=latitude,
|
||||||
|
longitude=longitude,
|
||||||
|
)
|
||||||
|
assert general_settings.latitude == latitude
|
||||||
|
assert general_settings.longitude == longitude
|
||||||
|
assert general_settings.timezone == expected_timezone
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"field_name, invalid_value, expected_error",
|
||||||
|
[
|
||||||
|
("latitude", -91.0, "Input should be greater than or equal to -90"),
|
||||||
|
("latitude", 91.0, "Input should be less than or equal to 90"),
|
||||||
|
("longitude", -181.0, "Input should be greater than or equal to -180"),
|
||||||
|
("longitude", 181.0, "Input should be less than or equal to 180"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_config_common_settings_invalid(field_name, invalid_value, expected_error):
|
||||||
|
"""Test invalid settings for PredictionCommonSettings."""
|
||||||
|
valid_data = {
|
||||||
|
"latitude": 40.7128,
|
||||||
|
"longitude": -74.0060,
|
||||||
|
}
|
||||||
|
assert GeneralSettings(**valid_data) is not None
|
||||||
|
valid_data[field_name] = invalid_value
|
||||||
|
|
||||||
|
with pytest.raises(ValidationError, match=expected_error):
|
||||||
|
GeneralSettings(**valid_data)
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_common_settings_no_location():
|
||||||
|
"""Test that timezone is None when latitude and longitude are not provided."""
|
||||||
|
settings = GeneralSettings(latitude=None, longitude=None)
|
||||||
|
assert settings.timezone is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_common_settings_with_location():
|
||||||
|
"""Test that timezone is correctly computed when latitude and longitude are provided."""
|
||||||
|
settings = GeneralSettings(latitude=34.0522, longitude=-118.2437)
|
||||||
|
assert settings.timezone == "America/Los_Angeles"
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_common_settings_timezone_none_when_coordinates_missing():
|
||||||
|
"""Test that timezone is None when latitude or longitude is missing."""
|
||||||
|
config_no_latitude = GeneralSettings(latitude=None, longitude=-74.0060)
|
||||||
|
config_no_longitude = GeneralSettings(latitude=40.7128, longitude=None)
|
||||||
|
config_no_coords = GeneralSettings(latitude=None, longitude=None)
|
||||||
|
|
||||||
|
assert config_no_latitude.timezone is None
|
||||||
|
assert config_no_longitude.timezone is None
|
||||||
|
assert config_no_coords.timezone is None
|
||||||
|
@@ -535,7 +535,7 @@ class TestDataSequence:
|
|||||||
json_str = sequence.to_json()
|
json_str = sequence.to_json()
|
||||||
assert isinstance(json_str, str)
|
assert isinstance(json_str, str)
|
||||||
assert "2023-11-06" in json_str
|
assert "2023-11-06" in json_str
|
||||||
assert ":0.8" in json_str
|
assert ": 0.8" in json_str
|
||||||
|
|
||||||
def test_from_json(self, sequence, sequence2):
|
def test_from_json(self, sequence, sequence2):
|
||||||
json_str = sequence2.to_json()
|
json_str = sequence2.to_json()
|
||||||
|
@@ -86,7 +86,7 @@ def test_config_md_current(config_eos):
|
|||||||
sys.path.insert(0, str(root_dir))
|
sys.path.insert(0, str(root_dir))
|
||||||
from scripts import generate_config_md
|
from scripts import generate_config_md
|
||||||
|
|
||||||
config_md = generate_config_md.generate_config_md()
|
config_md = generate_config_md.generate_config_md(config_eos)
|
||||||
|
|
||||||
with open(new_config_md_path, "w", encoding="utf8") as f_new:
|
with open(new_config_md_path, "w", encoding="utf8") as f_new:
|
||||||
f_new.write(config_md)
|
f_new.write(config_md)
|
||||||
|
@@ -23,9 +23,10 @@ FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON = DIR_TESTDATA.joinpath(
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def elecprice_provider(monkeypatch):
|
def provider(monkeypatch, config_eos):
|
||||||
"""Fixture to create a ElecPriceProvider instance."""
|
"""Fixture to create a ElecPriceProvider instance."""
|
||||||
monkeypatch.setenv("elecprice_provider", "ElecPriceAkkudoktor")
|
monkeypatch.setenv("EOS_ELECPRICE__ELECPRICE_PROVIDER", "ElecPriceAkkudoktor")
|
||||||
|
config_eos.reset_settings()
|
||||||
return ElecPriceAkkudoktor()
|
return ElecPriceAkkudoktor()
|
||||||
|
|
||||||
|
|
||||||
@@ -48,17 +49,17 @@ def cache_store():
|
|||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
def test_singleton_instance(elecprice_provider):
|
def test_singleton_instance(provider):
|
||||||
"""Test that ElecPriceForecast behaves as a singleton."""
|
"""Test that ElecPriceForecast behaves as a singleton."""
|
||||||
another_instance = ElecPriceAkkudoktor()
|
another_instance = ElecPriceAkkudoktor()
|
||||||
assert elecprice_provider is another_instance
|
assert provider is another_instance
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_provider(elecprice_provider, monkeypatch):
|
def test_invalid_provider(provider, monkeypatch):
|
||||||
"""Test requesting an unsupported elecprice_provider."""
|
"""Test requesting an unsupported provider."""
|
||||||
monkeypatch.setenv("elecprice_provider", "<invalid>")
|
monkeypatch.setenv("EOS_ELECPRICE__ELECPRICE_PROVIDER", "<invalid>")
|
||||||
elecprice_provider.config.update()
|
provider.config.reset_settings()
|
||||||
assert elecprice_provider.enabled() == False
|
assert not provider.enabled()
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
@@ -67,16 +68,16 @@ def test_invalid_provider(elecprice_provider, monkeypatch):
|
|||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.prediction.elecpriceakkudoktor.logger.error")
|
@patch("akkudoktoreos.prediction.elecpriceakkudoktor.logger.error")
|
||||||
def test_validate_data_invalid_format(mock_logger, elecprice_provider):
|
def test_validate_data_invalid_format(mock_logger, provider):
|
||||||
"""Test validation for invalid Akkudoktor data."""
|
"""Test validation for invalid Akkudoktor data."""
|
||||||
invalid_data = '{"invalid": "data"}'
|
invalid_data = '{"invalid": "data"}'
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
elecprice_provider._validate_data(invalid_data)
|
provider._validate_data(invalid_data)
|
||||||
mock_logger.assert_called_once_with(mock_logger.call_args[0][0])
|
mock_logger.assert_called_once_with(mock_logger.call_args[0][0])
|
||||||
|
|
||||||
|
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json):
|
def test_request_forecast(mock_get, provider, sample_akkudoktor_1_json):
|
||||||
"""Test requesting forecast from Akkudoktor."""
|
"""Test requesting forecast from Akkudoktor."""
|
||||||
# Mock response object
|
# Mock response object
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@@ -85,10 +86,10 @@ def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json
|
|||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
|
|
||||||
# Preset, as this is usually done by update()
|
# Preset, as this is usually done by update()
|
||||||
elecprice_provider.config.update()
|
provider.config.update()
|
||||||
|
|
||||||
# Test function
|
# Test function
|
||||||
akkudoktor_data = elecprice_provider._request_forecast()
|
akkudoktor_data = provider._request_forecast()
|
||||||
|
|
||||||
assert isinstance(akkudoktor_data, AkkudoktorElecPrice)
|
assert isinstance(akkudoktor_data, AkkudoktorElecPrice)
|
||||||
assert akkudoktor_data.values[0] == AkkudoktorElecPriceValue(
|
assert akkudoktor_data.values[0] == AkkudoktorElecPriceValue(
|
||||||
@@ -103,7 +104,7 @@ def test_request_forecast(mock_get, elecprice_provider, sample_akkudoktor_1_json
|
|||||||
|
|
||||||
|
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_update_data(mock_get, elecprice_provider, sample_akkudoktor_1_json, cache_store):
|
def test_update_data(mock_get, provider, sample_akkudoktor_1_json, cache_store):
|
||||||
"""Test fetching forecast from Akkudoktor."""
|
"""Test fetching forecast from Akkudoktor."""
|
||||||
# Mock response object
|
# Mock response object
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@@ -116,28 +117,28 @@ def test_update_data(mock_get, elecprice_provider, sample_akkudoktor_1_json, cac
|
|||||||
# Call the method
|
# Call the method
|
||||||
ems_eos = get_ems()
|
ems_eos = get_ems()
|
||||||
ems_eos.set_start_datetime(to_datetime("2024-12-11 00:00:00", in_timezone="Europe/Berlin"))
|
ems_eos.set_start_datetime(to_datetime("2024-12-11 00:00:00", in_timezone="Europe/Berlin"))
|
||||||
elecprice_provider.update_data(force_enable=True, force_update=True)
|
provider.update_data(force_enable=True, force_update=True)
|
||||||
|
|
||||||
# Assert: Verify the result is as expected
|
# Assert: Verify the result is as expected
|
||||||
mock_get.assert_called_once()
|
mock_get.assert_called_once()
|
||||||
assert (
|
assert (
|
||||||
len(elecprice_provider) == 73
|
len(provider) == 73
|
||||||
) # we have 48 datasets in the api response, we want to know 48h into the future. The data we get has already 23h into the future so we need only 25h more. 48+25=73
|
) # we have 48 datasets in the api response, we want to know 48h into the future. The data we get has already 23h into the future so we need only 25h more. 48+25=73
|
||||||
|
|
||||||
# Assert we get prediction_hours prioce values by resampling
|
# Assert we get hours prioce values by resampling
|
||||||
np_price_array = elecprice_provider.key_to_array(
|
np_price_array = provider.key_to_array(
|
||||||
key="elecprice_marketprice_wh",
|
key="elecprice_marketprice_wh",
|
||||||
start_datetime=elecprice_provider.start_datetime,
|
start_datetime=provider.start_datetime,
|
||||||
end_datetime=elecprice_provider.end_datetime,
|
end_datetime=provider.end_datetime,
|
||||||
)
|
)
|
||||||
assert len(np_price_array) == elecprice_provider.total_hours
|
assert len(np_price_array) == provider.total_hours
|
||||||
|
|
||||||
# with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_2_JSON, "w") as f_out:
|
# with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_2_JSON, "w") as f_out:
|
||||||
# f_out.write(elecprice_provider.to_json())
|
# f_out.write(provider.to_json())
|
||||||
|
|
||||||
|
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_update_data_with_incomplete_forecast(mock_get, elecprice_provider):
|
def test_update_data_with_incomplete_forecast(mock_get, provider):
|
||||||
"""Test `_update_data` with incomplete or missing forecast data."""
|
"""Test `_update_data` with incomplete or missing forecast data."""
|
||||||
incomplete_data: dict = {"meta": {}, "values": []}
|
incomplete_data: dict = {"meta": {}, "values": []}
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@@ -145,7 +146,7 @@ def test_update_data_with_incomplete_forecast(mock_get, elecprice_provider):
|
|||||||
mock_response.content = json.dumps(incomplete_data)
|
mock_response.content = json.dumps(incomplete_data)
|
||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
elecprice_provider._update_data(force_update=True)
|
provider._update_data(force_update=True)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@@ -154,7 +155,7 @@ def test_update_data_with_incomplete_forecast(mock_get, elecprice_provider):
|
|||||||
)
|
)
|
||||||
@patch("requests.get")
|
@patch("requests.get")
|
||||||
def test_request_forecast_status_codes(
|
def test_request_forecast_status_codes(
|
||||||
mock_get, elecprice_provider, sample_akkudoktor_1_json, status_code, exception
|
mock_get, provider, sample_akkudoktor_1_json, status_code, exception
|
||||||
):
|
):
|
||||||
"""Test handling of various API status codes."""
|
"""Test handling of various API status codes."""
|
||||||
mock_response = Mock()
|
mock_response = Mock()
|
||||||
@@ -166,31 +167,31 @@ def test_request_forecast_status_codes(
|
|||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
if exception:
|
if exception:
|
||||||
with pytest.raises(exception):
|
with pytest.raises(exception):
|
||||||
elecprice_provider._request_forecast()
|
provider._request_forecast()
|
||||||
else:
|
else:
|
||||||
elecprice_provider._request_forecast()
|
provider._request_forecast()
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.utils.cacheutil.CacheFileStore")
|
@patch("akkudoktoreos.utils.cacheutil.CacheFileStore")
|
||||||
def test_cache_integration(mock_cache, elecprice_provider):
|
def test_cache_integration(mock_cache, provider):
|
||||||
"""Test caching of 8-day electricity price data."""
|
"""Test caching of 8-day electricity price data."""
|
||||||
mock_cache_instance = mock_cache.return_value
|
mock_cache_instance = mock_cache.return_value
|
||||||
mock_cache_instance.get.return_value = None # Simulate no cache
|
mock_cache_instance.get.return_value = None # Simulate no cache
|
||||||
elecprice_provider._update_data(force_update=True)
|
provider._update_data(force_update=True)
|
||||||
mock_cache_instance.create.assert_called_once()
|
mock_cache_instance.create.assert_called_once()
|
||||||
mock_cache_instance.get.assert_called_once()
|
mock_cache_instance.get.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
def test_key_to_array_resampling(elecprice_provider):
|
def test_key_to_array_resampling(provider):
|
||||||
"""Test resampling of forecast data to NumPy array."""
|
"""Test resampling of forecast data to NumPy array."""
|
||||||
elecprice_provider.update_data(force_update=True)
|
provider.update_data(force_update=True)
|
||||||
array = elecprice_provider.key_to_array(
|
array = provider.key_to_array(
|
||||||
key="elecprice_marketprice_wh",
|
key="elecprice_marketprice_wh",
|
||||||
start_datetime=elecprice_provider.start_datetime,
|
start_datetime=provider.start_datetime,
|
||||||
end_datetime=elecprice_provider.end_datetime,
|
end_datetime=provider.end_datetime,
|
||||||
)
|
)
|
||||||
assert isinstance(array, np.ndarray)
|
assert isinstance(array, np.ndarray)
|
||||||
assert len(array) == elecprice_provider.total_hours
|
assert len(array) == provider.total_hours
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
@@ -199,12 +200,12 @@ def test_key_to_array_resampling(elecprice_provider):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.skip(reason="For development only")
|
@pytest.mark.skip(reason="For development only")
|
||||||
def test_akkudoktor_development_forecast_data(elecprice_provider):
|
def test_akkudoktor_development_forecast_data(provider):
|
||||||
"""Fetch data from real Akkudoktor server."""
|
"""Fetch data from real Akkudoktor server."""
|
||||||
# Preset, as this is usually done by update_data()
|
# Preset, as this is usually done by update_data()
|
||||||
elecprice_provider.start_datetime = to_datetime("2024-10-26 00:00:00")
|
provider.start_datetime = to_datetime("2024-10-26 00:00:00")
|
||||||
|
|
||||||
akkudoktor_data = elecprice_provider._request_forecast()
|
akkudoktor_data = provider._request_forecast()
|
||||||
|
|
||||||
with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
|
with open(FILE_TESTDATA_ELECPRICEAKKUDOKTOR_1_JSON, "w") as f_out:
|
||||||
json.dump(akkudoktor_data, f_out, indent=4)
|
json.dump(akkudoktor_data, f_out, indent=4)
|
||||||
|
@@ -13,12 +13,16 @@ FILE_TESTDATA_ELECPRICEIMPORT_1_JSON = DIR_TESTDATA.joinpath("import_input_1.jso
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def elecprice_provider(sample_import_1_json, config_eos):
|
def provider(sample_import_1_json, config_eos):
|
||||||
"""Fixture to create a ElecPriceProvider instance."""
|
"""Fixture to create a ElecPriceProvider instance."""
|
||||||
settings = {
|
settings = {
|
||||||
"elecprice_provider": "ElecPriceImport",
|
"elecprice": {
|
||||||
"elecpriceimport_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
"provider": "ElecPriceImport",
|
||||||
"elecpriceimport_json": json.dumps(sample_import_1_json),
|
"provider_settings": {
|
||||||
|
"import_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
||||||
|
"import_json": json.dumps(sample_import_1_json),
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
config_eos.merge_settings_from_dict(settings)
|
config_eos.merge_settings_from_dict(settings)
|
||||||
provider = ElecPriceImport()
|
provider = ElecPriceImport()
|
||||||
@@ -39,20 +43,24 @@ def sample_import_1_json():
|
|||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
def test_singleton_instance(elecprice_provider):
|
def test_singleton_instance(provider):
|
||||||
"""Test that ElecPriceForecast behaves as a singleton."""
|
"""Test that ElecPriceForecast behaves as a singleton."""
|
||||||
another_instance = ElecPriceImport()
|
another_instance = ElecPriceImport()
|
||||||
assert elecprice_provider is another_instance
|
assert provider is another_instance
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_provider(elecprice_provider, config_eos):
|
def test_invalid_provider(provider, config_eos):
|
||||||
"""Test requesting an unsupported elecprice_provider."""
|
"""Test requesting an unsupported provider."""
|
||||||
settings = {
|
settings = {
|
||||||
"elecprice_provider": "<invalid>",
|
"elecprice": {
|
||||||
"elecpriceimport_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
"provider": "<invalid>",
|
||||||
|
"provider_settings": {
|
||||||
|
"import_file_path": str(FILE_TESTDATA_ELECPRICEIMPORT_1_JSON),
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
config_eos.merge_settings_from_dict(settings)
|
config_eos.merge_settings_from_dict(settings)
|
||||||
assert not elecprice_provider.enabled()
|
assert not provider.enabled()
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------
|
# ------------------------------------------------
|
||||||
@@ -73,35 +81,33 @@ def test_invalid_provider(elecprice_provider, config_eos):
|
|||||||
("2024-10-27 00:00:00", False), # DST change in Germany (25 hours/ day)
|
("2024-10-27 00:00:00", False), # DST change in Germany (25 hours/ day)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_import(elecprice_provider, sample_import_1_json, start_datetime, from_file, config_eos):
|
def test_import(provider, sample_import_1_json, start_datetime, from_file, config_eos):
|
||||||
"""Test fetching forecast from Import."""
|
"""Test fetching forecast from Import."""
|
||||||
ems_eos = get_ems()
|
ems_eos = get_ems()
|
||||||
ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
|
ems_eos.set_start_datetime(to_datetime(start_datetime, in_timezone="Europe/Berlin"))
|
||||||
if from_file:
|
if from_file:
|
||||||
config_eos.elecpriceimport_json = None
|
config_eos.elecprice.provider_settings.import_json = None
|
||||||
assert config_eos.elecpriceimport_json is None
|
assert config_eos.elecprice.provider_settings.import_json is None
|
||||||
else:
|
else:
|
||||||
config_eos.elecpriceimport_file_path = None
|
config_eos.elecprice.provider_settings.import_file_path = None
|
||||||
assert config_eos.elecpriceimport_file_path is None
|
assert config_eos.elecprice.provider_settings.import_file_path is None
|
||||||
elecprice_provider.clear()
|
provider.clear()
|
||||||
|
|
||||||
# Call the method
|
# Call the method
|
||||||
elecprice_provider.update_data()
|
provider.update_data()
|
||||||
|
|
||||||
# Assert: Verify the result is as expected
|
# Assert: Verify the result is as expected
|
||||||
assert elecprice_provider.start_datetime is not None
|
assert provider.start_datetime is not None
|
||||||
assert elecprice_provider.total_hours is not None
|
assert provider.total_hours is not None
|
||||||
assert compare_datetimes(elecprice_provider.start_datetime, ems_eos.start_datetime).equal
|
assert compare_datetimes(provider.start_datetime, ems_eos.start_datetime).equal
|
||||||
values = sample_import_1_json["elecprice_marketprice_wh"]
|
values = sample_import_1_json["elecprice_marketprice_wh"]
|
||||||
value_datetime_mapping = elecprice_provider.import_datetimes(
|
value_datetime_mapping = provider.import_datetimes(ems_eos.start_datetime, len(values))
|
||||||
ems_eos.start_datetime, len(values)
|
|
||||||
)
|
|
||||||
for i, mapping in enumerate(value_datetime_mapping):
|
for i, mapping in enumerate(value_datetime_mapping):
|
||||||
assert i < len(elecprice_provider.records)
|
assert i < len(provider.records)
|
||||||
expected_datetime, expected_value_index = mapping
|
expected_datetime, expected_value_index = mapping
|
||||||
expected_value = values[expected_value_index]
|
expected_value = values[expected_value_index]
|
||||||
result_datetime = elecprice_provider.records[i].date_time
|
result_datetime = provider.records[i].date_time
|
||||||
result_value = elecprice_provider.records[i]["elecprice_marketprice_wh"]
|
result_value = provider.records[i]["elecprice_marketprice_wh"]
|
||||||
|
|
||||||
# print(f"{i}: Expected: {expected_datetime}:{expected_value}")
|
# print(f"{i}: Expected: {expected_datetime}:{expected_value}")
|
||||||
# print(f"{i}: Result: {result_datetime}:{result_value}")
|
# print(f"{i}: Result: {result_datetime}:{result_value}")
|
||||||
|
@@ -1,4 +1,4 @@
|
|||||||
from unittest.mock import Mock
|
from unittest.mock import Mock, patch
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -6,22 +6,31 @@ from akkudoktoreos.devices.inverter import Inverter, InverterParameters
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mock_battery():
|
def mock_battery() -> Mock:
|
||||||
mock_battery = Mock()
|
mock_battery = Mock()
|
||||||
mock_battery.charge_energy = Mock(return_value=(0.0, 0.0))
|
mock_battery.charge_energy = Mock(return_value=(0.0, 0.0))
|
||||||
mock_battery.discharge_energy = Mock(return_value=(0.0, 0.0))
|
mock_battery.discharge_energy = Mock(return_value=(0.0, 0.0))
|
||||||
|
mock_battery.device_id = "battery1"
|
||||||
return mock_battery
|
return mock_battery
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def inverter(mock_battery):
|
def inverter(mock_battery, devices_eos) -> Inverter:
|
||||||
|
devices_eos.add_device(mock_battery)
|
||||||
mock_self_consumption_predictor = Mock()
|
mock_self_consumption_predictor = Mock()
|
||||||
mock_self_consumption_predictor.calculate_self_consumption.return_value = 1.0
|
mock_self_consumption_predictor.calculate_self_consumption.return_value = 1.0
|
||||||
return Inverter(
|
with patch(
|
||||||
mock_self_consumption_predictor,
|
"akkudoktoreos.devices.inverter.get_eos_load_interpolator",
|
||||||
InverterParameters(max_power_wh=500.0),
|
return_value=mock_self_consumption_predictor,
|
||||||
battery=mock_battery,
|
):
|
||||||
)
|
iv = Inverter(
|
||||||
|
InverterParameters(
|
||||||
|
device_id="iv1", max_power_wh=500.0, battery_id=mock_battery.device_id
|
||||||
|
),
|
||||||
|
)
|
||||||
|
devices_eos.add_device(iv)
|
||||||
|
devices_eos.post_setup()
|
||||||
|
return iv
|
||||||
|
|
||||||
|
|
||||||
def test_process_energy_excess_generation(inverter, mock_battery):
|
def test_process_energy_excess_generation(inverter, mock_battery):
|
||||||
|
@@ -14,12 +14,16 @@ from akkudoktoreos.utils.datetimeutil import compare_datetimes, to_datetime, to_
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def load_provider(config_eos):
|
def provider(config_eos):
|
||||||
"""Fixture to initialise the LoadAkkudoktor instance."""
|
"""Fixture to initialise the LoadAkkudoktor instance."""
|
||||||
settings = {
|
settings = {
|
||||||
"load_provider": "LoadAkkudoktor",
|
"load": {
|
||||||
"load_name": "Akkudoktor Profile",
|
"provider": "LoadAkkudoktor",
|
||||||
"loadakkudoktor_year_energy": "1000",
|
"provider_settings": {
|
||||||
|
"load_name": "Akkudoktor Profile",
|
||||||
|
"loadakkudoktor_year_energy": "1000",
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
config_eos.merge_settings_from_dict(settings)
|
config_eos.merge_settings_from_dict(settings)
|
||||||
return LoadAkkudoktor()
|
return LoadAkkudoktor()
|
||||||
@@ -37,8 +41,8 @@ def measurement_eos():
|
|||||||
measurement.records.append(
|
measurement.records.append(
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=dt,
|
date_time=dt,
|
||||||
measurement_load0_mr=load0_mr,
|
load0_mr=load0_mr,
|
||||||
measurement_load1_mr=load1_mr,
|
load1_mr=load1_mr,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
dt += interval
|
dt += interval
|
||||||
@@ -72,13 +76,13 @@ def test_loadakkudoktor_settings_validator():
|
|||||||
assert settings.loadakkudoktor_year_energy == 1234.56
|
assert settings.loadakkudoktor_year_energy == 1234.56
|
||||||
|
|
||||||
|
|
||||||
def test_loadakkudoktor_provider_id(load_provider):
|
def test_loadakkudoktor_provider_id(provider):
|
||||||
"""Test the `provider_id` class method."""
|
"""Test the `provider_id` class method."""
|
||||||
assert load_provider.provider_id() == "LoadAkkudoktor"
|
assert provider.provider_id() == "LoadAkkudoktor"
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.prediction.loadakkudoktor.np.load")
|
@patch("akkudoktoreos.prediction.loadakkudoktor.np.load")
|
||||||
def test_load_data_from_mock(mock_np_load, mock_load_profiles_file, load_provider):
|
def test_load_data_from_mock(mock_np_load, mock_load_profiles_file, provider):
|
||||||
"""Test the `load_data` method."""
|
"""Test the `load_data` method."""
|
||||||
# Mock numpy load to return data similar to what would be in the file
|
# Mock numpy load to return data similar to what would be in the file
|
||||||
mock_np_load.return_value = {
|
mock_np_load.return_value = {
|
||||||
@@ -87,19 +91,19 @@ def test_load_data_from_mock(mock_np_load, mock_load_profiles_file, load_provide
|
|||||||
}
|
}
|
||||||
|
|
||||||
# Test data loading
|
# Test data loading
|
||||||
data_year_energy = load_provider.load_data()
|
data_year_energy = provider.load_data()
|
||||||
assert data_year_energy is not None
|
assert data_year_energy is not None
|
||||||
assert data_year_energy.shape == (365, 2, 24)
|
assert data_year_energy.shape == (365, 2, 24)
|
||||||
|
|
||||||
|
|
||||||
def test_load_data_from_file(load_provider):
|
def test_load_data_from_file(provider):
|
||||||
"""Test `load_data` loads data from the profiles file."""
|
"""Test `load_data` loads data from the profiles file."""
|
||||||
data_year_energy = load_provider.load_data()
|
data_year_energy = provider.load_data()
|
||||||
assert data_year_energy is not None
|
assert data_year_energy is not None
|
||||||
|
|
||||||
|
|
||||||
@patch("akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor.load_data")
|
@patch("akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor.load_data")
|
||||||
def test_update_data(mock_load_data, load_provider):
|
def test_update_data(mock_load_data, provider):
|
||||||
"""Test the `_update` method."""
|
"""Test the `_update` method."""
|
||||||
mock_load_data.return_value = np.random.rand(365, 2, 24)
|
mock_load_data.return_value = np.random.rand(365, 2, 24)
|
||||||
|
|
||||||
@@ -108,27 +112,27 @@ def test_update_data(mock_load_data, load_provider):
|
|||||||
ems_eos.set_start_datetime(pendulum.datetime(2024, 1, 1))
|
ems_eos.set_start_datetime(pendulum.datetime(2024, 1, 1))
|
||||||
|
|
||||||
# Assure there are no prediction records
|
# Assure there are no prediction records
|
||||||
load_provider.clear()
|
provider.clear()
|
||||||
assert len(load_provider) == 0
|
assert len(provider) == 0
|
||||||
|
|
||||||
# Execute the method
|
# Execute the method
|
||||||
load_provider._update_data()
|
provider._update_data()
|
||||||
|
|
||||||
# Validate that update_value is called
|
# Validate that update_value is called
|
||||||
assert len(load_provider) > 0
|
assert len(provider) > 0
|
||||||
|
|
||||||
|
|
||||||
def test_calculate_adjustment(load_provider, measurement_eos):
|
def test_calculate_adjustment(provider, measurement_eos):
|
||||||
"""Test `_calculate_adjustment` for various scenarios."""
|
"""Test `_calculate_adjustment` for various scenarios."""
|
||||||
data_year_energy = np.random.rand(365, 2, 24)
|
data_year_energy = np.random.rand(365, 2, 24)
|
||||||
|
|
||||||
# Call the method and validate results
|
# Call the method and validate results
|
||||||
weekday_adjust, weekend_adjust = load_provider._calculate_adjustment(data_year_energy)
|
weekday_adjust, weekend_adjust = provider._calculate_adjustment(data_year_energy)
|
||||||
assert weekday_adjust.shape == (24,)
|
assert weekday_adjust.shape == (24,)
|
||||||
assert weekend_adjust.shape == (24,)
|
assert weekend_adjust.shape == (24,)
|
||||||
|
|
||||||
data_year_energy = np.zeros((365, 2, 24))
|
data_year_energy = np.zeros((365, 2, 24))
|
||||||
weekday_adjust, weekend_adjust = load_provider._calculate_adjustment(data_year_energy)
|
weekday_adjust, weekend_adjust = provider._calculate_adjustment(data_year_energy)
|
||||||
|
|
||||||
assert weekday_adjust.shape == (24,)
|
assert weekday_adjust.shape == (24,)
|
||||||
expected = np.array(
|
expected = np.array(
|
||||||
@@ -193,7 +197,7 @@ def test_calculate_adjustment(load_provider, measurement_eos):
|
|||||||
np.testing.assert_array_equal(weekend_adjust, expected)
|
np.testing.assert_array_equal(weekend_adjust, expected)
|
||||||
|
|
||||||
|
|
||||||
def test_load_provider_adjustments_with_mock_data(load_provider):
|
def test_provider_adjustments_with_mock_data(provider):
|
||||||
"""Test full integration of adjustments with mock data."""
|
"""Test full integration of adjustments with mock data."""
|
||||||
with patch(
|
with patch(
|
||||||
"akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor._calculate_adjustment"
|
"akkudoktoreos.prediction.loadakkudoktor.LoadAkkudoktor._calculate_adjustment"
|
||||||
@@ -201,5 +205,5 @@ def test_load_provider_adjustments_with_mock_data(load_provider):
|
|||||||
mock_adjust.return_value = (np.zeros(24), np.zeros(24))
|
mock_adjust.return_value = (np.zeros(24), np.zeros(24))
|
||||||
|
|
||||||
# Test execution
|
# Test execution
|
||||||
load_provider._update_data()
|
provider._update_data()
|
||||||
assert mock_adjust.called
|
assert mock_adjust.called
|
||||||
|
@@ -3,7 +3,11 @@ import pytest
|
|||||||
from pendulum import datetime, duration
|
from pendulum import datetime, duration
|
||||||
|
|
||||||
from akkudoktoreos.config.config import SettingsEOS
|
from akkudoktoreos.config.config import SettingsEOS
|
||||||
from akkudoktoreos.measurement.measurement import MeasurementDataRecord, get_measurement
|
from akkudoktoreos.measurement.measurement import (
|
||||||
|
MeasurementCommonSettings,
|
||||||
|
MeasurementDataRecord,
|
||||||
|
get_measurement,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
@@ -13,33 +17,33 @@ def measurement_eos():
|
|||||||
measurement.records = [
|
measurement.records = [
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=0),
|
date_time=datetime(2023, 1, 1, hour=0),
|
||||||
measurement_load0_mr=100,
|
load0_mr=100,
|
||||||
measurement_load1_mr=200,
|
load1_mr=200,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=1),
|
date_time=datetime(2023, 1, 1, hour=1),
|
||||||
measurement_load0_mr=150,
|
load0_mr=150,
|
||||||
measurement_load1_mr=250,
|
load1_mr=250,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=2),
|
date_time=datetime(2023, 1, 1, hour=2),
|
||||||
measurement_load0_mr=200,
|
load0_mr=200,
|
||||||
measurement_load1_mr=300,
|
load1_mr=300,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=3),
|
date_time=datetime(2023, 1, 1, hour=3),
|
||||||
measurement_load0_mr=250,
|
load0_mr=250,
|
||||||
measurement_load1_mr=350,
|
load1_mr=350,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=4),
|
date_time=datetime(2023, 1, 1, hour=4),
|
||||||
measurement_load0_mr=300,
|
load0_mr=300,
|
||||||
measurement_load1_mr=400,
|
load1_mr=400,
|
||||||
),
|
),
|
||||||
MeasurementDataRecord(
|
MeasurementDataRecord(
|
||||||
date_time=datetime(2023, 1, 1, hour=5),
|
date_time=datetime(2023, 1, 1, hour=5),
|
||||||
measurement_load0_mr=350,
|
load0_mr=350,
|
||||||
measurement_load1_mr=450,
|
load1_mr=450,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
return measurement
|
return measurement
|
||||||
@@ -75,7 +79,7 @@ def test_interval_count_invalid_non_positive_interval(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_valid_input(measurement_eos):
|
def test_energy_from_meter_readings_valid_input(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with valid inputs and proper alignment of load data."""
|
"""Test _energy_from_meter_readings with valid inputs and proper alignment of load data."""
|
||||||
key = "measurement_load0_mr"
|
key = "load0_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@@ -90,7 +94,7 @@ def test_energy_from_meter_readings_valid_input(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_empty_array(measurement_eos):
|
def test_energy_from_meter_readings_empty_array(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with no data (empty array)."""
|
"""Test _energy_from_meter_readings with no data (empty array)."""
|
||||||
key = "measurement_load0_mr"
|
key = "load0_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@@ -112,7 +116,7 @@ def test_energy_from_meter_readings_empty_array(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_misaligned_array(measurement_eos):
|
def test_energy_from_meter_readings_misaligned_array(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with misaligned array size."""
|
"""Test _energy_from_meter_readings with misaligned array size."""
|
||||||
key = "measurement_load1_mr"
|
key = "load1_mr"
|
||||||
start_datetime = measurement_eos.min_datetime
|
start_datetime = measurement_eos.min_datetime
|
||||||
end_datetime = measurement_eos.max_datetime
|
end_datetime = measurement_eos.max_datetime
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@@ -130,7 +134,7 @@ def test_energy_from_meter_readings_misaligned_array(measurement_eos):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_partial_data(measurement_eos, caplog):
|
def test_energy_from_meter_readings_partial_data(measurement_eos, caplog):
|
||||||
"""Test _energy_from_meter_readings with partial data (misaligned but empty array)."""
|
"""Test _energy_from_meter_readings with partial data (misaligned but empty array)."""
|
||||||
key = "measurement_load2_mr"
|
key = "load2_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=1)
|
interval = duration(hours=1)
|
||||||
@@ -149,7 +153,7 @@ def test_energy_from_meter_readings_partial_data(measurement_eos, caplog):
|
|||||||
|
|
||||||
def test_energy_from_meter_readings_negative_interval(measurement_eos):
|
def test_energy_from_meter_readings_negative_interval(measurement_eos):
|
||||||
"""Test _energy_from_meter_readings with a negative interval."""
|
"""Test _energy_from_meter_readings with a negative interval."""
|
||||||
key = "measurement_load3_mr"
|
key = "load3_mr"
|
||||||
start_datetime = datetime(2023, 1, 1, 0)
|
start_datetime = datetime(2023, 1, 1, 0)
|
||||||
end_datetime = datetime(2023, 1, 1, 5)
|
end_datetime = datetime(2023, 1, 1, 5)
|
||||||
interval = duration(hours=-1)
|
interval = duration(hours=-1)
|
||||||
@@ -186,21 +190,25 @@ def test_load_total_no_data(measurement_eos):
|
|||||||
def test_name_to_key(measurement_eos):
|
def test_name_to_key(measurement_eos):
|
||||||
"""Test name_to_key functionality."""
|
"""Test name_to_key functionality."""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
measurement_load0_name="Household",
|
measurement=MeasurementCommonSettings(
|
||||||
measurement_load1_name="Heat Pump",
|
load0_name="Household",
|
||||||
|
load1_name="Heat Pump",
|
||||||
|
)
|
||||||
)
|
)
|
||||||
measurement_eos.config.merge_settings(settings)
|
measurement_eos.config.merge_settings(settings)
|
||||||
|
|
||||||
assert measurement_eos.name_to_key("Household", "measurement_load") == "measurement_load0_mr"
|
assert measurement_eos.name_to_key("Household", "load") == "load0_mr"
|
||||||
assert measurement_eos.name_to_key("Heat Pump", "measurement_load") == "measurement_load1_mr"
|
assert measurement_eos.name_to_key("Heat Pump", "load") == "load1_mr"
|
||||||
assert measurement_eos.name_to_key("Unknown", "measurement_load") is None
|
assert measurement_eos.name_to_key("Unknown", "load") is None
|
||||||
|
|
||||||
|
|
||||||
def test_name_to_key_invalid_topic(measurement_eos):
|
def test_name_to_key_invalid_topic(measurement_eos):
|
||||||
"""Test name_to_key with an invalid topic."""
|
"""Test name_to_key with an invalid topic."""
|
||||||
settings = SettingsEOS(
|
settings = SettingsEOS(
|
||||||
measurement_load0_name="Household",
|
MeasurementCommonSettings(
|
||||||
measurement_load1_name="Heat Pump",
|
load0_name="Household",
|
||||||
|
load1_name="Heat Pump",
|
||||||
|
)
|
||||||
)
|
)
|
||||||
measurement_eos.config.merge_settings(settings)
|
measurement_eos.config.merge_settings(settings)
|
||||||
|
|
||||||
|
@@ -17,25 +17,6 @@ from akkudoktoreos.prediction.weatherclearoutside import WeatherClearOutside
|
|||||||
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
from akkudoktoreos.prediction.weatherimport import WeatherImport
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def sample_settings(config_eos):
|
|
||||||
"""Fixture that adds settings data to the global config."""
|
|
||||||
settings = {
|
|
||||||
"prediction_hours": 48,
|
|
||||||
"prediction_historic_hours": 24,
|
|
||||||
"latitude": 52.52,
|
|
||||||
"longitude": 13.405,
|
|
||||||
"weather_provider": None,
|
|
||||||
"pvforecast_provider": None,
|
|
||||||
"load_provider": None,
|
|
||||||
"elecprice_provider": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Merge settings to config
|
|
||||||
config_eos.merge_settings_from_dict(settings)
|
|
||||||
return config_eos
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def prediction():
|
def prediction():
|
||||||
"""All EOS predictions."""
|
"""All EOS predictions."""
|
||||||
@@ -58,83 +39,26 @@ def forecast_providers():
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"prediction_hours, prediction_historic_hours, latitude, longitude, expected_timezone",
|
|
||||||
[
|
|
||||||
(48, 24, 40.7128, -74.0060, "America/New_York"), # Valid latitude/longitude
|
|
||||||
(0, 0, None, None, None), # No location
|
|
||||||
(100, 50, 51.5074, -0.1278, "Europe/London"), # Another valid location
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_prediction_common_settings_valid(
|
|
||||||
prediction_hours, prediction_historic_hours, latitude, longitude, expected_timezone
|
|
||||||
):
|
|
||||||
"""Test valid settings for PredictionCommonSettings."""
|
|
||||||
settings = PredictionCommonSettings(
|
|
||||||
prediction_hours=prediction_hours,
|
|
||||||
prediction_historic_hours=prediction_historic_hours,
|
|
||||||
latitude=latitude,
|
|
||||||
longitude=longitude,
|
|
||||||
)
|
|
||||||
assert settings.prediction_hours == prediction_hours
|
|
||||||
assert settings.prediction_historic_hours == prediction_historic_hours
|
|
||||||
assert settings.latitude == latitude
|
|
||||||
assert settings.longitude == longitude
|
|
||||||
assert settings.timezone == expected_timezone
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"field_name, invalid_value, expected_error",
|
"field_name, invalid_value, expected_error",
|
||||||
[
|
[
|
||||||
("prediction_hours", -1, "Input should be greater than or equal to 0"),
|
("hours", -1, "Input should be greater than or equal to 0"),
|
||||||
("prediction_historic_hours", -5, "Input should be greater than or equal to 0"),
|
("historic_hours", -5, "Input should be greater than or equal to 0"),
|
||||||
("latitude", -91.0, "Input should be greater than or equal to -90"),
|
|
||||||
("latitude", 91.0, "Input should be less than or equal to 90"),
|
|
||||||
("longitude", -181.0, "Input should be greater than or equal to -180"),
|
|
||||||
("longitude", 181.0, "Input should be less than or equal to 180"),
|
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_prediction_common_settings_invalid(field_name, invalid_value, expected_error):
|
def test_prediction_common_settings_invalid(field_name, invalid_value, expected_error, config_eos):
|
||||||
"""Test invalid settings for PredictionCommonSettings."""
|
"""Test invalid settings for PredictionCommonSettings."""
|
||||||
valid_data = {
|
valid_data = {
|
||||||
"prediction_hours": 48,
|
"hours": 48,
|
||||||
"prediction_historic_hours": 24,
|
"historic_hours": 24,
|
||||||
"latitude": 40.7128,
|
|
||||||
"longitude": -74.0060,
|
|
||||||
}
|
}
|
||||||
|
assert PredictionCommonSettings(**valid_data) is not None
|
||||||
valid_data[field_name] = invalid_value
|
valid_data[field_name] = invalid_value
|
||||||
|
|
||||||
with pytest.raises(ValidationError, match=expected_error):
|
with pytest.raises(ValidationError, match=expected_error):
|
||||||
PredictionCommonSettings(**valid_data)
|
PredictionCommonSettings(**valid_data)
|
||||||
|
|
||||||
|
|
||||||
def test_prediction_common_settings_no_location():
|
|
||||||
"""Test that timezone is None when latitude and longitude are not provided."""
|
|
||||||
settings = PredictionCommonSettings(
|
|
||||||
prediction_hours=48, prediction_historic_hours=24, latitude=None, longitude=None
|
|
||||||
)
|
|
||||||
assert settings.timezone is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_prediction_common_settings_with_location():
|
|
||||||
"""Test that timezone is correctly computed when latitude and longitude are provided."""
|
|
||||||
settings = PredictionCommonSettings(
|
|
||||||
prediction_hours=48, prediction_historic_hours=24, latitude=34.0522, longitude=-118.2437
|
|
||||||
)
|
|
||||||
assert settings.timezone == "America/Los_Angeles"
|
|
||||||
|
|
||||||
|
|
||||||
def test_prediction_common_settings_timezone_none_when_coordinates_missing():
|
|
||||||
"""Test that timezone is None when latitude or longitude is missing."""
|
|
||||||
config_no_latitude = PredictionCommonSettings(longitude=-74.0060)
|
|
||||||
config_no_longitude = PredictionCommonSettings(latitude=40.7128)
|
|
||||||
config_no_coords = PredictionCommonSettings()
|
|
||||||
|
|
||||||
assert config_no_latitude.timezone is None
|
|
||||||
assert config_no_longitude.timezone is None
|
|
||||||
assert config_no_coords.timezone is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_initialization(prediction, forecast_providers):
|
def test_initialization(prediction, forecast_providers):
|
||||||
"""Test that Prediction is initialized with the correct providers in sequence."""
|
"""Test that Prediction is initialized with the correct providers in sequence."""
|
||||||
assert isinstance(prediction, Prediction)
|
assert isinstance(prediction, Prediction)
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user