mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-11-20 20:36:33 +00:00
chore: automate development version and release generation (#772)
Some checks failed
Bump Version / Bump Version Workflow (push) Has been cancelled
docker-build / platform-excludes (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
Some checks failed
Bump Version / Bump Version Workflow (push) Has been cancelled
docker-build / platform-excludes (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
This change introduces a GitHub Action to automate release creation, including proper tagging and automatic addition of a development marker to the version. A hash is also appended to development versions to make their state easier to distinguish. Tests and release documentation have been updated to reflect the revised release workflow. Several files now retrieve the current version dynamically. The test --full-run option has been renamed to --finalize to make it clear that it is for commit finalization testing. Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
99
.github/workflows/bump-version.yml
vendored
Normal file
99
.github/workflows/bump-version.yml
vendored
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
name: Bump Version
|
||||||
|
|
||||||
|
# Trigger the workflow on any push to main
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bump-version:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Bump Version Workflow
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# --- Step 1: Checkout the repository ---
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # Needed to create tags and see full history
|
||||||
|
persist-credentials: true # Needed for pushing commits and tags
|
||||||
|
|
||||||
|
# --- Step 2: Set up Python ---
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.11"
|
||||||
|
|
||||||
|
# --- Step 3: Calculate version dynamically ---
|
||||||
|
- name: Calculate version
|
||||||
|
id: calc
|
||||||
|
run: |
|
||||||
|
# Call custom version calculation script
|
||||||
|
VERSION=$(python scripts/get_version.py)
|
||||||
|
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||||
|
echo "Computed version: $VERSION"
|
||||||
|
|
||||||
|
# --- Step 4: Skip workflow for development versions ---
|
||||||
|
- name: Skip if version contains 'dev'
|
||||||
|
run: |
|
||||||
|
# Exit workflow early if the version contains 'dev'
|
||||||
|
if [[ "${{ steps.calc.outputs.version }}" == *dev* ]]; then
|
||||||
|
echo "Version contains 'dev', skipping bump version workflow."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# --- Step 5: Update files and commit if necessary ---
|
||||||
|
- name: Update files and commit
|
||||||
|
run: |
|
||||||
|
# Define files to update
|
||||||
|
UPDATE_FILES="haaddon/config.yaml"
|
||||||
|
|
||||||
|
# Call general Python version replacement script
|
||||||
|
python scripts/update_version.py "${{ steps.calc.outputs.version }}" $UPDATE_FILES
|
||||||
|
|
||||||
|
# Commit changes if any
|
||||||
|
git config user.name "github-actions"
|
||||||
|
git config user.email "actions@github.com"
|
||||||
|
git add $UPDATE_FILES
|
||||||
|
|
||||||
|
if git diff --cached --quiet; then
|
||||||
|
echo "No files changed. Skipping commit."
|
||||||
|
else
|
||||||
|
git commit -m "chore: bump version to ${{ steps.calc.outputs.version }}"
|
||||||
|
git push
|
||||||
|
|
||||||
|
# --- Step 6: Create release tag ---
|
||||||
|
- name: Create release tag if it does not exist
|
||||||
|
id: tagging
|
||||||
|
run: |
|
||||||
|
TAG="v${{ steps.calc.outputs.version }}"
|
||||||
|
|
||||||
|
if git rev-parse --verify "$TAG" >/dev/null 2>&1; then
|
||||||
|
echo "Tag $TAG already exists. Skipping tag creation."
|
||||||
|
echo "created=false" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
git tag -a "v${{ steps.calc.outputs.version }}" -m "Release ${{ steps.calc.outputs.version }}"
|
||||||
|
git push origin "v${{ steps.calc.outputs.version }}"
|
||||||
|
echo "created=true" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
# --- Step 7: Bump to development version ---
|
||||||
|
- name: Bump dev version
|
||||||
|
id: bump_dev
|
||||||
|
run: |
|
||||||
|
VERSION_BASE=$(python scripts/bump_dev_version.py | tail -n1)
|
||||||
|
if [ -z "$VERSION_BASE" ]; then
|
||||||
|
echo "Error: bump_dev_version.py returned an empty version."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "version_base=$VERSION_BASE" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
git config user.name "github-actions"
|
||||||
|
git config user.email "actions@github.com"
|
||||||
|
git add src/akkudoktoreos/core/version.py
|
||||||
|
if git diff --cached --quiet; then
|
||||||
|
echo "version.py not changed. Skipping commit."
|
||||||
|
else
|
||||||
|
git commit -m "chore: bump dev version to ${VERSION_BASE}"
|
||||||
|
git push
|
||||||
2
.github/workflows/pytest.yml
vendored
2
.github/workflows/pytest.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
|||||||
- name: Run Pytest
|
- name: Run Pytest
|
||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip install -e .
|
||||||
python -m pytest --full-run --check-config-side-effect -vs --cov src --cov-report term-missing
|
python -m pytest --finalize --check-config-side-effect -vs --cov src --cov-report term-missing
|
||||||
|
|
||||||
- name: Upload test artifacts
|
- name: Upload test artifacts
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
|
|||||||
@@ -39,6 +39,7 @@ repos:
|
|||||||
- pandas-stubs==2.3.2.250926
|
- pandas-stubs==2.3.2.250926
|
||||||
- tokenize-rt==6.2.0
|
- tokenize-rt==6.2.0
|
||||||
- types-docutils==0.22.2.20251006
|
- types-docutils==0.22.2.20251006
|
||||||
|
- types-PyYaml==6.0.12.20250915
|
||||||
pass_filenames: false
|
pass_filenames: false
|
||||||
|
|
||||||
# --- Markdown linter ---
|
# --- Markdown linter ---
|
||||||
|
|||||||
33
Dockerfile
33
Dockerfile
@@ -1,4 +1,7 @@
|
|||||||
# syntax=docker/dockerfile:1.7
|
# syntax=docker/dockerfile:1.7
|
||||||
|
# Dockerfile
|
||||||
|
|
||||||
|
# Set base image first
|
||||||
ARG PYTHON_VERSION=3.13.9
|
ARG PYTHON_VERSION=3.13.9
|
||||||
FROM python:${PYTHON_VERSION}-slim
|
FROM python:${PYTHON_VERSION}-slim
|
||||||
|
|
||||||
@@ -32,28 +35,25 @@ RUN adduser --system --group --no-create-home eos \
|
|||||||
&& mkdir -p "${EOS_CONFIG_DIR}" \
|
&& mkdir -p "${EOS_CONFIG_DIR}" \
|
||||||
&& chown eos "${EOS_CONFIG_DIR}"
|
&& chown eos "${EOS_CONFIG_DIR}"
|
||||||
|
|
||||||
|
# Install requirements
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
|
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
pip install --no-cache-dir -r requirements.txt
|
pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# Copy source
|
||||||
|
COPY src/ ./src
|
||||||
COPY pyproject.toml .
|
COPY pyproject.toml .
|
||||||
RUN mkdir -p src && pip install --no-cache-dir -e .
|
|
||||||
|
|
||||||
COPY src src
|
# Create version information
|
||||||
|
COPY scripts/get_version.py ./scripts/get_version.py
|
||||||
|
RUN python scripts/get_version.py > ./version.txt
|
||||||
|
RUN rm ./scripts/get_version.py
|
||||||
|
|
||||||
# Create minimal default configuration for Docker to fix EOSDash accessibility (#629)
|
RUN echo "Building Akkudoktor-EOS with Python $PYTHON_VERSION"
|
||||||
# This ensures EOSDash binds to 0.0.0.0 instead of 127.0.0.1 in containers
|
|
||||||
RUN echo '{\n\
|
# Install akkudoktoreos package in editable form (-e)
|
||||||
"server": {\n\
|
# pyproject-toml will read the version from version.txt
|
||||||
"host": "0.0.0.0",\n\
|
RUN pip install --no-cache-dir -e .
|
||||||
"port": 8503,\n\
|
|
||||||
"startup_eosdash": true,\n\
|
|
||||||
"eosdash_host": "0.0.0.0",\n\
|
|
||||||
"eosdash_port": 8504\n\
|
|
||||||
}\n\
|
|
||||||
}' > "${EOS_CONFIG_DIR}/EOS.config.json" \
|
|
||||||
&& chown eos:eos "${EOS_CONFIG_DIR}/EOS.config.json"
|
|
||||||
|
|
||||||
USER eos
|
USER eos
|
||||||
ENTRYPOINT []
|
ENTRYPOINT []
|
||||||
@@ -61,6 +61,7 @@ ENTRYPOINT []
|
|||||||
EXPOSE 8503
|
EXPOSE 8503
|
||||||
EXPOSE 8504
|
EXPOSE 8504
|
||||||
|
|
||||||
CMD ["python", "src/akkudoktoreos/server/eos.py", "--host", "0.0.0.0"]
|
# Ensure EOS and EOSdash bind to 0.0.0.0
|
||||||
|
CMD ["python", "-m", "akkudoktoreos.server.eos", "--host", "0.0.0.0"]
|
||||||
|
|
||||||
VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"]
|
VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"]
|
||||||
|
|||||||
48
Makefile
48
Makefile
@@ -1,5 +1,8 @@
|
|||||||
# Define the targets
|
# Define the targets
|
||||||
.PHONY: help venv pip install dist test test-full test-system test-ci test-profile docker-run docker-build docs read-docs clean format gitlint mypy run run-dev run-dash run-dash-dev bumps
|
.PHONY: help venv pip install dist test test-full test-system test-ci test-profile docker-run docker-build docs read-docs clean format gitlint mypy run run-dev run-dash run-dash-dev prepare-version test-version
|
||||||
|
|
||||||
|
# - Take VERSION from version.py
|
||||||
|
VERSION := $(shell python3 scripts/get_version.py)
|
||||||
|
|
||||||
# Default target
|
# Default target
|
||||||
all: help
|
all: help
|
||||||
@@ -25,13 +28,13 @@ help:
|
|||||||
@echo " run-dash - Run EOSdash production server in virtual environment."
|
@echo " run-dash - Run EOSdash production server in virtual environment."
|
||||||
@echo " run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)."
|
@echo " run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)."
|
||||||
@echo " test - Run tests."
|
@echo " test - Run tests."
|
||||||
@echo " test-full - Run tests with full optimization."
|
@echo " test-full - Run all tests (e.g. to finalize a commit)."
|
||||||
@echo " test-system - Run tests with system tests enabled."
|
@echo " test-system - Run tests with system tests enabled."
|
||||||
@echo " test-ci - Run tests as CI does. No user config file allowed."
|
@echo " test-ci - Run tests as CI does. No user config file allowed."
|
||||||
@echo " test-profile - Run single test optimization with profiling."
|
@echo " test-profile - Run single test optimization with profiling."
|
||||||
@echo " dist - Create distribution (in dist/)."
|
@echo " dist - Create distribution (in dist/)."
|
||||||
@echo " clean - Remove generated documentation, distribution and virtual environment."
|
@echo " clean - Remove generated documentation, distribution and virtual environment."
|
||||||
@echo " bump - Bump version to next release version."
|
@echo " prepare-version - Prepare a version defined in setup.py."
|
||||||
|
|
||||||
# Target to set up a Python 3 virtual environment
|
# Target to set up a Python 3 virtual environment
|
||||||
venv:
|
venv:
|
||||||
@@ -50,8 +53,12 @@ pip-dev: pip
|
|||||||
.venv/bin/pip install -r requirements-dev.txt
|
.venv/bin/pip install -r requirements-dev.txt
|
||||||
@echo "Dependencies installed from requirements-dev.txt."
|
@echo "Dependencies installed from requirements-dev.txt."
|
||||||
|
|
||||||
|
# Target to create a version.txt
|
||||||
|
version-txt:
|
||||||
|
echo "$(VERSION)" > version.txt
|
||||||
|
|
||||||
# Target to install EOS in editable form (development mode) into virtual environment.
|
# Target to install EOS in editable form (development mode) into virtual environment.
|
||||||
install: pip-dev
|
install: pip-dev version-txt
|
||||||
.venv/bin/pip install build
|
.venv/bin/pip install build
|
||||||
.venv/bin/pip install -e .
|
.venv/bin/pip install -e .
|
||||||
@echo "EOS installed in editable form (development mode)."
|
@echo "EOS installed in editable form (development mode)."
|
||||||
@@ -63,7 +70,7 @@ dist: pip
|
|||||||
@echo "Distribution created (see dist/)."
|
@echo "Distribution created (see dist/)."
|
||||||
|
|
||||||
# Target to generate documentation
|
# Target to generate documentation
|
||||||
gen-docs: pip-dev
|
gen-docs: pip-dev version-txt
|
||||||
.venv/bin/pip install -e .
|
.venv/bin/pip install -e .
|
||||||
.venv/bin/python ./scripts/generate_config_md.py --output-file docs/_generated/config.md
|
.venv/bin/python ./scripts/generate_config_md.py --output-file docs/_generated/config.md
|
||||||
.venv/bin/python ./scripts/generate_openapi_md.py --output-file docs/_generated/openapi.md
|
.venv/bin/python ./scripts/generate_openapi_md.py --output-file docs/_generated/openapi.md
|
||||||
@@ -127,7 +134,7 @@ test:
|
|||||||
# Target to run tests as done by CI on Github.
|
# Target to run tests as done by CI on Github.
|
||||||
test-ci:
|
test-ci:
|
||||||
@echo "Running tests as CI..."
|
@echo "Running tests as CI..."
|
||||||
.venv/bin/pytest --full-run --check-config-side-effect -vs --cov src --cov-report term-missing
|
.venv/bin/pytest --finalize --check-config-side-effect -vs --cov src --cov-report term-missing
|
||||||
|
|
||||||
# Target to run tests including the system tests.
|
# Target to run tests including the system tests.
|
||||||
test-system:
|
test-system:
|
||||||
@@ -137,7 +144,7 @@ test-system:
|
|||||||
# Target to run all tests.
|
# Target to run all tests.
|
||||||
test-full:
|
test-full:
|
||||||
@echo "Running all tests..."
|
@echo "Running all tests..."
|
||||||
.venv/bin/pytest --full-run
|
.venv/bin/pytest --finalize
|
||||||
|
|
||||||
# Target to run tests including the single test optimization with profiling.
|
# Target to run tests including the single test optimization with profiling.
|
||||||
test-profile:
|
test-profile:
|
||||||
@@ -165,16 +172,19 @@ docker-build:
|
|||||||
@docker pull python:3.13.9-slim
|
@docker pull python:3.13.9-slim
|
||||||
@docker compose build
|
@docker compose build
|
||||||
|
|
||||||
# Bump Akkudoktoreos version
|
# Propagate version info to all version files
|
||||||
VERSION ?= 0.2.0+dev
|
# Take UPDATE_FILES from GitHub action bump-version.yml
|
||||||
NEW_VERSION ?= $(subst +dev,,$(VERSION))+dev # be careful - default is always +dev
|
UPDATE_FILES := $(shell sed -n 's/^[[:space:]]*UPDATE_FILES[[:space:]]*=[[:space:]]*"\([^"]*\)".*/\1/p' \
|
||||||
|
.github/workflows/bump-version.yml)
|
||||||
|
prepare-version: #pip-dev
|
||||||
|
@echo "Update version to $(VERSION) from version.py in files $(UPDATE_FILES) and doc"
|
||||||
|
.venv/bin/python ./scripts/update_version.py $(VERSION) $(UPDATE_FILES)
|
||||||
|
.venv/bin/python ./scripts/convert_lightweight_tags.py
|
||||||
|
.venv/bin/python ./scripts/generate_config_md.py --output-file docs/_generated/config.md
|
||||||
|
.venv/bin/python ./scripts/generate_openapi_md.py --output-file docs/_generated/openapi.md
|
||||||
|
.venv/bin/python ./scripts/generate_openapi.py --output-file openapi.json
|
||||||
|
.venv/bin/pytest -vv --finalize tests/test_version.py
|
||||||
|
|
||||||
bump: pip-dev
|
test-version:
|
||||||
@echo "Bumping akkudoktoreos version from $(VERSION) to $(NEW_VERSION) (dry-run: $(EXTRA_ARGS))"
|
echo "Test version information to be correctly set in all version files"
|
||||||
.venv/bin/python scripts/convert_lightweight_tags.py
|
.venv/bin/pytest -vv tests/test_version.py
|
||||||
.venv/bin/python scripts/bump_version.py $(VERSION) $(NEW_VERSION) $(EXTRA_ARGS)
|
|
||||||
|
|
||||||
bump-dry: pip-dev
|
|
||||||
@echo "Bumping akkudoktoreos version from $(VERSION) to $(NEW_VERSION) (dry-run: --dry-run)"
|
|
||||||
.venv/bin/python scripts/convert_lightweight_tags.py
|
|
||||||
.venv/bin/python scripts/bump_version.py $(VERSION) $(NEW_VERSION) --dry-run
|
|
||||||
|
|||||||
@@ -39,18 +39,6 @@ services:
|
|||||||
- "${EOS_SERVER__EOSDASH_PORT}:8504"
|
- "${EOS_SERVER__EOSDASH_PORT}:8504"
|
||||||
|
|
||||||
# Volume mount configuration (optional)
|
# Volume mount configuration (optional)
|
||||||
# IMPORTANT: When mounting local directories, the default config won't be available.
|
|
||||||
# You must create an EOS.config.json file in your local config directory with:
|
|
||||||
# {
|
|
||||||
# "server": {
|
|
||||||
# "host": "0.0.0.0", # Required for Docker container accessibility
|
|
||||||
# "port": 8503,
|
|
||||||
# "startup_eosdash": true,
|
|
||||||
# "eosdash_host": "0.0.0.0", # Required for Docker container accessibility
|
|
||||||
# "eosdash_port": 8504
|
|
||||||
# }
|
|
||||||
# }
|
|
||||||
#
|
|
||||||
# Example volume mounts (uncomment to use):
|
# Example volume mounts (uncomment to use):
|
||||||
# volumes:
|
# volumes:
|
||||||
# - ./config:/opt/eos/config # Mount local config directory
|
# - ./config:/opt/eos/config # Mount local config directory
|
||||||
|
|||||||
@@ -92,7 +92,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"general": {
|
"general": {
|
||||||
"version": "0.2.0+dev",
|
"version": "0.2.0+dev.4dbc2d",
|
||||||
"data_folder_path": null,
|
"data_folder_path": null,
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ Properties:
|
|||||||
| latitude | `EOS_GENERAL__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°) |
|
| latitude | `EOS_GENERAL__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°) |
|
||||||
| longitude | `EOS_GENERAL__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees, within -180 to 180 (°) |
|
| longitude | `EOS_GENERAL__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees, within -180 to 180 (°) |
|
||||||
| timezone | | `Optional[str]` | `ro` | `N/A` | None |
|
| timezone | | `Optional[str]` | `ro` | `N/A` | None |
|
||||||
| version | `EOS_GENERAL__VERSION` | `str` | `rw` | `0.2.0+dev` | Configuration file version. Used to check compatibility. |
|
| version | `EOS_GENERAL__VERSION` | `str` | `rw` | `0.2.0+dev.4dbc2d` | Configuration file version. Used to check compatibility. |
|
||||||
:::
|
:::
|
||||||
<!-- pyml enable line-length -->
|
<!-- pyml enable line-length -->
|
||||||
|
|
||||||
@@ -40,7 +40,7 @@ Properties:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"general": {
|
"general": {
|
||||||
"version": "0.2.0+dev",
|
"version": "0.2.0+dev.4dbc2d",
|
||||||
"data_folder_path": null,
|
"data_folder_path": null,
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
@@ -58,7 +58,7 @@ Properties:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"general": {
|
"general": {
|
||||||
"version": "0.2.0+dev",
|
"version": "0.2.0+dev.4dbc2d",
|
||||||
"data_folder_path": null,
|
"data_folder_path": null,
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Akkudoktor-EOS
|
# Akkudoktor-EOS
|
||||||
|
|
||||||
**Version**: `v0.2.0+dev`
|
**Version**: `v0.2.0+dev.4dbc2d`
|
||||||
|
|
||||||
<!-- pyml disable line-length -->
|
<!-- pyml disable line-length -->
|
||||||
**Description**: This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.
|
**Description**: This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.
|
||||||
|
|||||||
11
docs/conf.py
11
docs/conf.py
@@ -7,13 +7,20 @@ https://www.sphinx-doc.org/en/master/usage/configuration.html
|
|||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Add the src directory to sys.path so Sphinx can import akkudoktoreos
|
||||||
|
PROJECT_ROOT = Path(__file__).parent.parent
|
||||||
|
SRC_DIR = PROJECT_ROOT / "src"
|
||||||
|
sys.path.insert(0, str(SRC_DIR))
|
||||||
|
|
||||||
|
from akkudoktoreos.core.version import __version__
|
||||||
|
|
||||||
# -- Project information -----------------------------------------------------
|
# -- Project information -----------------------------------------------------
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||||
|
|
||||||
project = "Akkudoktor EOS"
|
project = "Akkudoktor EOS"
|
||||||
copyright = "2024, Andreas Schmitz"
|
copyright = "2025, Andreas Schmitz"
|
||||||
author = "Andreas Schmitz"
|
author = "Andreas Schmitz"
|
||||||
release = "0.0.1"
|
release = __version__
|
||||||
|
|
||||||
# -- General configuration ---------------------------------------------------
|
# -- General configuration ---------------------------------------------------
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||||
|
|||||||
@@ -393,6 +393,13 @@ At a minimum, you should run the module tests:
|
|||||||
make test
|
make test
|
||||||
```
|
```
|
||||||
|
|
||||||
|
:::{admonition} Note
|
||||||
|
:class: Note
|
||||||
|
Depending on your changes you may also have to change the version.py and documentation files. Do as
|
||||||
|
suggested by the tests. You may ignore the version.py and documentation changes up until you
|
||||||
|
finalize your change.
|
||||||
|
:::
|
||||||
|
|
||||||
You should also run the system tests. These include additional tests that interact with real
|
You should also run the system tests. These include additional tests that interact with real
|
||||||
resources:
|
resources:
|
||||||
|
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ and how to set a **development version** after the release.
|
|||||||
| 1 | Contributor | Prepare a release branch **in your fork** using Commitizen |
|
| 1 | Contributor | Prepare a release branch **in your fork** using Commitizen |
|
||||||
| 2 | Contributor | Open a **Pull Request to upstream** (`Akkudoktor-EOS/EOS`) |
|
| 2 | Contributor | Open a **Pull Request to upstream** (`Akkudoktor-EOS/EOS`) |
|
||||||
| 3 | Maintainer | Review and **merge the release PR** |
|
| 3 | Maintainer | Review and **merge the release PR** |
|
||||||
| 4 | Maintainer | Create the **GitHub Release and tag** |
|
| 4 | CI | Create the **GitHub Release and tag** |
|
||||||
| 5 | Maintainer | Set the **development version marker** via a follow-up PR |
|
| 5 | CI | Set the **development version marker** via a follow-up PR |
|
||||||
|
|
||||||
## 🔄 Detailed Workflow
|
## 🔄 Detailed Workflow
|
||||||
|
|
||||||
@@ -40,24 +40,26 @@ git checkout -b release/vX.Y.Z
|
|||||||
|
|
||||||
#### Bump the version information
|
#### Bump the version information
|
||||||
|
|
||||||
At least update
|
Set `__version__` in src/akkudoktoreos/core/version.py
|
||||||
|
|
||||||
- pyproject.toml
|
```python
|
||||||
- src/akkudoktoreos/core/version.py
|
__version__ = "0.3.0"
|
||||||
- src/akkudoktoreos/data/default.config.json
|
```
|
||||||
- Makefile
|
|
||||||
|
Prepare version by updating versioned files, e.g.:
|
||||||
|
|
||||||
|
- haaddon/config.yaml
|
||||||
|
|
||||||
and the generated documentation:
|
and the generated documentation:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
make bump VERSION=0.1.0+dev NEW_VERSION=X.Y.Z
|
make prepare-version
|
||||||
make gen-docs
|
|
||||||
```
|
```
|
||||||
|
|
||||||
You may check the changes by:
|
Check the changes by:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git diff
|
make test-version
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Create a new CHANGELOG.md entry
|
#### Create a new CHANGELOG.md entry
|
||||||
@@ -66,19 +68,20 @@ Edit CHANGELOG.md
|
|||||||
|
|
||||||
#### Create the new release commit
|
#### Create the new release commit
|
||||||
|
|
||||||
|
Add all the changed version files and all other changes to the commit.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git add pyproject.toml src/akkudoktoreos/core/version.py \
|
git add src/akkudoktoreos/core/version.py CHANGELOG.md ...
|
||||||
src/akkudoktoreos/data/default.config.json Makefile CHANGELOG.md
|
git commit -s -m "chore: Prepare Release v0.3.0"
|
||||||
git commit -s -m "chore(release): Release vX.Y.Z"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Push the branch to your fork
|
#### Push the branch to your fork
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git push --set-upstream origin release/vX.Y.Z
|
git push --set-upstream origin release/v0.3.0
|
||||||
```
|
```
|
||||||
|
|
||||||
### 2️⃣ Contributor: Open the Release Pull Request
|
### 2️⃣ Contributor: Open the Release Preparation Pull Request
|
||||||
|
|
||||||
| From | To |
|
| From | To |
|
||||||
| ------------------------------------ | ------------------------- |
|
| ------------------------------------ | ------------------------- |
|
||||||
@@ -87,13 +90,13 @@ git push --set-upstream origin release/vX.Y.Z
|
|||||||
**PR Title:**
|
**PR Title:**
|
||||||
|
|
||||||
```text
|
```text
|
||||||
chore(release): release vX.Y.Z
|
chore: prepare release vX.Y.Z
|
||||||
```
|
```
|
||||||
|
|
||||||
**PR Description Template:**
|
**PR Description Template:**
|
||||||
|
|
||||||
```markdown
|
```markdown
|
||||||
## Release vX.Y.Z
|
## Prepare Release vX.Y.Z
|
||||||
|
|
||||||
This pull request prepares release **vX.Y.Z**.
|
This pull request prepares release **vX.Y.Z**.
|
||||||
|
|
||||||
@@ -119,94 +122,26 @@ See `CHANGELOG.md` for full details.
|
|||||||
**Merge Strategy:**
|
**Merge Strategy:**
|
||||||
|
|
||||||
- Prefer **Merge Commit** (or **Squash Merge**, per project preference)
|
- Prefer **Merge Commit** (or **Squash Merge**, per project preference)
|
||||||
- Use commit message: `chore(release): Release vX.Y.Z`
|
- Use commit message: `chore: Prepare Release vX.Y.Z`
|
||||||
|
|
||||||
### 4️⃣ Maintainer: Publish the GitHub Release
|
### 4️⃣ CI: Publish the GitHub Release
|
||||||
|
|
||||||
1. Go to **GitHub → Releases → Draft a new release**
|
The new release will automatically be published by the GitHub CI action.
|
||||||
2. **Choose tag** → enter `vX.Y.Z` (GitHub creates the tag on publish)
|
|
||||||
3. **Release title:** `vX.Y.Z`
|
|
||||||
4. **Paste changelog entry** from `CHANGELOG.md`
|
|
||||||
5. Optionally enable **Set as latest release**
|
|
||||||
6. Click **Publish release** 🎉
|
|
||||||
|
|
||||||
### 5️⃣ Maintainer: Prepare the Development Version Marker
|
See `.github/workflows/bump-version.yml` for details.
|
||||||
|
|
||||||
**Sync local copy:**
|
### 5️⃣ CI: Prepare the Development Version Marker
|
||||||
|
|
||||||
```bash
|
The development version marker will automatically be set by the GitHub CI action.
|
||||||
git fetch eos
|
|
||||||
git checkout main
|
|
||||||
git pull eos main
|
|
||||||
```
|
|
||||||
|
|
||||||
**Create a development version branch:**
|
See `.github/workflows/bump-version.yml` for details.
|
||||||
|
|
||||||
```bash
|
|
||||||
git checkout -b release/vX.Y.Z_dev
|
|
||||||
```
|
|
||||||
|
|
||||||
**Set development version marker manually:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
make bump VERSION=X.Y.Z NEW_VERSION=X.Y.Z+dev
|
|
||||||
make gen-docs
|
|
||||||
```
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git add pyproject.toml src/akkudoktoreos/core/version.py \
|
|
||||||
src/akkudoktoreos/data/default.config.json Makefile
|
|
||||||
git commit -s -m "chore: set development version marker X.Y.Z+dev"
|
|
||||||
```
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git push --set-upstream origin release/vX.Y.Z_dev
|
|
||||||
```
|
|
||||||
|
|
||||||
### 6️⃣ Maintainer (or Contributor): Open the Development Version PR
|
|
||||||
|
|
||||||
| From | To |
|
|
||||||
| ---------------------------------------- | ------------------------- |
|
|
||||||
| `<your-username>/EOS:release/vX.Y.Z_dev` | `Akkudoktor-EOS/EOS:main` |
|
|
||||||
|
|
||||||
**PR Title:**
|
|
||||||
|
|
||||||
```text
|
|
||||||
chore: development version vX.Y.Z+dev
|
|
||||||
```
|
|
||||||
|
|
||||||
**PR Description Template:**
|
|
||||||
|
|
||||||
```markdown
|
|
||||||
## Development version vX.Y.Z+dev
|
|
||||||
|
|
||||||
This pull request marks the repository as back in active development.
|
|
||||||
|
|
||||||
### Changes
|
|
||||||
- Set version to `vX.Y.Z+dev`
|
|
||||||
|
|
||||||
No changelog entry is needed.
|
|
||||||
```
|
|
||||||
|
|
||||||
### 7️⃣ Maintainer: Review and Merge the Development Version PR
|
|
||||||
|
|
||||||
**Checklist:**
|
|
||||||
|
|
||||||
- ✅ Only version files updated to `+dev`
|
|
||||||
- ✅ No unintended changes
|
|
||||||
|
|
||||||
**Merge Strategy:**
|
|
||||||
|
|
||||||
- Merge with commit message: `chore: development version vX.Y.Z+dev`
|
|
||||||
|
|
||||||
## ✅ Quick Reference
|
## ✅ Quick Reference
|
||||||
|
|
||||||
| Step | Actor | Action |
|
| Step | Actor | Action |
|
||||||
| ---- | ----- | ------ |
|
| ---- | ----- | ------ |
|
||||||
| **1. Prepare release branch** | Contributor | Bump version & changelog via Commitizen |
|
| **1. Prepare release branch** | Contributor | Bump version & changelog |
|
||||||
| **2. Open release PR** | Contributor | Submit release for review |
|
| **2. Open release PR** | Contributor | Submit release for review |
|
||||||
| **3. Review & merge release PR** | Maintainer | Finalize changes into `main` |
|
| **3. Review & merge release PR** | Maintainer | Finalize changes into `main` |
|
||||||
| **4. Publish GitHub Release** | Maintainer | Create tag & notify users |
|
| **4. Publish GitHub Release** | CI | Create tag & notify users |
|
||||||
| **5. Prepare development version branch** | Maintainer | Set development marker |
|
| **5. Prepare development version branch** | CI | Set development marker |
|
||||||
| **6. Open development PR** | Maintainer (or Contributor) | Propose returning to development state |
|
|
||||||
| **7. Review & merge development PR** | Maintainer | Mark repository as back in development |
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
"info": {
|
"info": {
|
||||||
"title": "Akkudoktor-EOS",
|
"title": "Akkudoktor-EOS",
|
||||||
"description": "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.",
|
"description": "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.",
|
||||||
"version": "v0.2.0+dev"
|
"version": "v0.2.0+dev.4dbc2d"
|
||||||
},
|
},
|
||||||
"paths": {
|
"paths": {
|
||||||
"/v1/admin/cache/clear": {
|
"/v1/admin/cache/clear": {
|
||||||
@@ -2406,7 +2406,7 @@
|
|||||||
"general": {
|
"general": {
|
||||||
"$ref": "#/components/schemas/GeneralSettings-Output",
|
"$ref": "#/components/schemas/GeneralSettings-Output",
|
||||||
"default": {
|
"default": {
|
||||||
"version": "0.2.0+dev",
|
"version": "0.2.0+dev.4dbc2d",
|
||||||
"data_output_subpath": "output",
|
"data_output_subpath": "output",
|
||||||
"latitude": 52.52,
|
"latitude": 52.52,
|
||||||
"longitude": 13.405,
|
"longitude": 13.405,
|
||||||
@@ -4084,7 +4084,7 @@
|
|||||||
"type": "string",
|
"type": "string",
|
||||||
"title": "Version",
|
"title": "Version",
|
||||||
"description": "Configuration file version. Used to check compatibility.",
|
"description": "Configuration file version. Used to check compatibility.",
|
||||||
"default": "0.2.0+dev"
|
"default": "0.2.0+dev.4dbc2d"
|
||||||
},
|
},
|
||||||
"data_folder_path": {
|
"data_folder_path": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
@@ -4158,7 +4158,7 @@
|
|||||||
"type": "string",
|
"type": "string",
|
||||||
"title": "Version",
|
"title": "Version",
|
||||||
"description": "Configuration file version. Used to check compatibility.",
|
"description": "Configuration file version. Used to check compatibility.",
|
||||||
"default": "0.2.0+dev"
|
"default": "0.2.0+dev.4dbc2d"
|
||||||
},
|
},
|
||||||
"data_folder_path": {
|
"data_folder_path": {
|
||||||
"anyOf": [
|
"anyOf": [
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "akkudoktor-eos"
|
name = "akkudoktor-eos"
|
||||||
version = "0.2.0+dev"
|
dynamic = ["version"] # Get version information dynamically
|
||||||
authors = [
|
authors = [
|
||||||
{ name="Andreas Schmitz", email="author@example.com" },
|
{ name="Andreas Schmitz", email="author@example.com" },
|
||||||
]
|
]
|
||||||
@@ -25,6 +25,8 @@ build-backend = "setuptools.build_meta"
|
|||||||
[tool.setuptools.dynamic]
|
[tool.setuptools.dynamic]
|
||||||
dependencies = {file = ["requirements.txt"]}
|
dependencies = {file = ["requirements.txt"]}
|
||||||
optional-dependencies = {dev = { file = ["requirements-dev.txt"] }}
|
optional-dependencies = {dev = { file = ["requirements-dev.txt"] }}
|
||||||
|
# version.txt must be generated
|
||||||
|
version = { file = "version.txt" }
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
[tool.setuptools.packages.find]
|
||||||
where = ["src/"]
|
where = ["src/"]
|
||||||
@@ -109,29 +111,10 @@ module = "xprocess.*"
|
|||||||
ignore_missing_imports = true
|
ignore_missing_imports = true
|
||||||
|
|
||||||
[tool.commitizen]
|
[tool.commitizen]
|
||||||
|
# Only used as linter
|
||||||
name = "cz_conventional_commits"
|
name = "cz_conventional_commits"
|
||||||
version_scheme = "semver"
|
version_scheme = "semver"
|
||||||
version = "0.2.0+dev" # <-- Set your current version heretag_format = "v$version"
|
|
||||||
|
|
||||||
# Files to automatically update when bumping version
|
# Enforce commit message and branch style:
|
||||||
update_changelog_on_bump = true
|
|
||||||
changelog_incremental = true
|
|
||||||
annotated_tag = true
|
|
||||||
bump_message = "chore(release): $current_version → $new_version"
|
|
||||||
|
|
||||||
# Branch validation settings
|
|
||||||
branch_validation = true
|
branch_validation = true
|
||||||
branch_pattern = "^(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+$"
|
branch_pattern = "^(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+$"
|
||||||
|
|
||||||
# Customize changelog generation
|
|
||||||
[tool.commitizen.changelog]
|
|
||||||
path = "CHANGELOG.md"
|
|
||||||
template = "keepachangelog"
|
|
||||||
|
|
||||||
# If your version is stored in multiple files (Python modules, docs etc.), add them here
|
|
||||||
[tool.commitizen.files]
|
|
||||||
version = [
|
|
||||||
"pyproject.toml", # Auto-update project version
|
|
||||||
"src/akkudoktoreos/core/version.py",
|
|
||||||
"src/akkudoktoreos/data/default.config.json"
|
|
||||||
]
|
|
||||||
|
|||||||
@@ -7,11 +7,15 @@
|
|||||||
# - mypy (mirrors-mypy) - sync with requirements-dev.txt (if on pypi)
|
# - mypy (mirrors-mypy) - sync with requirements-dev.txt (if on pypi)
|
||||||
# - pymarkdown
|
# - pymarkdown
|
||||||
# - commitizen - sync with requirements-dev.txt (if on pypi)
|
# - commitizen - sync with requirements-dev.txt (if on pypi)
|
||||||
|
#
|
||||||
|
# !!! Sync .pre-commit-config.yaml and requirements-dev.txt !!!
|
||||||
pre-commit==4.4.0
|
pre-commit==4.4.0
|
||||||
mypy==1.18.2
|
mypy==1.18.2
|
||||||
types-requests==2.32.4.20250913 # for mypy
|
types-requests==2.32.4.20250913 # for mypy
|
||||||
pandas-stubs==2.3.2.250926 # for mypy
|
pandas-stubs==2.3.2.250926 # for mypy
|
||||||
tokenize-rt==6.2.0 # for mypy
|
tokenize-rt==6.2.0 # for mypy
|
||||||
|
types-docutils==0.22.2.20251006 # for mypy
|
||||||
|
types-PyYaml==6.0.12.20250915 # for mypy
|
||||||
commitizen==4.10.0
|
commitizen==4.10.0
|
||||||
deprecated==1.3.1 # for commitizen
|
deprecated==1.3.1 # for commitizen
|
||||||
|
|
||||||
|
|||||||
70
scripts/bump_dev_version.py
Normal file
70
scripts/bump_dev_version.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Update VERSION_BASE in version.py after a release tag.
|
||||||
|
|
||||||
|
Behavior:
|
||||||
|
- Read VERSION_BASE from version.py
|
||||||
|
- Strip ANY existing "+dev" suffix
|
||||||
|
- Append exactly one "+dev"
|
||||||
|
- Write back the updated file
|
||||||
|
|
||||||
|
This ensures:
|
||||||
|
0.2.0 --> 0.2.0+dev
|
||||||
|
0.2.0+dev --> 0.2.0+dev
|
||||||
|
0.2.0+dev+dev -> 0.2.0+dev
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
ROOT = Path(__file__).resolve().parent.parent
|
||||||
|
VERSION_FILE = ROOT / "src" / "akkudoktoreos" / "core" / "version.py"
|
||||||
|
|
||||||
|
|
||||||
|
def bump_dev_version_file(file: Path) -> str:
    """Ensure VERSION_BASE in *file* carries exactly one ``+dev`` suffix.

    Reads the file, locates the ``VERSION_BASE = "..."`` assignment, strips
    any number of trailing ``+dev`` markers and appends a single ``+dev``
    before writing the result back. This makes the operation idempotent:

        0.2.0         --> 0.2.0+dev
        0.2.0+dev     --> 0.2.0+dev
        0.2.0+dev+dev --> 0.2.0+dev

    Args:
        file: Path of the version module to rewrite.

    Returns:
        The new version string (e.g. ``0.2.0+dev``).

    Raises:
        ValueError: If no VERSION_BASE assignment is found in the file.
    """
    source = file.read_text(encoding="utf-8")

    # One compiled pattern serves both the lookup and the rewrite.
    assignment = re.compile(r'^VERSION_BASE\s*=\s*["\']([^"\']+)["\']', re.MULTILINE)

    found = assignment.search(source)
    if found is None:
        raise ValueError("VERSION_BASE not found")

    # Drop any repeated "+dev" markers, then append exactly one.
    stripped = re.sub(r"(\+dev)+$", "", found.group(1))
    bumped = f"{stripped}+dev"

    updated = assignment.sub(f'VERSION_BASE = "{bumped}"', source)
    file.write_text(updated, encoding="utf-8")

    return bumped
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: bump the dev marker and print the new version."""
    # First positional argument overrides the default version file path.
    if len(sys.argv) > 1:
        target = Path(sys.argv[1])
    else:
        target = VERSION_FILE

    try:
        bumped = bump_dev_version_file(target)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)

    # Downstream tooling (the CI workflow) consumes the version from stdout.
    print(bumped)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -1,170 +0,0 @@
|
|||||||
"""Update version strings in multiple project files only if the old version matches.
|
|
||||||
|
|
||||||
This script updates version information in:
|
|
||||||
- pyproject.toml
|
|
||||||
- src/akkudoktoreos/core/version.py
|
|
||||||
- src/akkudoktoreos/data/default.config.json
|
|
||||||
- Makefile
|
|
||||||
|
|
||||||
Supported version formats:
|
|
||||||
- __version__ = "<version>"
|
|
||||||
- version = "<version>"
|
|
||||||
- "version": "<version>"
|
|
||||||
- VERSION ?: <version>
|
|
||||||
|
|
||||||
It will:
|
|
||||||
- Replace VERSION → NEW_VERSION if the old version is found.
|
|
||||||
- Report which files were updated.
|
|
||||||
- Report which files contained mismatched versions.
|
|
||||||
- Report which files had no version.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
python bump_version.py VERSION NEW_VERSION
|
|
||||||
|
|
||||||
Args:
|
|
||||||
VERSION (str): Version expected before replacement.
|
|
||||||
NEW_VERSION (str): Version to write.
|
|
||||||
|
|
||||||
"""
|
|
||||||
#!/usr/bin/env python3
|
|
||||||
import argparse
|
|
||||||
import glob
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List, Tuple
|
|
||||||
|
|
||||||
# Patterns to match version strings
|
|
||||||
VERSION_PATTERNS = [
|
|
||||||
re.compile(r'(__version__\s*=\s*")(?P<ver>[^"]+)(")'),
|
|
||||||
re.compile(r'(version\s*=\s*")(?P<ver>[^"]+)(")'),
|
|
||||||
re.compile(r'("version"\s*:\s*")(?P<ver>[^"]+)(")'),
|
|
||||||
re.compile(r'(VERSION\s*\?=\s*)(?P<ver>[^\s]+)'), # For Makefile: VERSION ?= 0.2.0
|
|
||||||
]
|
|
||||||
|
|
||||||
# Default files to process
|
|
||||||
DEFAULT_FILES = [
|
|
||||||
"pyproject.toml",
|
|
||||||
"src/akkudoktoreos/core/version.py",
|
|
||||||
"src/akkudoktoreos/data/default.config.json",
|
|
||||||
"Makefile",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def backup_file(file_path: str) -> str:
|
|
||||||
"""Create a backup of the given file with a .bak suffix.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
file_path: Path to the file to backup.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Path to the backup file.
|
|
||||||
"""
|
|
||||||
backup_path = f"{file_path}.bak"
|
|
||||||
shutil.copy2(file_path, backup_path)
|
|
||||||
return backup_path
|
|
||||||
|
|
||||||
|
|
||||||
def replace_version_in_file(
|
|
||||||
file_path: Path, old_version: str, new_version: str, dry_run: bool = False
|
|
||||||
) -> Tuple[bool, bool]:
|
|
||||||
"""
|
|
||||||
Replace old_version with new_version in the given file if it matches.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
file_path: Path to the file to modify.
|
|
||||||
old_version: The old version to replace.
|
|
||||||
new_version: The new version to set.
|
|
||||||
dry_run: If True, don't actually modify files.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple[bool, bool]: (file_would_be_updated, old_version_found)
|
|
||||||
"""
|
|
||||||
content = file_path.read_text()
|
|
||||||
new_content = content
|
|
||||||
old_version_found = False
|
|
||||||
file_would_be_updated = False
|
|
||||||
|
|
||||||
for pattern in VERSION_PATTERNS:
|
|
||||||
def repl(match):
|
|
||||||
nonlocal old_version_found, file_would_be_updated
|
|
||||||
ver = match.group("ver")
|
|
||||||
if ver == old_version:
|
|
||||||
old_version_found = True
|
|
||||||
file_would_be_updated = True
|
|
||||||
# Some patterns have 3 groups (like quotes)
|
|
||||||
if len(match.groups()) == 3:
|
|
||||||
return f"{match.group(1)}{new_version}{match.group(3)}"
|
|
||||||
else:
|
|
||||||
return f"{match.group(1)}{new_version}"
|
|
||||||
return match.group(0)
|
|
||||||
|
|
||||||
new_content = pattern.sub(repl, new_content)
|
|
||||||
|
|
||||||
if file_would_be_updated:
|
|
||||||
if dry_run:
|
|
||||||
print(f"[DRY-RUN] Would update {file_path}")
|
|
||||||
else:
|
|
||||||
backup_path = file_path.with_suffix(file_path.suffix + ".bak")
|
|
||||||
shutil.copy(file_path, backup_path)
|
|
||||||
file_path.write_text(new_content)
|
|
||||||
print(f"Updated {file_path} (backup saved to {backup_path})")
|
|
||||||
elif not old_version_found:
|
|
||||||
print(f"[SKIP] {file_path}: old version '{old_version}' not found")
|
|
||||||
|
|
||||||
return file_would_be_updated, old_version_found
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = argparse.ArgumentParser(description="Bump version across project files.")
|
|
||||||
parser.add_argument("old_version", help="Old version to replace")
|
|
||||||
parser.add_argument("new_version", help="New version to set")
|
|
||||||
parser.add_argument(
|
|
||||||
"--dry-run", action="store_true", help="Show what would be changed without modifying files"
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--glob", nargs="*", help="Optional glob patterns to include additional files"
|
|
||||||
)
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
updated_files = []
|
|
||||||
not_found_files = []
|
|
||||||
|
|
||||||
# Determine files to update
|
|
||||||
files_to_update: List[Path] = [Path(f) for f in DEFAULT_FILES]
|
|
||||||
if args.glob:
|
|
||||||
for pattern in args.glob:
|
|
||||||
files_to_update.extend(Path(".").glob(pattern))
|
|
||||||
|
|
||||||
files_to_update = list(dict.fromkeys(files_to_update)) # remove duplicates
|
|
||||||
|
|
||||||
any_updated = False
|
|
||||||
for file_path in files_to_update:
|
|
||||||
if file_path.exists() and file_path.is_file():
|
|
||||||
updated, _ = replace_version_in_file(
|
|
||||||
file_path, args.old_version, args.new_version, args.dry_run
|
|
||||||
)
|
|
||||||
any_updated |= updated
|
|
||||||
if updated:
|
|
||||||
updated_files.append(file_path)
|
|
||||||
else:
|
|
||||||
print(f"[SKIP] {file_path}: file does not exist")
|
|
||||||
not_found_files.append(file_path)
|
|
||||||
|
|
||||||
print("\nSummary:")
|
|
||||||
if updated_files:
|
|
||||||
print(f"Updated files ({len(updated_files)}):")
|
|
||||||
for f in updated_files:
|
|
||||||
print(f" {f}")
|
|
||||||
else:
|
|
||||||
print("No files were updated.")
|
|
||||||
|
|
||||||
if not_found_files:
|
|
||||||
print(f"Files where old version was not found ({len(not_found_files)}):")
|
|
||||||
for f in not_found_files:
|
|
||||||
print(f" {f}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
15
scripts/get_version.py
Normal file
15
scripts/get_version.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!.venv/bin/python
|
||||||
|
"""Get version of EOS"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Add the src directory to sys.path so Sphinx can import akkudoktoreos
|
||||||
|
PROJECT_ROOT = Path(__file__).parent.parent
|
||||||
|
SRC_DIR = PROJECT_ROOT / "src"
|
||||||
|
sys.path.insert(0, str(SRC_DIR))
|
||||||
|
|
||||||
|
from akkudoktoreos.core.version import __version__
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
print(__version__)
|
||||||
113
scripts/update_version.py
Normal file
113
scripts/update_version.py
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
#!.venv/bin/python
|
||||||
|
"""General version replacement script.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python scripts/update_version.py <version> <file1> [file2 ...]
|
||||||
|
"""
|
||||||
|
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
# --- Patterns to match version strings ---
|
||||||
|
VERSION_PATTERNS = [
    # Python: __version__ = "1.2.3"
    re.compile(
        r'(?<![A-Za-z0-9])(__version__\s*=\s*")'
        r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
        r'(")'
    ),

    # Python: version = "1.2.3"
    re.compile(
        r'(?<![A-Za-z0-9])(version\s*=\s*")'
        r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
        r'(")'
    ),

    # JSON: "version": "1.2.3"
    re.compile(
        r'(?<![A-Za-z0-9])("version"\s*:\s*")'
        r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
        r'(")'
    ),

    # Makefile-style: VERSION ?= 1.2.3
    re.compile(
        r'(?<![A-Za-z0-9])(VERSION\s*\?=\s*)'
        r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
    ),

    # YAML: version: "1.2.3"
    re.compile(
        r'(?m)^(version\s*:\s*["\']?)'
        r'(?P<ver>\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)'
        r'(["\']?)\s*$'
    ),
]


def update_version_in_file(file_path: Path, new_version: str) -> bool:
    """Replace version strings in a file based on VERSION_PATTERNS.

    Args:
        file_path: File to scan and rewrite in place.
        new_version: Version string to write.

    Returns:
        True if at least one version string differed and the file was
        rewritten, False otherwise.
    """
    # Bug fix: read/write explicitly as UTF-8 so the rewrite does not depend
    # on the platform default encoding (which could corrupt non-ASCII content,
    # e.g. on Windows where the default may be cp1252).
    content = file_path.read_text(encoding="utf-8")
    new_content = content
    file_would_be_updated = False

    for pattern in VERSION_PATTERNS:

        def repl(match):
            nonlocal file_would_be_updated
            ver = match.group("ver")
            if ver != new_version:
                file_would_be_updated = True

                # Three-group patterns (quoted forms: __version__, JSON, YAML)
                if len(match.groups()) == 3:
                    return f"{match.group(1)}{new_version}{match.group(3)}"

                # Two-group patterns (Makefile)
                return f"{match.group(1)}{new_version}"

            # Already at the requested version: keep the match untouched.
            return match.group(0)

        new_content = pattern.sub(repl, new_content)

    if file_would_be_updated:
        file_path.write_text(new_content, encoding="utf-8")

    return file_would_be_updated
|
||||||
|
|
||||||
|
|
||||||
|
def main(version: str, files: List[str]):
|
||||||
|
if not version:
|
||||||
|
raise ValueError("No version provided")
|
||||||
|
if not files:
|
||||||
|
raise ValueError("No files provided")
|
||||||
|
|
||||||
|
updated_files = []
|
||||||
|
for f in files:
|
||||||
|
path = Path(f)
|
||||||
|
if not path.exists():
|
||||||
|
print(f"Warning: {path} does not exist, skipping")
|
||||||
|
continue
|
||||||
|
if update_version_in_file(path, version):
|
||||||
|
updated_files.append(str(path))
|
||||||
|
|
||||||
|
if updated_files:
|
||||||
|
print(f"Updated files: {', '.join(updated_files)}")
|
||||||
|
else:
|
||||||
|
print("No files updated.")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if len(sys.argv) < 3:
|
||||||
|
print("Usage: python update_version.py <version> <file1> [file2 ...]")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
version_arg = sys.argv[1]
|
||||||
|
files_arg = sys.argv[2:]
|
||||||
|
main(version_arg, files_arg)
|
||||||
@@ -11,7 +11,7 @@ Key features:
|
|||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import tempfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, ClassVar, Optional, Type
|
from typing import Any, ClassVar, Optional, Type
|
||||||
|
|
||||||
@@ -154,7 +154,7 @@ class GeneralSettings(SettingsBaseModel):
|
|||||||
if v not in cls.compatible_versions:
|
if v not in cls.compatible_versions:
|
||||||
error = (
|
error = (
|
||||||
f"Incompatible configuration version '{v}'. "
|
f"Incompatible configuration version '{v}'. "
|
||||||
f"Expected one of: {', '.join(cls.compatible_versions)}."
|
f"Expected: {', '.join(cls.compatible_versions)}."
|
||||||
)
|
)
|
||||||
logger.error(error)
|
logger.error(error)
|
||||||
raise ValueError(error)
|
raise ValueError(error)
|
||||||
@@ -339,28 +339,40 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
|
|
||||||
Behavior:
|
Behavior:
|
||||||
1. Checks for the existence of a JSON configuration file in the expected location.
|
1. Checks for the existence of a JSON configuration file in the expected location.
|
||||||
2. If the configuration file does not exist, creates the directory (if needed) and attempts to copy a
|
2. If the configuration file does not exist, creates the directory (if needed) and
|
||||||
default configuration file to the location. If the copy fails, uses the default configuration file directly.
|
attempts to create a default configuration file in the location. If the creation
|
||||||
3. Creates a `pydantic_settings.JsonConfigSettingsSource` for both the configuration file and the default configuration file.
|
fails, a temporary configuration directory is used.
|
||||||
|
3. Creates a `pydantic_settings.JsonConfigSettingsSource` for the configuration
|
||||||
|
file.
|
||||||
4. Updates class attributes `GeneralSettings._config_folder_path` and
|
4. Updates class attributes `GeneralSettings._config_folder_path` and
|
||||||
`GeneralSettings._config_file_path` to reflect the determined paths.
|
`GeneralSettings._config_file_path` to reflect the determined paths.
|
||||||
5. Returns a tuple containing all provided and newly created settings sources in the desired order.
|
5. Returns a tuple containing all provided and newly created settings sources in
|
||||||
|
the desired order.
|
||||||
|
|
||||||
Notes:
|
Notes:
|
||||||
- This method logs a warning if the default configuration file cannot be copied.
|
- This method logs an error if the default configuration file in the normal
|
||||||
- It ensures that a fallback to the default configuration file is always possible.
|
configuration directory cannot be created.
|
||||||
|
- It ensures that a fallback to a default configuration file is always possible.
|
||||||
"""
|
"""
|
||||||
# Ensure we know and have the config folder path and the config file
|
# Ensure we know and have the config folder path and the config file
|
||||||
config_file, exists = cls._get_config_file_path()
|
config_file, exists = cls._get_config_file_path()
|
||||||
config_dir = config_file.parent
|
config_dir = config_file.parent
|
||||||
if not exists:
|
if not exists:
|
||||||
config_dir.mkdir(parents=True, exist_ok=True)
|
config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
# Create minimum config file
|
||||||
|
config_minimum_content = '{ "general": { "version": "' + __version__ + '" } }'
|
||||||
try:
|
try:
|
||||||
shutil.copy2(cls.config_default_file_path, config_file)
|
config_file.write_text(config_minimum_content, encoding="utf-8")
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.warning(f"Could not copy default config: {exc}. Using default config...")
|
# Create minimum config in temporary config directory as last resort
|
||||||
config_file = cls.config_default_file_path
|
error_msg = f"Could not create minimum config file in {config_dir}: {exc}"
|
||||||
config_dir = config_file.parent
|
logger.error(error_msg)
|
||||||
|
temp_dir = Path(tempfile.mkdtemp())
|
||||||
|
info_msg = f"Using temporary config directory {temp_dir}"
|
||||||
|
logger.info(info_msg)
|
||||||
|
config_dir = temp_dir
|
||||||
|
config_file = temp_dir / config_file.name
|
||||||
|
config_file.write_text(config_minimum_content, encoding="utf-8")
|
||||||
# Remember config_dir and config file
|
# Remember config_dir and config file
|
||||||
GeneralSettings._config_folder_path = config_dir
|
GeneralSettings._config_folder_path = config_dir
|
||||||
GeneralSettings._config_file_path = config_file
|
GeneralSettings._config_file_path = config_file
|
||||||
@@ -387,19 +399,8 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults):
|
|||||||
f"Error reading config file '{config_file}' (falling back to default config): {ex}"
|
f"Error reading config file '{config_file}' (falling back to default config): {ex}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Append default settings to sources
|
|
||||||
default_settings = pydantic_settings.JsonConfigSettingsSource(
|
|
||||||
settings_cls, json_file=cls.config_default_file_path
|
|
||||||
)
|
|
||||||
setting_sources.append(default_settings)
|
|
||||||
|
|
||||||
return tuple(setting_sources)
|
return tuple(setting_sources)
|
||||||
|
|
||||||
@classproperty
|
|
||||||
def config_default_file_path(cls) -> Path:
|
|
||||||
"""Compute the default config file path."""
|
|
||||||
return cls.package_root_path.joinpath("data/default.config.json")
|
|
||||||
|
|
||||||
@classproperty
|
@classproperty
|
||||||
def package_root_path(cls) -> Path:
|
def package_root_path(cls) -> Path:
|
||||||
"""Compute the package root path."""
|
"""Compute the package root path."""
|
||||||
|
|||||||
@@ -1,5 +1,156 @@
|
|||||||
"""Version information for akkudoktoreos."""
|
"""Version information for akkudoktoreos."""
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import re
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
# For development add `+dev` to previous release
|
# For development add `+dev` to previous release
|
||||||
# For release omit `+dev`.
|
# For release omit `+dev`.
|
||||||
__version__ = "0.2.0+dev"
|
VERSION_BASE = "0.2.0+dev"
|
||||||
|
|
||||||
|
# Project hash of relevant files
|
||||||
|
HASH_EOS = ""
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------
|
||||||
|
# Helpers for version generation
|
||||||
|
# ------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def is_excluded_dir(path: Path, excluded_dir_patterns: set[str]) -> bool:
    """Return True if the directory name matches any exclusion glob pattern."""
    name = path.name
    for pattern in excluded_dir_patterns:
        if fnmatch(name, pattern):
            return True
    return False


def hash_tree(
    paths: list[Path],
    allowed_suffixes: set[str],
    excluded_dir_patterns: set[str],
    excluded_files: Optional[set[Path]] = None,
) -> str:
    """Return the SHA256 hex digest over file contents under ``paths``.

    Only files whose (lowercased) suffix is in ``allowed_suffixes`` contribute.
    Directories matching ``excluded_dir_patterns`` — and everything nested
    inside them — are skipped, as are files whose resolved path is listed in
    ``excluded_files``. Entries are visited in sorted order so the digest is
    deterministic for a given tree.

    Raises:
        ValueError: If one of the root paths does not exist.
    """
    skip_files = excluded_files or set()
    digest = hashlib.sha256()

    for root in paths:
        if not root.exists():
            raise ValueError(f"Root path does not exist: {root}")
        for entry in sorted(root.rglob("*")):
            # Skip excluded directories themselves ...
            if entry.is_dir() and is_excluded_dir(entry, excluded_dir_patterns):
                continue

            # ... and anything located inside an excluded directory.
            if any(is_excluded_dir(parent, excluded_dir_patterns) for parent in entry.parents):
                continue

            # Skip individually excluded files.
            if entry.resolve() in skip_files:
                continue

            # Hash only regular files with an allowed suffix.
            if entry.is_file() and entry.suffix.lower() in allowed_suffixes:
                digest.update(entry.read_bytes())

    return digest.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def _version_hash() -> str:
    """Compute the project content hash.

    Only files inside the installed package (src/akkudoktoreos) are hashed so
    the computation also works for installed (packaged) distributions.
    """
    # The akkudoktoreos package directory (parent of core/).
    package_root = Path(__file__).resolve().parent.parent

    # File types that contribute to the hash.
    allowed_suffixes: set[str] = {".py", ".md", ".json"}

    # Generated/transient directories that must not influence the hash
    # (glob-like name patterns).
    excluded_dir_patterns: set[str] = {"*_autosum", "*__pycache__", "*_generated"}

    # No individually excluded files at the moment.
    excluded_files: set[Path] = set()

    return hash_tree(
        [package_root],
        allowed_suffixes,
        excluded_dir_patterns,
        excluded_files=excluded_files,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _version_calculate() -> str:
    """Derive the effective version string from VERSION_BASE.

    Side effect: refreshes the module-level HASH_EOS project hash. For
    development versions (``+dev`` suffix) a short content hash is appended
    so the exact source state is distinguishable; release versions are
    returned unchanged.
    """
    global HASH_EOS
    HASH_EOS = _version_hash()

    if not VERSION_BASE.endswith("+dev"):
        return VERSION_BASE
    return f"{VERSION_BASE}.{HASH_EOS[:6]}"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------
|
||||||
|
# Project version information
|
||||||
|
# ----------------------------
|
||||||
|
|
||||||
|
# The version
|
||||||
|
__version__ = _version_calculate()
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------
|
||||||
|
# Version info access
|
||||||
|
# -------------------
|
||||||
|
|
||||||
|
|
||||||
|
# Regular expression to split the version string into pieces
|
||||||
|
# Regular expression to split the version string into pieces
VERSION_RE = re.compile(
    r"""
    ^(?P<base>\d+\.\d+\.\d+)                  # x.y.z
    (?:\+                                     # +dev.hash starts here
        (?:
            (?P<dev>dev)                      # literal 'dev'
            (?:\.(?P<hash>[A-Za-z0-9]+))?     # optional .hash
        )
    )?
    $
    """,
    re.VERBOSE,
)


def version() -> dict[str, Optional[str]]:
    """Parses the version string.

    The version string shall be of the form:
        x.y.z
        x.y.z+dev
        x.y.z+dev.HASH

    Returns:
        .. code-block:: python

            {
                "version": "0.2.0+dev.a96a65",
                "base": "x.y.z",
                "dev": "dev" or None,
                "hash": "<hash>" or None,
            }

    Raises:
        ValueError: If ``__version__`` does not match the expected format.
    """
    # No `global` needed — __version__ is only read here.
    match = VERSION_RE.match(__version__)
    if not match:
        # Bug fix: report the offending version string; the original
        # f-string interpolated this function object (`version`) instead.
        raise ValueError(f"Invalid version format: {__version__}")

    info = match.groupdict()
    info["version"] = __version__

    return info
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
{
|
|
||||||
"general": {
|
|
||||||
"version": "0.2.0+dev"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import hashlib
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
@@ -7,6 +8,7 @@ import sys
|
|||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
from fnmatch import fnmatch
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Generator, Optional, Union
|
from typing import Generator, Optional, Union
|
||||||
@@ -21,12 +23,14 @@ from loguru import logger
|
|||||||
from xprocess import ProcessStarter, XProcess
|
from xprocess import ProcessStarter, XProcess
|
||||||
|
|
||||||
from akkudoktoreos.config.config import ConfigEOS, get_config
|
from akkudoktoreos.config.config import ConfigEOS, get_config
|
||||||
|
from akkudoktoreos.core.version import _version_hash, version
|
||||||
from akkudoktoreos.server.server import get_default_host
|
from akkudoktoreos.server.server import get_default_host
|
||||||
|
|
||||||
# -----------------------------------------------
|
# -----------------------------------------------
|
||||||
# Adapt pytest logging handling to Loguru logging
|
# Adapt pytest logging handling to Loguru logging
|
||||||
# -----------------------------------------------
|
# -----------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def caplog(caplog: LogCaptureFixture):
|
def caplog(caplog: LogCaptureFixture):
|
||||||
"""Propagate Loguru logs to the pytest caplog handler."""
|
"""Propagate Loguru logs to the pytest caplog handler."""
|
||||||
@@ -88,7 +92,7 @@ def disable_debug_logging(scope="session", autouse=True):
|
|||||||
|
|
||||||
def pytest_addoption(parser):
|
def pytest_addoption(parser):
|
||||||
parser.addoption(
|
parser.addoption(
|
||||||
"--full-run", action="store_true", default=False, help="Run with all optimization tests."
|
"--finalize", action="store_true", default=False, help="Run with all tests."
|
||||||
)
|
)
|
||||||
parser.addoption(
|
parser.addoption(
|
||||||
"--check-config-side-effect",
|
"--check-config-side-effect",
|
||||||
@@ -105,8 +109,8 @@ def pytest_addoption(parser):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def is_full_run(request):
|
def is_finalize(request):
|
||||||
yield bool(request.config.getoption("--full-run"))
|
yield bool(request.config.getoption("--finalize"))
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
@@ -123,6 +127,12 @@ def is_system_test(request):
|
|||||||
yield bool(request.config.getoption("--system-test"))
|
yield bool(request.config.getoption("--system-test"))
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def is_ci() -> bool:
|
||||||
|
"""Returns True if running on GitHub Actions CI, False otherwise."""
|
||||||
|
return os.getenv("CI") == "true"
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def prediction_eos():
|
def prediction_eos():
|
||||||
from akkudoktoreos.prediction.prediction import get_prediction
|
from akkudoktoreos.prediction.prediction import get_prediction
|
||||||
@@ -528,6 +538,25 @@ def server_setup_for_function(xprocess) -> Generator[dict[str, Union[str, int]],
|
|||||||
yield result
|
yield result
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------
|
||||||
|
# Provide version and hash check support
|
||||||
|
# --------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def version_and_hash() -> Generator[dict[str, Optional[str]], None, None]:
|
||||||
|
"""Return version info as in in version.py and calculate current hash.
|
||||||
|
|
||||||
|
Runs once per test session.
|
||||||
|
"""
|
||||||
|
info = version()
|
||||||
|
info["hash_current"] = _version_hash()
|
||||||
|
|
||||||
|
yield info
|
||||||
|
|
||||||
|
# After all tests
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------
|
# ------------------------------
|
||||||
# Provide pytest timezone change
|
# Provide pytest timezone change
|
||||||
# ------------------------------
|
# ------------------------------
|
||||||
|
|||||||
@@ -120,15 +120,6 @@ def test_singleton_behavior(config_eos, config_default_dirs):
|
|||||||
assert instance1.general.config_file_path == initial_cfg_file
|
assert instance1.general.config_file_path == initial_cfg_file
|
||||||
|
|
||||||
|
|
||||||
def test_default_config_path(config_eos, config_default_dirs):
|
|
||||||
"""Test that the default config file path is computed correctly."""
|
|
||||||
_, _, config_default_dir_default, _ = config_default_dirs
|
|
||||||
|
|
||||||
expected_path = config_default_dir_default.joinpath("default.config.json")
|
|
||||||
assert config_eos.config_default_file_path == expected_path
|
|
||||||
assert config_eos.config_default_file_path.is_file()
|
|
||||||
|
|
||||||
|
|
||||||
def test_config_file_priority(config_default_dirs):
|
def test_config_file_priority(config_default_dirs):
|
||||||
"""Test config file priority.
|
"""Test config file priority.
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
import hashlib
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from fnmatch import fnmatch
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -18,43 +17,6 @@ DIR_SRC = DIR_PROJECT_ROOT / "src"
|
|||||||
|
|
||||||
HASH_FILE = DIR_BUILD / ".sphinx_hash.json"
|
HASH_FILE = DIR_BUILD / ".sphinx_hash.json"
|
||||||
|
|
||||||
# Allowed file suffixes to consider
|
|
||||||
ALLOWED_SUFFIXES = {".py", ".md", ".json"}
|
|
||||||
|
|
||||||
# Directory patterns to exclude (glob-like)
|
|
||||||
EXCLUDED_DIR_PATTERNS = {"*_autosum", "*__pycache__"}
|
|
||||||
|
|
||||||
|
|
||||||
def is_excluded_dir(path: Path) -> bool:
|
|
||||||
"""Check whether a directory should be excluded based on name patterns."""
|
|
||||||
return any(fnmatch(path.name, pattern) for pattern in EXCLUDED_DIR_PATTERNS)
|
|
||||||
|
|
||||||
|
|
||||||
def hash_tree(paths: list[Path], suffixes=ALLOWED_SUFFIXES) -> str:
|
|
||||||
"""Return SHA256 hash for files under `paths`.
|
|
||||||
|
|
||||||
Restricted by suffix, excluding excluded directory patterns.
|
|
||||||
"""
|
|
||||||
h = hashlib.sha256()
|
|
||||||
|
|
||||||
for root in paths:
|
|
||||||
if not root.exists():
|
|
||||||
continue
|
|
||||||
for p in sorted(root.rglob("*")):
|
|
||||||
# Skip excluded directories
|
|
||||||
if p.is_dir() and is_excluded_dir(p):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Skip files inside excluded directories
|
|
||||||
if any(is_excluded_dir(parent) for parent in p.parents):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Hash only allowed file types
|
|
||||||
if p.is_file() and p.suffix.lower() in suffixes:
|
|
||||||
h.update(p.read_bytes())
|
|
||||||
|
|
||||||
return h.hexdigest()
|
|
||||||
|
|
||||||
|
|
||||||
def find_sphinx_build() -> str:
|
def find_sphinx_build() -> str:
|
||||||
venv = os.getenv("VIRTUAL_ENV")
|
venv = os.getenv("VIRTUAL_ENV")
|
||||||
@@ -69,15 +31,12 @@ def find_sphinx_build() -> str:
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
def sphinx_changed() -> bool:
|
def sphinx_changed(version_and_hash) -> Optional[str]:
|
||||||
"""Returns True if any watched files have changed since last run.
|
"""Returns new hash if any watched files have changed since last run.
|
||||||
|
|
||||||
Hash is stored in .sphinx_hash.json.
|
Hash is stored in .sphinx_hash.json.
|
||||||
"""
|
"""
|
||||||
# Directories whose changes should trigger rebuilding docs
|
new_hash = None
|
||||||
watched_paths = [Path("docs"), Path("src")]
|
|
||||||
|
|
||||||
current_hash = hash_tree(watched_paths)
|
|
||||||
|
|
||||||
# Load previous hash
|
# Load previous hash
|
||||||
try:
|
try:
|
||||||
@@ -86,13 +45,12 @@ def sphinx_changed() -> bool:
|
|||||||
except Exception:
|
except Exception:
|
||||||
previous_hash = None
|
previous_hash = None
|
||||||
|
|
||||||
changed = (previous_hash != current_hash)
|
changed = (previous_hash != version_and_hash["hash_current"])
|
||||||
|
|
||||||
# Update stored hash
|
if changed:
|
||||||
HASH_FILE.parent.mkdir(parents=True, exist_ok=True)
|
new_hash = version_and_hash["hash_current"]
|
||||||
HASH_FILE.write_text(json.dumps({"hash": current_hash}, indent=2))
|
|
||||||
|
|
||||||
return changed
|
return new_hash
|
||||||
|
|
||||||
|
|
||||||
class TestSphinxDocumentation:
|
class TestSphinxDocumentation:
|
||||||
@@ -120,17 +78,17 @@ class TestSphinxDocumentation:
|
|||||||
if DIR_BUILD_DOCS.exists():
|
if DIR_BUILD_DOCS.exists():
|
||||||
shutil.rmtree(DIR_BUILD_DOCS)
|
shutil.rmtree(DIR_BUILD_DOCS)
|
||||||
|
|
||||||
def test_sphinx_build(self, sphinx_changed: bool, is_full_run: bool):
|
def test_sphinx_build(self, sphinx_changed: Optional[str], is_finalize: bool):
|
||||||
"""Build Sphinx documentation and ensure no major warnings appear in the build output."""
|
"""Build Sphinx documentation and ensure no major warnings appear in the build output."""
|
||||||
if not is_full_run:
|
# Ensure docs folder exists
|
||||||
pytest.skip("Skipping Sphinx test — not full run")
|
if not DIR_DOCS.exists():
|
||||||
|
pytest.skip(f"Skipping Sphinx build test - docs folder not present: {DIR_DOCS}")
|
||||||
|
|
||||||
if not sphinx_changed:
|
if not sphinx_changed:
|
||||||
pytest.skip(f"Skipping Sphinx build — no relevant file changes detected: {HASH_FILE}")
|
pytest.skip(f"Skipping Sphinx build — no relevant file changes detected: {HASH_FILE}")
|
||||||
|
|
||||||
# Ensure docs folder exists
|
if not is_finalize:
|
||||||
if not Path("docs").exists():
|
pytest.skip("Skipping Sphinx test — not full run")
|
||||||
pytest.skip(f"Skipping Sphinx build test - docs folder not present: {DIR_DOCS}")
|
|
||||||
|
|
||||||
# Clean directories
|
# Clean directories
|
||||||
self._cleanup_autosum_dirs()
|
self._cleanup_autosum_dirs()
|
||||||
@@ -176,3 +134,7 @@ class TestSphinxDocumentation:
|
|||||||
]
|
]
|
||||||
|
|
||||||
assert not bad_lines, f"Sphinx build contained errors:\n" + "\n".join(bad_lines)
|
assert not bad_lines, f"Sphinx build contained errors:\n" + "\n".join(bad_lines)
|
||||||
|
|
||||||
|
# Update stored hash
|
||||||
|
HASH_FILE.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
HASH_FILE.write_text(json.dumps({"hash": sphinx_changed}, indent=2))
|
||||||
|
|||||||
@@ -102,6 +102,9 @@ IGNORE_LOCATIONS = [
|
|||||||
# functools
|
# functools
|
||||||
r"\.partial$",
|
r"\.partial$",
|
||||||
|
|
||||||
|
# fnmatch
|
||||||
|
r"\.fnmatch$",
|
||||||
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -50,7 +50,7 @@ def test_optimize(
|
|||||||
fn_out: str,
|
fn_out: str,
|
||||||
ngen: int,
|
ngen: int,
|
||||||
config_eos: ConfigEOS,
|
config_eos: ConfigEOS,
|
||||||
is_full_run: bool,
|
is_finalize: bool,
|
||||||
):
|
):
|
||||||
"""Test optimierung_ems."""
|
"""Test optimierung_ems."""
|
||||||
# Test parameters
|
# Test parameters
|
||||||
@@ -107,8 +107,8 @@ def test_optimize(
|
|||||||
|
|
||||||
genetic_optimization = GeneticOptimization(fixed_seed=fixed_seed)
|
genetic_optimization = GeneticOptimization(fixed_seed=fixed_seed)
|
||||||
|
|
||||||
# Activate with pytest --full-run
|
# Activate with pytest --finalize
|
||||||
if ngen > 10 and not is_full_run:
|
if ngen > 10 and not is_finalize:
|
||||||
pytest.skip()
|
pytest.skip()
|
||||||
|
|
||||||
visualize_filename = str((DIR_TESTDATA / f"new_{fn_out}").with_suffix(".pdf"))
|
visualize_filename = str((DIR_TESTDATA / f"new_{fn_out}").with_suffix(".pdf"))
|
||||||
|
|||||||
119
tests/test_version.py
Normal file
119
tests/test_version.py
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
# tests/test_version.py
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
DIR_PROJECT_ROOT = Path(__file__).parent.parent
|
||||||
|
GET_VERSION_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "get_version.py"
|
||||||
|
BUMP_DEV_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "bump_dev_version.py"
|
||||||
|
UPDATE_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "update_version.py"
|
||||||
|
|
||||||
|
|
||||||
|
# --- Helper to create test files ---
|
||||||
|
def write_file(path: Path, content: str):
|
||||||
|
path.write_text(content, encoding="utf-8")
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
# --- 1️⃣ Test get_version.py ---
|
||||||
|
def test_get_version_prints_non_empty():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, str(GET_VERSION_SCRIPT)],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True
|
||||||
|
)
|
||||||
|
version = result.stdout.strip()
|
||||||
|
assert version, "get_version.py should print a non-empty version"
|
||||||
|
assert len(version.split(".")) >= 3, "Version should have at least MAJOR.MINOR.PATCH"
|
||||||
|
|
||||||
|
|
||||||
|
# --- 2️⃣ Test update_version.py on multiple file types ---
|
||||||
|
def test_update_version_multiple_formats(tmp_path):
|
||||||
|
py_file = write_file(tmp_path / "version.py", '__version__ = "0.1.0"\n')
|
||||||
|
yaml_file = write_file(tmp_path / "config.yaml", 'version: "0.1.0"\n')
|
||||||
|
json_file = write_file(tmp_path / "package.json", '{"version": "0.1.0"}\n')
|
||||||
|
|
||||||
|
new_version = "0.2.0"
|
||||||
|
files = [py_file, yaml_file, json_file]
|
||||||
|
|
||||||
|
subprocess.run(
|
||||||
|
[sys.executable, str(UPDATE_SCRIPT), new_version] + [str(f.resolve()) for f in files],
|
||||||
|
check=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify updates
|
||||||
|
assert f'__version__ = "{new_version}"' in py_file.read_text()
|
||||||
|
assert yaml.safe_load(yaml_file.read_text())["version"] == new_version
|
||||||
|
assert f'"version": "{new_version}"' in json_file.read_text()
|
||||||
|
|
||||||
|
|
||||||
|
# --- 3️⃣ Test bump_dev_version.py ---
|
||||||
|
def test_bump_dev_version_appends_dev(tmp_path):
|
||||||
|
version_file = write_file(tmp_path / "version.py", 'VERSION_BASE = "0.2.0"\n')
|
||||||
|
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, str(BUMP_DEV_SCRIPT), str(version_file.resolve())],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True
|
||||||
|
)
|
||||||
|
new_version = result.stdout.strip()
|
||||||
|
assert new_version == "0.2.0+dev"
|
||||||
|
|
||||||
|
content = version_file.read_text()
|
||||||
|
assert f'VERSION_BASE = "{new_version}"' in content
|
||||||
|
|
||||||
|
|
||||||
|
# --- 4️⃣ Full workflow simulation with git ---
|
||||||
|
def test_workflow_git(tmp_path):
|
||||||
|
# Create git repo
|
||||||
|
subprocess.run(["git", "init"], cwd=tmp_path, check=True)
|
||||||
|
subprocess.run(["git", "config", "user.name", "test"], cwd=tmp_path, check=True)
|
||||||
|
subprocess.run(["git", "config", "user.email", "test@test.com"], cwd=tmp_path, check=True)
|
||||||
|
|
||||||
|
# Create files
|
||||||
|
version_file = write_file(tmp_path / "version.py", 'VERSION_BASE = "0.1.0"\n')
|
||||||
|
config_file = write_file(tmp_path / "config.yaml", 'version: "0.1.0"\n')
|
||||||
|
|
||||||
|
subprocess.run(["git", "add", "."], cwd=tmp_path, check=True)
|
||||||
|
subprocess.run(["git", "commit", "-m", "initial commit"], cwd=tmp_path, check=True)
|
||||||
|
|
||||||
|
# --- Step 1: Calculate version (mock) ---
|
||||||
|
new_version = "0.2.0"
|
||||||
|
|
||||||
|
# --- Step 2: Update files ---
|
||||||
|
subprocess.run(
|
||||||
|
[sys.executable, str(UPDATE_SCRIPT), new_version, str(config_file.resolve()), str(version_file.resolve())],
|
||||||
|
cwd=tmp_path,
|
||||||
|
check=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# --- Step 3: Commit updated files if needed ---
|
||||||
|
subprocess.run(["git", "add", str(config_file.resolve()), str(version_file.resolve())], cwd=tmp_path, check=True)
|
||||||
|
diff_result = subprocess.run(["git", "diff", "--cached", "--quiet"], cwd=tmp_path)
|
||||||
|
assert diff_result.returncode == 1, "There should be staged changes to commit"
|
||||||
|
subprocess.run(["git", "commit", "-m", f"chore: bump version to {new_version}"], cwd=tmp_path, check=True)
|
||||||
|
|
||||||
|
# --- Step 4: Tag version ---
|
||||||
|
tag_name = f"v{new_version}"
|
||||||
|
subprocess.run(["git", "tag", "-a", tag_name, "-m", f"Release {new_version}"], cwd=tmp_path, check=True)
|
||||||
|
tags = subprocess.run(["git", "tag"], cwd=tmp_path, capture_output=True, text=True, check=True).stdout
|
||||||
|
assert tag_name in tags
|
||||||
|
|
||||||
|
# --- Step 5: Bump dev version ---
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, str(BUMP_DEV_SCRIPT), str(version_file.resolve())],
|
||||||
|
cwd=tmp_path,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True
|
||||||
|
)
|
||||||
|
dev_version = result.stdout.strip()
|
||||||
|
assert dev_version.endswith("+dev")
|
||||||
|
assert dev_version.count("+dev") == 1
|
||||||
|
content = version_file.read_text()
|
||||||
|
assert f'VERSION_BASE = "{dev_version}"' in content
|
||||||
Reference in New Issue
Block a user