diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml
new file mode 100644
index 0000000..f460a84
--- /dev/null
+++ b/.github/workflows/bump-version.yml
@@ -0,0 +1,108 @@
+name: Bump Version
+
+# Trigger the workflow on any push to main
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  bump-version:
+    runs-on: ubuntu-latest
+    name: Bump Version Workflow
+
+    steps:
+      # --- Step 1: Checkout the repository ---
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0 # Needed to create tags and see full history
+          persist-credentials: true # Needed for pushing commits and tags
+
+      # --- Step 2: Set up Python ---
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      # --- Step 3: Calculate version dynamically ---
+      - name: Calculate version
+        id: calc
+        run: |
+          # Call custom version calculation script
+          VERSION=$(python scripts/get_version.py)
+          echo "version=$VERSION" >> $GITHUB_OUTPUT
+          echo "Computed version: $VERSION"
+
+      # --- Step 4: Skip the bump steps for development versions ---
+      - name: Check if version contains 'dev'
+        id: devcheck
+        run: |
+          # A plain 'exit 0' would only end this step, not the workflow,
+          # so expose a skip flag that the following steps check.
+          if [[ "${{ steps.calc.outputs.version }}" == *dev* ]]; then
+            echo "Version contains 'dev', skipping bump version steps."
+            echo "skip=true" >> $GITHUB_OUTPUT
+          else
+            echo "skip=false" >> $GITHUB_OUTPUT
+          fi
+
+      # --- Step 5: Update files and commit if necessary ---
+      - name: Update files and commit
+        if: steps.devcheck.outputs.skip != 'true'
+        run: |
+          # Define files to update
+          UPDATE_FILES="haaddon/config.yaml"
+
+          # Call general Python version replacement script
+          python scripts/update_version.py "${{ steps.calc.outputs.version }}" $UPDATE_FILES
+
+          # Commit changes if any
+          git config user.name "github-actions"
+          git config user.email "actions@github.com"
+          git add $UPDATE_FILES
+
+          if git diff --cached --quiet; then
+            echo "No files changed. Skipping commit."
+          else
+            git commit -m "chore: bump version to ${{ steps.calc.outputs.version }}"
+            git push
+          fi
+
+      # --- Step 6: Create release tag ---
+      - name: Create release tag if it does not exist
+        id: tagging
+        if: steps.devcheck.outputs.skip != 'true'
+        run: |
+          TAG="v${{ steps.calc.outputs.version }}"
+
+          if git rev-parse --verify "refs/tags/$TAG" >/dev/null 2>&1; then
+            echo "Tag $TAG already exists. Skipping tag creation."
+            echo "created=false" >> $GITHUB_OUTPUT
+          else
+            git tag -a "$TAG" -m "Release ${{ steps.calc.outputs.version }}"
+            git push origin "$TAG"
+            echo "created=true" >> $GITHUB_OUTPUT
+          fi
+
+      # --- Step 7: Bump to development version ---
+      - name: Bump dev version
+        id: bump_dev
+        if: steps.devcheck.outputs.skip != 'true'
+        run: |
+          VERSION_BASE=$(python scripts/bump_dev_version.py | tail -n1)
+          if [ -z "$VERSION_BASE" ]; then
+            echo "Error: bump_dev_version.py returned an empty version."
+            exit 1
+          fi
+          echo "version_base=$VERSION_BASE" >> $GITHUB_OUTPUT
+
+          git config user.name "github-actions"
+          git config user.email "actions@github.com"
+          git add src/akkudoktoreos/core/version.py
+          if git diff --cached --quiet; then
+            echo "version.py not changed. Skipping commit."
+          else
+            git commit -m "chore: bump dev version to ${VERSION_BASE}"
+            git push
+          fi
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index f5d505a..d01ee16 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -26,7 +26,7 @@ jobs:
     - name: Run Pytest
       run: |
        pip install -e .
- python -m pytest --full-run --check-config-side-effect -vs --cov src --cov-report term-missing + python -m pytest --finalize --check-config-side-effect -vs --cov src --cov-report term-missing - name: Upload test artifacts uses: actions/upload-artifact@v4 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a6b4c42..f54094d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,6 +39,7 @@ repos: - pandas-stubs==2.3.2.250926 - tokenize-rt==6.2.0 - types-docutils==0.22.2.20251006 + - types-PyYaml==6.0.12.20250915 pass_filenames: false # --- Markdown linter --- diff --git a/Dockerfile b/Dockerfile index c24066b..5368978 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,7 @@ # syntax=docker/dockerfile:1.7 +# Dockerfile + +# Set base image first ARG PYTHON_VERSION=3.13.9 FROM python:${PYTHON_VERSION}-slim @@ -32,28 +35,25 @@ RUN adduser --system --group --no-create-home eos \ && mkdir -p "${EOS_CONFIG_DIR}" \ && chown eos "${EOS_CONFIG_DIR}" +# Install requirements COPY requirements.txt . - RUN --mount=type=cache,target=/root/.cache/pip \ pip install --no-cache-dir -r requirements.txt +# Copy source +COPY src/ ./src COPY pyproject.toml . -RUN mkdir -p src && pip install --no-cache-dir -e . -COPY src src +# Create version information +COPY scripts/get_version.py ./scripts/get_version.py +RUN python scripts/get_version.py > ./version.txt +RUN rm ./scripts/get_version.py -# Create minimal default configuration for Docker to fix EOSDash accessibility (#629) -# This ensures EOSDash binds to 0.0.0.0 instead of 127.0.0.1 in containers -RUN echo '{\n\ - "server": {\n\ - "host": "0.0.0.0",\n\ - "port": 8503,\n\ - "startup_eosdash": true,\n\ - "eosdash_host": "0.0.0.0",\n\ - "eosdash_port": 8504\n\ - }\n\ -}' > "${EOS_CONFIG_DIR}/EOS.config.json" \ - && chown eos:eos "${EOS_CONFIG_DIR}/EOS.config.json" +RUN echo "Building Akkudoktor-EOS with Python $PYTHON_VERSION" + +# Install akkudoktoreos package in editable form (-e) +# pyproject-toml will read the version from version.txt +RUN pip install --no-cache-dir -e . USER eos ENTRYPOINT [] @@ -61,6 +61,7 @@ ENTRYPOINT [] EXPOSE 8503 EXPOSE 8504 -CMD ["python", "src/akkudoktoreos/server/eos.py", "--host", "0.0.0.0"] +# Ensure EOS and EOSdash bind to 0.0.0.0 +CMD ["python", "-m", "akkudoktoreos.server.eos", "--host", "0.0.0.0"] VOLUME ["${MPLCONFIGDIR}", "${EOS_CACHE_DIR}", "${EOS_OUTPUT_DIR}", "${EOS_CONFIG_DIR}"] diff --git a/Makefile b/Makefile index 442b752..1bb18c8 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,8 @@ # Define the targets -.PHONY: help venv pip install dist test test-full test-system test-ci test-profile docker-run docker-build docs read-docs clean format gitlint mypy run run-dev run-dash run-dash-dev bumps +.PHONY: help venv pip install dist test test-full test-system test-ci test-profile docker-run docker-build docs read-docs clean format gitlint mypy run run-dev run-dash run-dash-dev prepare-version test-version + +# - Take VERSION from version.py +VERSION := $(shell python3 scripts/get_version.py) # Default target all: help @@ -25,13 +28,13 @@ help: @echo " run-dash - Run EOSdash production server in virtual environment." @echo " run-dash-dev - Run EOSdash development server in virtual environment (automatically reloads)." @echo " test - Run tests." - @echo " test-full - Run tests with full optimization." + @echo " test-full - Run all tests (e.g. to finalize a commit)." @echo " test-system - Run tests with system tests enabled." @echo " test-ci - Run tests as CI does. No user config file allowed." 
@echo " test-profile - Run single test optimization with profiling." @echo " dist - Create distribution (in dist/)." @echo " clean - Remove generated documentation, distribution and virtual environment." - @echo " bump - Bump version to next release version." + @echo " prepare-version - Prepare a version defined in setup.py." # Target to set up a Python 3 virtual environment venv: @@ -50,8 +53,12 @@ pip-dev: pip .venv/bin/pip install -r requirements-dev.txt @echo "Dependencies installed from requirements-dev.txt." +# Target to create a version.txt +version-txt: + echo "$(VERSION)" > version.txt + # Target to install EOS in editable form (development mode) into virtual environment. -install: pip-dev +install: pip-dev version-txt .venv/bin/pip install build .venv/bin/pip install -e . @echo "EOS installed in editable form (development mode)." @@ -63,7 +70,7 @@ dist: pip @echo "Distribution created (see dist/)." # Target to generate documentation -gen-docs: pip-dev +gen-docs: pip-dev version-txt .venv/bin/pip install -e . .venv/bin/python ./scripts/generate_config_md.py --output-file docs/_generated/config.md .venv/bin/python ./scripts/generate_openapi_md.py --output-file docs/_generated/openapi.md @@ -127,7 +134,7 @@ test: # Target to run tests as done by CI on Github. test-ci: @echo "Running tests as CI..." - .venv/bin/pytest --full-run --check-config-side-effect -vs --cov src --cov-report term-missing + .venv/bin/pytest --finalize --check-config-side-effect -vs --cov src --cov-report term-missing # Target to run tests including the system tests. test-system: @@ -137,7 +144,7 @@ test-system: # Target to run all tests. test-full: @echo "Running all tests..." - .venv/bin/pytest --full-run + .venv/bin/pytest --finalize # Target to run tests including the single test optimization with profiling. 
test-profile: @@ -165,16 +172,19 @@ docker-build: @docker pull python:3.13.9-slim @docker compose build -# Bump Akkudoktoreos version -VERSION ?= 0.2.0+dev -NEW_VERSION ?= $(subst +dev,,$(VERSION))+dev # be careful - default is always +dev +# Propagete version info to all version files +# Take UPDATE_FILES from GitHub action bump-version.yml +UPDATE_FILES := $(shell sed -n 's/^[[:space:]]*UPDATE_FILES[[:space:]]*=[[:space:]]*"\([^"]*\)".*/\1/p' \ + .github/workflows/bump-version.yml) +prepare-version: #pip-dev + @echo "Update version to $(VERSION) from version.py in files $(UPDATE_FILES) and doc" + .venv/bin/python ./scripts/update_version.py $(VERSION) $(UPDATE_FILES) + .venv/bin/python ./scripts/convert_lightweight_tags.py + .venv/bin/python ./scripts/generate_config_md.py --output-file docs/_generated/config.md + .venv/bin/python ./scripts/generate_openapi_md.py --output-file docs/_generated/openapi.md + .venv/bin/python ./scripts/generate_openapi.py --output-file openapi.json + .venv/bin/pytest -vv --finalize tests/test_version.py -bump: pip-dev - @echo "Bumping akkudoktoreos version from $(VERSION) to $(NEW_VERSION) (dry-run: $(EXTRA_ARGS))" - .venv/bin/python scripts/convert_lightweight_tags.py - .venv/bin/python scripts/bump_version.py $(VERSION) $(NEW_VERSION) $(EXTRA_ARGS) - -bump-dry: pip-dev - @echo "Bumping akkudoktoreos version from $(VERSION) to $(NEW_VERSION) (dry-run: --dry-run)" - .venv/bin/python scripts/convert_lightweight_tags.py - .venv/bin/python scripts/bump_version.py $(VERSION) $(NEW_VERSION) --dry-run +test-version: + echo "Test version information to be correctly set in all version files" + .venv/bin/pytest -vv tests/test_version.py diff --git a/docker-compose.yaml b/docker-compose.yaml index 63fd486..959f6e9 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -39,18 +39,6 @@ services: - "${EOS_SERVER__EOSDASH_PORT}:8504" # Volume mount configuration (optional) - # IMPORTANT: When mounting local directories, the default config won't be available. - # You must create an EOS.config.json file in your local config directory with: - # { - # "server": { - # "host": "0.0.0.0", # Required for Docker container accessibility - # "port": 8503, - # "startup_eosdash": true, - # "eosdash_host": "0.0.0.0", # Required for Docker container accessibility - # "eosdash_port": 8504 - # } - # } - # # Example volume mounts (uncomment to use): # volumes: # - ./config:/opt/eos/config # Mount local config directory diff --git a/docs/_generated/configexample.md b/docs/_generated/configexample.md index 389a3ac..19e2089 100644 --- a/docs/_generated/configexample.md +++ b/docs/_generated/configexample.md @@ -92,7 +92,7 @@ } }, "general": { - "version": "0.2.0+dev", + "version": "0.2.0+dev.4dbc2d", "data_folder_path": null, "data_output_subpath": "output", "latitude": 52.52, diff --git a/docs/_generated/configgeneral.md b/docs/_generated/configgeneral.md index d12dd42..22d23e1 100644 --- a/docs/_generated/configgeneral.md +++ b/docs/_generated/configgeneral.md @@ -28,7 +28,7 @@ Properties: | latitude | `EOS_GENERAL__LATITUDE` | `Optional[float]` | `rw` | `52.52` | Latitude in decimal degrees, between -90 and 90, north is positive (ISO 19115) (°) | | longitude | `EOS_GENERAL__LONGITUDE` | `Optional[float]` | `rw` | `13.405` | Longitude in decimal degrees, within -180 to 180 (°) | | timezone | | `Optional[str]` | `ro` | `N/A` | None | -| version | `EOS_GENERAL__VERSION` | `str` | `rw` | `0.2.0+dev` | Configuration file version. Used to check compatibility. 
| +| version | `EOS_GENERAL__VERSION` | `str` | `rw` | `0.2.0+dev.4dbc2d` | Configuration file version. Used to check compatibility. | ::: @@ -40,7 +40,7 @@ Properties: ```json { "general": { - "version": "0.2.0+dev", + "version": "0.2.0+dev.4dbc2d", "data_folder_path": null, "data_output_subpath": "output", "latitude": 52.52, @@ -58,7 +58,7 @@ Properties: ```json { "general": { - "version": "0.2.0+dev", + "version": "0.2.0+dev.4dbc2d", "data_folder_path": null, "data_output_subpath": "output", "latitude": 52.52, diff --git a/docs/_generated/openapi.md b/docs/_generated/openapi.md index d1c16e3..6a74d4f 100644 --- a/docs/_generated/openapi.md +++ b/docs/_generated/openapi.md @@ -1,6 +1,6 @@ # Akkudoktor-EOS -**Version**: `v0.2.0+dev` +**Version**: `v0.2.0+dev.4dbc2d` **Description**: This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period. diff --git a/docs/conf.py b/docs/conf.py index f43e4d9..f533f45 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -7,13 +7,20 @@ https://www.sphinx-doc.org/en/master/usage/configuration.html import sys from pathlib import Path +# Add the src directory to sys.path so Sphinx can import akkudoktoreos +PROJECT_ROOT = Path(__file__).parent.parent +SRC_DIR = PROJECT_ROOT / "src" +sys.path.insert(0, str(SRC_DIR)) + +from akkudoktoreos.core.version import __version__ + # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information project = "Akkudoktor EOS" -copyright = "2024, Andreas Schmitz" +copyright = "2025, Andreas Schmitz" author = "Andreas Schmitz" -release = "0.0.1" +release = __version__ # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/docs/develop/develop.md b/docs/develop/develop.md index 1827786..a0ae2fc 100644 --- a/docs/develop/develop.md +++ b/docs/develop/develop.md @@ -393,6 +393,13 @@ At a minimum, you should run the module tests: make test ``` +:::{admonition} Note +:class: Note +Depending on your changes you may also have to change the version.py and documentation files. Do as +suggested by the tests. You may ignore the version.py and documentation changes up until you +finalize your change. +::: + You should also run the system tests. These include additional tests that interact with real resources: diff --git a/docs/develop/release.md b/docs/develop/release.md index 41d647f..8436886 100644 --- a/docs/develop/release.md +++ b/docs/develop/release.md @@ -13,8 +13,8 @@ and how to set a **development version** after the release. 
 | 1 | Contributor | Prepare a release branch **in your fork** using Commitizen |
 | 2 | Contributor | Open a **Pull Request to upstream** (`Akkudoktor-EOS/EOS`) |
 | 3 | Maintainer | Review and **merge the release PR** |
-| 4 | Maintainer | Create the **GitHub Release and tag** |
-| 5 | Maintainer | Set the **development version marker** via a follow-up PR |
+| 4 | CI | Create the **GitHub Release and tag** |
+| 5 | CI | Set the **development version marker** via a follow-up PR |

 ## 🔄 Detailed Workflow
@@ -40,24 +40,26 @@ git checkout -b release/vX.Y.Z

 #### Bump the version information

-At least update
+Set `__version__` in `src/akkudoktoreos/core/version.py`:

-- pyproject.toml
-- src/akkudoktoreos/core/version.py
-- src/akkudoktoreos/data/default.config.json
-- Makefile
+```python
+__version__ = "0.3.0"
+```
+
+Prepare the version by updating the versioned files, e.g.:
+
+- haaddon/config.yaml

 and the generated documentation:

 ```bash
-make bump VERSION=0.1.0+dev NEW_VERSION=X.Y.Z
-make gen-docs
+make prepare-version
 ```

-You may check the changes by:
+Check the changes by:

 ```bash
-git diff
+make test-version
 ```

 #### Create a new CHANGELOG.md entry
@@ -66,19 +68,20 @@ Edit CHANGELOG.md

 #### Create the new release commit

+Add all the changed version files and all other changes to the commit.
+
 ```bash
-git add pyproject.toml src/akkudoktoreos/core/version.py \
-    src/akkudoktoreos/data/default.config.json Makefile CHANGELOG.md
-git commit -s -m "chore(release): Release vX.Y.Z"
+git add src/akkudoktoreos/core/version.py CHANGELOG.md ...
+git commit -s -m "chore: Prepare Release v0.3.0"
 ```

 #### Push the branch to your fork

 ```bash
-git push --set-upstream origin release/vX.Y.Z
+git push --set-upstream origin release/v0.3.0
 ```

-### 2️⃣ Contributor: Open the Release Pull Request
+### 2️⃣ Contributor: Open the Release Preparation Pull Request

 | From | To |
 | ------------------------------------ | ------------------------- |
@@ -87,13 +90,13 @@ git push --set-upstream origin release/vX.Y.Z

 **PR Title:**

 ```text
-chore(release): release vX.Y.Z
+chore: prepare release vX.Y.Z
 ```

 **PR Description Template:**

 ```markdown
-## Release vX.Y.Z
+## Prepare Release vX.Y.Z

 This pull request prepares release **vX.Y.Z**.

@@ -119,94 +122,26 @@ See `CHANGELOG.md` for full details.

 **Merge Strategy:**

 - Prefer **Merge Commit** (or **Squash Merge**, per project preference)
-- Use commit message: `chore(release): Release vX.Y.Z`
+- Use commit message: `chore: Prepare Release vX.Y.Z`

-### 4️⃣ Maintainer: Publish the GitHub Release
+### 4️⃣ CI: Publish the GitHub Release

-1. Go to **GitHub → Releases → Draft a new release**
-2. **Choose tag** → enter `vX.Y.Z` (GitHub creates the tag on publish)
-3. **Release title:** `vX.Y.Z`
-4. **Paste changelog entry** from `CHANGELOG.md`
-5. Optionally enable **Set as latest release**
-6. Click **Publish release** 🎉
+The new release will automatically be published by the GitHub CI action.

-### 5️⃣ Maintainer: Prepare the Development Version Marker
+See `.github/workflows/bump-version.yml` for details.

-**Sync local copy:**
+### 5️⃣ CI: Prepare the Development Version Marker

-```bash
-git fetch eos
-git checkout main
-git pull eos main
-```
+The development version marker will automatically be set by the GitHub CI action.
-**Create a development version branch:** - -```bash -git checkout -b release/vX.Y.Z_dev -``` - -**Set development version marker manually:** - -```bash -make bump VERSION=X.Y.Z NEW_VERSION=X.Y.Z+dev -make gen-docs -``` - -```bash -git add pyproject.toml src/akkudoktoreos/core/version.py \ - src/akkudoktoreos/data/default.config.json Makefile -git commit -s -m "chore: set development version marker X.Y.Z+dev" -``` - -```bash -git push --set-upstream origin release/vX.Y.Z_dev -``` - -### 6️⃣ Maintainer (or Contributor): Open the Development Version PR - -| From | To | -| ---------------------------------------- | ------------------------- | -| `/EOS:release/vX.Y.Z_dev` | `Akkudoktor-EOS/EOS:main` | - -**PR Title:** - -```text -chore: development version vX.Y.Z+dev -``` - -**PR Description Template:** - -```markdown -## Development version vX.Y.Z+dev - -This pull request marks the repository as back in active development. - -### Changes -- Set version to `vX.Y.Z+dev` - -No changelog entry is needed. -``` - -### 7️⃣ Maintainer: Review and Merge the Development Version PR - -**Checklist:** - -- ✅ Only version files updated to `+dev` -- ✅ No unintended changes - -**Merge Strategy:** - -- Merge with commit message: `chore: development version vX.Y.Z+dev` +See `.github/workflwows/bump-version.yml`for details. ## ✅ Quick Reference | Step | Actor | Action | | ---- | ----- | ------ | -| **1. Prepare release branch** | Contributor | Bump version & changelog via Commitizen | +| **1. Prepare release branch** | Contributor | Bump version & changelog | | **2. Open release PR** | Contributor | Submit release for review | | **3. Review & merge release PR** | Maintainer | Finalize changes into `main` | -| **4. Publish GitHub Release** | Maintainer | Create tag & notify users | -| **5. Prepare development version branch** | Maintainer | Set development marker | -| **6. Open development PR** | Maintainer (or Contributor) | Propose returning to development state | -| **7. Review & merge development PR** | Maintainer | Mark repository as back in development | +| **4. Publish GitHub Release** | CI | Create tag & notify users | +| **5. Prepare development version branch** | CI | Set development marker | diff --git a/openapi.json b/openapi.json index fe1b1fe..f04fa05 100644 --- a/openapi.json +++ b/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "Akkudoktor-EOS", "description": "This project provides a comprehensive solution for simulating and optimizing an energy system based on renewable energy sources. With a focus on photovoltaic (PV) systems, battery storage (batteries), load management (consumer requirements), heat pumps, electric vehicles, and consideration of electricity price data, this system enables forecasting and optimization of energy flow and costs over a specified period.", - "version": "v0.2.0+dev" + "version": "v0.2.0+dev.4dbc2d" }, "paths": { "/v1/admin/cache/clear": { @@ -2406,7 +2406,7 @@ "general": { "$ref": "#/components/schemas/GeneralSettings-Output", "default": { - "version": "0.2.0+dev", + "version": "0.2.0+dev.4dbc2d", "data_output_subpath": "output", "latitude": 52.52, "longitude": 13.405, @@ -4084,7 +4084,7 @@ "type": "string", "title": "Version", "description": "Configuration file version. Used to check compatibility.", - "default": "0.2.0+dev" + "default": "0.2.0+dev.4dbc2d" }, "data_folder_path": { "anyOf": [ @@ -4158,7 +4158,7 @@ "type": "string", "title": "Version", "description": "Configuration file version. 
Used to check compatibility.", - "default": "0.2.0+dev" + "default": "0.2.0+dev.4dbc2d" }, "data_folder_path": { "anyOf": [ diff --git a/pyproject.toml b/pyproject.toml index f56d684..01ea2b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "akkudoktor-eos" -version = "0.2.0+dev" +dynamic = ["version"] # Get version information dynamically authors = [ { name="Andreas Schmitz", email="author@example.com" }, ] @@ -25,6 +25,8 @@ build-backend = "setuptools.build_meta" [tool.setuptools.dynamic] dependencies = {file = ["requirements.txt"]} optional-dependencies = {dev = { file = ["requirements-dev.txt"] }} +# version.txt must be generated +version = { file = "version.txt" } [tool.setuptools.packages.find] where = ["src/"] @@ -109,29 +111,10 @@ module = "xprocess.*" ignore_missing_imports = true [tool.commitizen] +# Only used as linter name = "cz_conventional_commits" version_scheme = "semver" -version = "0.2.0+dev" # <-- Set your current version heretag_format = "v$version" -# Files to automatically update when bumping version -update_changelog_on_bump = true -changelog_incremental = true -annotated_tag = true -bump_message = "chore(release): $current_version → $new_version" - -# Branch validation settings +# Enforce commit message and branch style: branch_validation = true branch_pattern = "^(feat|fix|chore|docs|refactor|test)/[a-z0-9._-]+$" - -# Customize changelog generation -[tool.commitizen.changelog] -path = "CHANGELOG.md" -template = "keepachangelog" - -# If your version is stored in multiple files (Python modules, docs etc.), add them here -[tool.commitizen.files] -version = [ - "pyproject.toml", # Auto-update project version - "src/akkudoktoreos/core/version.py", - "src/akkudoktoreos/data/default.config.json" -] diff --git a/requirements-dev.txt b/requirements-dev.txt index f05ae06..daedb07 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,11 +7,15 @@ # - mypy (mirrors-mypy) - sync with requirements-dev.txt (if on pypi) # - pymarkdown # - commitizen - sync with requirements-dev.txt (if on pypi) +# +# !!! Sync .pre-commit-config.yaml and requirements-dev.txt !!! pre-commit==4.4.0 mypy==1.18.2 types-requests==2.32.4.20250913 # for mypy pandas-stubs==2.3.2.250926 # for mypy tokenize-rt==6.2.0 # for mypy +types-docutils==0.22.2.20251006 # for mypy +types-PyYaml==6.0.12.20250915 # for mypy commitizen==4.10.0 deprecated==1.3.1 # for commitizen diff --git a/scripts/bump_dev_version.py b/scripts/bump_dev_version.py new file mode 100644 index 0000000..9077fe0 --- /dev/null +++ b/scripts/bump_dev_version.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +""" +Update VERSION_BASE in version.py after a release tag. 
+ +Behavior: +- Read VERSION_BASE from version.py +- Strip ANY existing "+dev" suffix +- Append exactly one "+dev" +- Write back the updated file + +This ensures: + 0.2.0 --> 0.2.0+dev + 0.2.0+dev --> 0.2.0+dev + 0.2.0+dev+dev -> 0.2.0+dev +""" + +import re +import sys +from pathlib import Path + +ROOT = Path(__file__).resolve().parent.parent +VERSION_FILE = ROOT / "src" / "akkudoktoreos" / "core" / "version.py" + + +def bump_dev_version_file(file: Path) -> str: + text = file.read_text(encoding="utf-8") + + # Extract current version + m = re.search(r'^VERSION_BASE\s*=\s*["\']([^"\']+)["\']', + text, flags=re.MULTILINE) + if not m: + raise ValueError("VERSION_BASE not found") + + base_version = m.group(1) + + # Remove trailing +dev if present → ensure idempotency + cleaned = re.sub(r'(\+dev)+$', '', base_version) + + # Append +dev + new_version = f"{cleaned}+dev" + + # Replace inside file content + new_text = re.sub( + r'^VERSION_BASE\s*=\s*["\']([^"\']+)["\']', + f'VERSION_BASE = "{new_version}"', + text, + flags=re.MULTILINE + ) + + file.write_text(new_text, encoding="utf-8") + + return new_version + + +def main(): + # Use CLI argument or fallback default path + version_file = Path(sys.argv[1]) if len(sys.argv) > 1 else VERSION_FILE + + try: + new_version = bump_dev_version_file(version_file) + except Exception as e: + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + # MUST print to stdout + print(new_version) + + +if __name__ == "__main__": + main() diff --git a/scripts/bump_version.py b/scripts/bump_version.py deleted file mode 100644 index 2d888be..0000000 --- a/scripts/bump_version.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Update version strings in multiple project files only if the old version matches. - -This script updates version information in: -- pyproject.toml -- src/akkudoktoreos/core/version.py -- src/akkudoktoreos/data/default.config.json -- Makefile - -Supported version formats: -- __version__ = "" -- version = "" -- "version": "" -- VERSION ?: - -It will: -- Replace VERSION → NEW_VERSION if the old version is found. -- Report which files were updated. -- Report which files contained mismatched versions. -- Report which files had no version. - -Usage: - python bump_version.py VERSION NEW_VERSION - -Args: - VERSION (str): Version expected before replacement. - NEW_VERSION (str): Version to write. - -""" -#!/usr/bin/env python3 -import argparse -import glob -import os -import re -import shutil -from pathlib import Path -from typing import List, Tuple - -# Patterns to match version strings -VERSION_PATTERNS = [ - re.compile(r'(__version__\s*=\s*")(?P[^"]+)(")'), - re.compile(r'(version\s*=\s*")(?P[^"]+)(")'), - re.compile(r'("version"\s*:\s*")(?P[^"]+)(")'), - re.compile(r'(VERSION\s*\?=\s*)(?P[^\s]+)'), # For Makefile: VERSION ?= 0.2.0 -] - -# Default files to process -DEFAULT_FILES = [ - "pyproject.toml", - "src/akkudoktoreos/core/version.py", - "src/akkudoktoreos/data/default.config.json", - "Makefile", -] - - -def backup_file(file_path: str) -> str: - """Create a backup of the given file with a .bak suffix. - - Args: - file_path: Path to the file to backup. - - Returns: - Path to the backup file. - """ - backup_path = f"{file_path}.bak" - shutil.copy2(file_path, backup_path) - return backup_path - - -def replace_version_in_file( - file_path: Path, old_version: str, new_version: str, dry_run: bool = False -) -> Tuple[bool, bool]: - """ - Replace old_version with new_version in the given file if it matches. - - Args: - file_path: Path to the file to modify. 
- old_version: The old version to replace. - new_version: The new version to set. - dry_run: If True, don't actually modify files. - - Returns: - Tuple[bool, bool]: (file_would_be_updated, old_version_found) - """ - content = file_path.read_text() - new_content = content - old_version_found = False - file_would_be_updated = False - - for pattern in VERSION_PATTERNS: - def repl(match): - nonlocal old_version_found, file_would_be_updated - ver = match.group("ver") - if ver == old_version: - old_version_found = True - file_would_be_updated = True - # Some patterns have 3 groups (like quotes) - if len(match.groups()) == 3: - return f"{match.group(1)}{new_version}{match.group(3)}" - else: - return f"{match.group(1)}{new_version}" - return match.group(0) - - new_content = pattern.sub(repl, new_content) - - if file_would_be_updated: - if dry_run: - print(f"[DRY-RUN] Would update {file_path}") - else: - backup_path = file_path.with_suffix(file_path.suffix + ".bak") - shutil.copy(file_path, backup_path) - file_path.write_text(new_content) - print(f"Updated {file_path} (backup saved to {backup_path})") - elif not old_version_found: - print(f"[SKIP] {file_path}: old version '{old_version}' not found") - - return file_would_be_updated, old_version_found - - -def main(): - parser = argparse.ArgumentParser(description="Bump version across project files.") - parser.add_argument("old_version", help="Old version to replace") - parser.add_argument("new_version", help="New version to set") - parser.add_argument( - "--dry-run", action="store_true", help="Show what would be changed without modifying files" - ) - parser.add_argument( - "--glob", nargs="*", help="Optional glob patterns to include additional files" - ) - args = parser.parse_args() - - updated_files = [] - not_found_files = [] - - # Determine files to update - files_to_update: List[Path] = [Path(f) for f in DEFAULT_FILES] - if args.glob: - for pattern in args.glob: - files_to_update.extend(Path(".").glob(pattern)) - - files_to_update = list(dict.fromkeys(files_to_update)) # remove duplicates - - any_updated = False - for file_path in files_to_update: - if file_path.exists() and file_path.is_file(): - updated, _ = replace_version_in_file( - file_path, args.old_version, args.new_version, args.dry_run - ) - any_updated |= updated - if updated: - updated_files.append(file_path) - else: - print(f"[SKIP] {file_path}: file does not exist") - not_found_files.append(file_path) - - print("\nSummary:") - if updated_files: - print(f"Updated files ({len(updated_files)}):") - for f in updated_files: - print(f" {f}") - else: - print("No files were updated.") - - if not_found_files: - print(f"Files where old version was not found ({len(not_found_files)}):") - for f in not_found_files: - print(f" {f}") - - -if __name__ == "__main__": - main() diff --git a/scripts/get_version.py b/scripts/get_version.py new file mode 100644 index 0000000..53eb838 --- /dev/null +++ b/scripts/get_version.py @@ -0,0 +1,15 @@ +#!.venv/bin/python +"""Get version of EOS""" + +import sys +from pathlib import Path + +# Add the src directory to sys.path so Sphinx can import akkudoktoreos +PROJECT_ROOT = Path(__file__).parent.parent +SRC_DIR = PROJECT_ROOT / "src" +sys.path.insert(0, str(SRC_DIR)) + +from akkudoktoreos.core.version import __version__ + +if __name__ == "__main__": + print(__version__) diff --git a/scripts/update_version.py b/scripts/update_version.py new file mode 100644 index 0000000..eb6c1bd --- /dev/null +++ b/scripts/update_version.py @@ -0,0 +1,113 @@ +#!.venv/bin/python 
+"""General version replacement script. + +Usage: + python scripts/update_version.py [file2 ...] +""" + +#!/usr/bin/env python3 +import re +import sys +from pathlib import Path +from typing import List + +# --- Patterns to match version strings --- +VERSION_PATTERNS = [ + # Python: __version__ = "1.2.3" + re.compile( + r'(?\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)' + r'(")' + ), + + # Python: version = "1.2.3" + re.compile( + r'(?\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)' + r'(")' + ), + + # JSON: "version": "1.2.3" + re.compile( + r'(?\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)' + r'(")' + ), + + # Makefile-style: VERSION ?= 1.2.3 + re.compile( + r'(?\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)' + ), + + # YAML: version: "1.2.3" + re.compile( + r'(?m)^(version\s*:\s*["\']?)' + r'(?P\d+\.\d+\.\d+(?:\+[0-9A-Za-z\.]+)?)' + r'(["\']?)\s*$' + ), +] + + +def update_version_in_file(file_path: Path, new_version: str) -> bool: + """ + Replace version strings in a file based on VERSION_PATTERNS. + Returns True if the file was updated. + """ + content = file_path.read_text() + new_content = content + file_would_be_updated = False + + for pattern in VERSION_PATTERNS: + def repl(match): + nonlocal file_would_be_updated + ver = match.group("ver") + if ver != new_version: + file_would_be_updated = True + + # Three-group patterns (__version__, JSON, YAML) + if len(match.groups()) == 3: + return f"{match.group(1)}{new_version}{match.group(3)}" + + # Two-group patterns (Makefile) + return f"{match.group(1)}{new_version}" + + return match.group(0) + + new_content = pattern.sub(repl, new_content) + + if file_would_be_updated: + file_path.write_text(new_content) + + return file_would_be_updated + + +def main(version: str, files: List[str]): + if not version: + raise ValueError("No version provided") + if not files: + raise ValueError("No files provided") + + updated_files = [] + for f in files: + path = Path(f) + if not path.exists(): + print(f"Warning: {path} does not exist, skipping") + continue + if update_version_in_file(path, version): + updated_files.append(str(path)) + + if updated_files: + print(f"Updated files: {', '.join(updated_files)}") + else: + print("No files updated.") + + +if __name__ == "__main__": + if len(sys.argv) < 3: + print("Usage: python update_version.py [file2 ...]") + sys.exit(1) + + version_arg = sys.argv[1] + files_arg = sys.argv[2:] + main(version_arg, files_arg) diff --git a/src/akkudoktoreos/config/config.py b/src/akkudoktoreos/config/config.py index 0cad98b..c6aa963 100644 --- a/src/akkudoktoreos/config/config.py +++ b/src/akkudoktoreos/config/config.py @@ -11,7 +11,7 @@ Key features: import json import os -import shutil +import tempfile from pathlib import Path from typing import Any, ClassVar, Optional, Type @@ -154,7 +154,7 @@ class GeneralSettings(SettingsBaseModel): if v not in cls.compatible_versions: error = ( f"Incompatible configuration version '{v}'. " - f"Expected one of: {', '.join(cls.compatible_versions)}." + f"Expected: {', '.join(cls.compatible_versions)}." ) logger.error(error) raise ValueError(error) @@ -335,32 +335,44 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults): file_secret_settings (pydantic_settings.PydanticBaseSettingsSource): Unused (needed for parent class interface). Returns: - tuple[pydantic_settings.PydanticBaseSettingsSource, ...]: A tuple of settings sources in the order they should be applied. + tuple[pydantic_settings.PydanticBaseSettingsSource, ...]: A tuple of settings sources in the order they should be applied. Behavior: 1. 
Checks for the existence of a JSON configuration file in the expected location. - 2. If the configuration file does not exist, creates the directory (if needed) and attempts to copy a - default configuration file to the location. If the copy fails, uses the default configuration file directly. - 3. Creates a `pydantic_settings.JsonConfigSettingsSource` for both the configuration file and the default configuration file. + 2. If the configuration file does not exist, creates the directory (if needed) and + attempts to create a default configuration file in the location. If the creation + fails, a temporary configuration directory is used. + 3. Creates a `pydantic_settings.JsonConfigSettingsSource` for the configuration + file. 4. Updates class attributes `GeneralSettings._config_folder_path` and `GeneralSettings._config_file_path` to reflect the determined paths. - 5. Returns a tuple containing all provided and newly created settings sources in the desired order. + 5. Returns a tuple containing all provided and newly created settings sources in + the desired order. Notes: - - This method logs a warning if the default configuration file cannot be copied. - - It ensures that a fallback to the default configuration file is always possible. + - This method logs an error if the default configuration file in the normal + configuration directory cannot be created. + - It ensures that a fallback to a default configuration file is always possible. """ # Ensure we know and have the config folder path and the config file config_file, exists = cls._get_config_file_path() config_dir = config_file.parent if not exists: config_dir.mkdir(parents=True, exist_ok=True) + # Create minimum config file + config_minimum_content = '{ "general": { "version": "' + __version__ + '" } }' try: - shutil.copy2(cls.config_default_file_path, config_file) + config_file.write_text(config_minimum_content, encoding="utf-8") except Exception as exc: - logger.warning(f"Could not copy default config: {exc}. 
Using default config...") - config_file = cls.config_default_file_path - config_dir = config_file.parent + # Create minimum config in temporary config directory as last resort + error_msg = f"Could not create minimum config file in {config_dir}: {exc}" + logger.error(error_msg) + temp_dir = Path(tempfile.mkdtemp()) + info_msg = f"Using temporary config directory {temp_dir}" + logger.info(info_msg) + config_dir = temp_dir + config_file = temp_dir / config_file.name + config_file.write_text(config_minimum_content, encoding="utf-8") # Remember config_dir and config file GeneralSettings._config_folder_path = config_dir GeneralSettings._config_file_path = config_file @@ -387,19 +399,8 @@ class ConfigEOS(SingletonMixin, SettingsEOSDefaults): f"Error reading config file '{config_file}' (falling back to default config): {ex}" ) - # Append default settings to sources - default_settings = pydantic_settings.JsonConfigSettingsSource( - settings_cls, json_file=cls.config_default_file_path - ) - setting_sources.append(default_settings) - return tuple(setting_sources) - @classproperty - def config_default_file_path(cls) -> Path: - """Compute the default config file path.""" - return cls.package_root_path.joinpath("data/default.config.json") - @classproperty def package_root_path(cls) -> Path: """Compute the package root path.""" diff --git a/src/akkudoktoreos/core/version.py b/src/akkudoktoreos/core/version.py index 6d485a7..9521a63 100644 --- a/src/akkudoktoreos/core/version.py +++ b/src/akkudoktoreos/core/version.py @@ -1,5 +1,156 @@ """Version information for akkudoktoreos.""" +import hashlib +import re +from fnmatch import fnmatch +from pathlib import Path +from typing import Optional + # For development add `+dev` to previous release # For release omit `+dev`. -__version__ = "0.2.0+dev" +VERSION_BASE = "0.2.0+dev" + +# Project hash of relevant files +HASH_EOS = "" + + +# ------------------------------ +# Helpers for version generation +# ------------------------------ + + +def is_excluded_dir(path: Path, excluded_dir_patterns: set[str]) -> bool: + """Check whether a directory should be excluded based on name patterns.""" + return any(fnmatch(path.name, pattern) for pattern in excluded_dir_patterns) + + +def hash_tree( + paths: list[Path], + allowed_suffixes: set[str], + excluded_dir_patterns: set[str], + excluded_files: Optional[set[Path]] = None, +) -> str: + """Return SHA256 hash for files under `paths`. + + Restricted by suffix, excluding excluded directory patterns and excluded_files. + """ + h = hashlib.sha256() + excluded_files = excluded_files or set() + + for root in paths: + if not root.exists(): + raise ValueError(f"Root path does not exist: {root}") + for p in sorted(root.rglob("*")): + # Skip excluded directories + if p.is_dir() and is_excluded_dir(p, excluded_dir_patterns): + continue + + # Skip files inside excluded directories + if any(is_excluded_dir(parent, excluded_dir_patterns) for parent in p.parents): + continue + + # Skip excluded files + if p.resolve() in excluded_files: + continue + + # Hash only allowed file types + if p.is_file() and p.suffix.lower() in allowed_suffixes: + h.update(p.read_bytes()) + + digest = h.hexdigest() + + return digest + + +def _version_hash() -> str: + """Calculate project hash. + + Only package file ins src/akkudoktoreos can be hashed to make it work also for packages. 
+ """ + DIR_PACKAGE_ROOT = Path(__file__).resolve().parent.parent + + # Allowed file suffixes to consider + ALLOWED_SUFFIXES: set[str] = {".py", ".md", ".json"} + + # Directory patterns to exclude (glob-like) + EXCLUDED_DIR_PATTERNS: set[str] = {"*_autosum", "*__pycache__", "*_generated"} + + # Files to exclude + EXCLUDED_FILES: set[Path] = set() + + # Directories whose changes shall be part of the project hash + watched_paths = [DIR_PACKAGE_ROOT] + + hash_current = hash_tree( + watched_paths, ALLOWED_SUFFIXES, EXCLUDED_DIR_PATTERNS, excluded_files=EXCLUDED_FILES + ) + return hash_current + + +def _version_calculate() -> str: + """Compute version.""" + global HASH_EOS + HASH_EOS = _version_hash() + if VERSION_BASE.endswith("+dev"): + return f"{VERSION_BASE}.{HASH_EOS[:6]}" + else: + return VERSION_BASE + + +# --------------------------- +# Project version information +# ---------------------------- + +# The version +__version__ = _version_calculate() + + +# ------------------- +# Version info access +# ------------------- + + +# Regular expression to split the version string into pieces +VERSION_RE = re.compile( + r""" + ^(?P\d+\.\d+\.\d+) # x.y.z + (?:\+ # +dev.hash starts here + (?: + (?Pdev) # literal 'dev' + (?:\.(?P[A-Za-z0-9]+))? # optional .hash + ) + )? + $ + """, + re.VERBOSE, +) + + +def version() -> dict[str, Optional[str]]: + """Parses the version string. + + The version string shall be of the form: + x.y.z + x.y.z+dev + x.y.z+dev.HASH + + Returns: + .. code-block:: python + + { + "version": "0.2.0+dev.a96a65", + "base": "x.y.z", + "dev": "dev" or None, + "hash": "" or None, + } + """ + global __version__ + + match = VERSION_RE.match(__version__) + if not match: + raise ValueError(f"Invalid version format: {version}") + + info = match.groupdict() + info["version"] = __version__ + + return info diff --git a/src/akkudoktoreos/data/default.config.json b/src/akkudoktoreos/data/default.config.json deleted file mode 100644 index 27d0c68..0000000 --- a/src/akkudoktoreos/data/default.config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "general": { - "version": "0.2.0+dev" - } -} diff --git a/tests/conftest.py b/tests/conftest.py index 0e85f0c..e78dd3c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import hashlib import json import logging import os @@ -7,6 +8,7 @@ import sys import tempfile import time from contextlib import contextmanager +from fnmatch import fnmatch from http import HTTPStatus from pathlib import Path from typing import Generator, Optional, Union @@ -21,12 +23,14 @@ from loguru import logger from xprocess import ProcessStarter, XProcess from akkudoktoreos.config.config import ConfigEOS, get_config +from akkudoktoreos.core.version import _version_hash, version from akkudoktoreos.server.server import get_default_host # ----------------------------------------------- # Adapt pytest logging handling to Loguru logging # ----------------------------------------------- + @pytest.fixture def caplog(caplog: LogCaptureFixture): """Propagate Loguru logs to the pytest caplog handler.""" @@ -88,7 +92,7 @@ def disable_debug_logging(scope="session", autouse=True): def pytest_addoption(parser): parser.addoption( - "--full-run", action="store_true", default=False, help="Run with all optimization tests." + "--finalize", action="store_true", default=False, help="Run with all tests." 
) parser.addoption( "--check-config-side-effect", @@ -105,8 +109,8 @@ def pytest_addoption(parser): @pytest.fixture -def is_full_run(request): - yield bool(request.config.getoption("--full-run")) +def is_finalize(request): + yield bool(request.config.getoption("--finalize")) @pytest.fixture(autouse=True) @@ -123,6 +127,12 @@ def is_system_test(request): yield bool(request.config.getoption("--system-test")) +@pytest.fixture +def is_ci() -> bool: + """Returns True if running on GitHub Actions CI, False otherwise.""" + return os.getenv("CI") == "true" + + @pytest.fixture def prediction_eos(): from akkudoktoreos.prediction.prediction import get_prediction @@ -528,6 +538,25 @@ def server_setup_for_function(xprocess) -> Generator[dict[str, Union[str, int]], yield result +# -------------------------------------- +# Provide version and hash check support +# -------------------------------------- + + +@pytest.fixture(scope="session") +def version_and_hash() -> Generator[dict[str, Optional[str]], None, None]: + """Return version info as in in version.py and calculate current hash. + + Runs once per test session. + """ + info = version() + info["hash_current"] = _version_hash() + + yield info + + # After all tests + + # ------------------------------ # Provide pytest timezone change # ------------------------------ diff --git a/tests/test_config.py b/tests/test_config.py index 3dbaff5..f76912a 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -120,15 +120,6 @@ def test_singleton_behavior(config_eos, config_default_dirs): assert instance1.general.config_file_path == initial_cfg_file -def test_default_config_path(config_eos, config_default_dirs): - """Test that the default config file path is computed correctly.""" - _, _, config_default_dir_default, _ = config_default_dirs - - expected_path = config_default_dir_default.joinpath("default.config.json") - assert config_eos.config_default_file_path == expected_path - assert config_eos.config_default_file_path.is_file() - - def test_config_file_priority(config_default_dirs): """Test config file priority. diff --git a/tests/test_docsphinx.py b/tests/test_docsphinx.py index 68c7089..f8364b0 100644 --- a/tests/test_docsphinx.py +++ b/tests/test_docsphinx.py @@ -1,12 +1,11 @@ -import hashlib import json import os import shutil import subprocess import sys import tempfile -from fnmatch import fnmatch from pathlib import Path +from typing import Optional import pytest @@ -18,43 +17,6 @@ DIR_SRC = DIR_PROJECT_ROOT / "src" HASH_FILE = DIR_BUILD / ".sphinx_hash.json" -# Allowed file suffixes to consider -ALLOWED_SUFFIXES = {".py", ".md", ".json"} - -# Directory patterns to exclude (glob-like) -EXCLUDED_DIR_PATTERNS = {"*_autosum", "*__pycache__"} - - -def is_excluded_dir(path: Path) -> bool: - """Check whether a directory should be excluded based on name patterns.""" - return any(fnmatch(path.name, pattern) for pattern in EXCLUDED_DIR_PATTERNS) - - -def hash_tree(paths: list[Path], suffixes=ALLOWED_SUFFIXES) -> str: - """Return SHA256 hash for files under `paths`. - - Restricted by suffix, excluding excluded directory patterns. 
- """ - h = hashlib.sha256() - - for root in paths: - if not root.exists(): - continue - for p in sorted(root.rglob("*")): - # Skip excluded directories - if p.is_dir() and is_excluded_dir(p): - continue - - # Skip files inside excluded directories - if any(is_excluded_dir(parent) for parent in p.parents): - continue - - # Hash only allowed file types - if p.is_file() and p.suffix.lower() in suffixes: - h.update(p.read_bytes()) - - return h.hexdigest() - def find_sphinx_build() -> str: venv = os.getenv("VIRTUAL_ENV") @@ -69,15 +31,12 @@ def find_sphinx_build() -> str: @pytest.fixture(scope="session") -def sphinx_changed() -> bool: - """Returns True if any watched files have changed since last run. +def sphinx_changed(version_and_hash) -> Optional[str]: + """Returns new hash if any watched files have changed since last run. Hash is stored in .sphinx_hash.json. """ - # Directories whose changes should trigger rebuilding docs - watched_paths = [Path("docs"), Path("src")] - - current_hash = hash_tree(watched_paths) + new_hash = None # Load previous hash try: @@ -86,13 +45,12 @@ def sphinx_changed() -> bool: except Exception: previous_hash = None - changed = (previous_hash != current_hash) + changed = (previous_hash != version_and_hash["hash_current"]) - # Update stored hash - HASH_FILE.parent.mkdir(parents=True, exist_ok=True) - HASH_FILE.write_text(json.dumps({"hash": current_hash}, indent=2)) + if changed: + new_hash = version_and_hash["hash_current"] - return changed + return new_hash class TestSphinxDocumentation: @@ -120,17 +78,17 @@ class TestSphinxDocumentation: if DIR_BUILD_DOCS.exists(): shutil.rmtree(DIR_BUILD_DOCS) - def test_sphinx_build(self, sphinx_changed: bool, is_full_run: bool): + def test_sphinx_build(self, sphinx_changed: Optional[str], is_finalize: bool): """Build Sphinx documentation and ensure no major warnings appear in the build output.""" - if not is_full_run: - pytest.skip("Skipping Sphinx test — not full run") + # Ensure docs folder exists + if not DIR_DOCS.exists(): + pytest.skip(f"Skipping Sphinx build test - docs folder not present: {DIR_DOCS}") if not sphinx_changed: pytest.skip(f"Skipping Sphinx build — no relevant file changes detected: {HASH_FILE}") - # Ensure docs folder exists - if not Path("docs").exists(): - pytest.skip(f"Skipping Sphinx build test - docs folder not present: {DIR_DOCS}") + if not is_finalize: + pytest.skip("Skipping Sphinx test — not full run") # Clean directories self._cleanup_autosum_dirs() @@ -176,3 +134,7 @@ class TestSphinxDocumentation: ] assert not bad_lines, f"Sphinx build contained errors:\n" + "\n".join(bad_lines) + + # Update stored hash + HASH_FILE.parent.mkdir(parents=True, exist_ok=True) + HASH_FILE.write_text(json.dumps({"hash": sphinx_changed}, indent=2)) diff --git a/tests/test_docstringrst.py b/tests/test_docstringrst.py index f9d27fc..7b8323b 100644 --- a/tests/test_docstringrst.py +++ b/tests/test_docstringrst.py @@ -102,6 +102,9 @@ IGNORE_LOCATIONS = [ # functools r"\.partial$", + # fnmatch + r"\.fnmatch$", + ] # --------------------------------------------------------------------------- diff --git a/tests/test_geneticoptimize.py b/tests/test_geneticoptimize.py index 4301adc..add9994 100644 --- a/tests/test_geneticoptimize.py +++ b/tests/test_geneticoptimize.py @@ -50,7 +50,7 @@ def test_optimize( fn_out: str, ngen: int, config_eos: ConfigEOS, - is_full_run: bool, + is_finalize: bool, ): """Test optimierung_ems.""" # Test parameters @@ -107,8 +107,8 @@ def test_optimize( genetic_optimization = 
GeneticOptimization(fixed_seed=fixed_seed) - # Activate with pytest --full-run - if ngen > 10 and not is_full_run: + # Activate with pytest --finalize + if ngen > 10 and not is_finalize: pytest.skip() visualize_filename = str((DIR_TESTDATA / f"new_{fn_out}").with_suffix(".pdf")) diff --git a/tests/test_version.py b/tests/test_version.py new file mode 100644 index 0000000..da8c939 --- /dev/null +++ b/tests/test_version.py @@ -0,0 +1,119 @@ +# tests/test_version.py +import subprocess +import sys +from pathlib import Path + +import pytest +import yaml + +DIR_PROJECT_ROOT = Path(__file__).parent.parent +GET_VERSION_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "get_version.py" +BUMP_DEV_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "bump_dev_version.py" +UPDATE_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "update_version.py" + + +# --- Helper to create test files --- +def write_file(path: Path, content: str): + path.write_text(content, encoding="utf-8") + return path + + +# --- 1️⃣ Test get_version.py --- +def test_get_version_prints_non_empty(): + result = subprocess.run( + [sys.executable, str(GET_VERSION_SCRIPT)], + capture_output=True, + text=True, + check=True + ) + version = result.stdout.strip() + assert version, "get_version.py should print a non-empty version" + assert len(version.split(".")) >= 3, "Version should have at least MAJOR.MINOR.PATCH" + + +# --- 2️⃣ Test update_version.py on multiple file types --- +def test_update_version_multiple_formats(tmp_path): + py_file = write_file(tmp_path / "version.py", '__version__ = "0.1.0"\n') + yaml_file = write_file(tmp_path / "config.yaml", 'version: "0.1.0"\n') + json_file = write_file(tmp_path / "package.json", '{"version": "0.1.0"}\n') + + new_version = "0.2.0" + files = [py_file, yaml_file, json_file] + + subprocess.run( + [sys.executable, str(UPDATE_SCRIPT), new_version] + [str(f.resolve()) for f in files], + check=True + ) + + # Verify updates + assert f'__version__ = "{new_version}"' in py_file.read_text() + assert yaml.safe_load(yaml_file.read_text())["version"] == new_version + assert f'"version": "{new_version}"' in json_file.read_text() + + +# --- 3️⃣ Test bump_dev_version.py --- +def test_bump_dev_version_appends_dev(tmp_path): + version_file = write_file(tmp_path / "version.py", 'VERSION_BASE = "0.2.0"\n') + + result = subprocess.run( + [sys.executable, str(BUMP_DEV_SCRIPT), str(version_file.resolve())], + capture_output=True, + text=True, + check=True + ) + new_version = result.stdout.strip() + assert new_version == "0.2.0+dev" + + content = version_file.read_text() + assert f'VERSION_BASE = "{new_version}"' in content + + +# --- 4️⃣ Full workflow simulation with git --- +def test_workflow_git(tmp_path): + # Create git repo + subprocess.run(["git", "init"], cwd=tmp_path, check=True) + subprocess.run(["git", "config", "user.name", "test"], cwd=tmp_path, check=True) + subprocess.run(["git", "config", "user.email", "test@test.com"], cwd=tmp_path, check=True) + + # Create files + version_file = write_file(tmp_path / "version.py", 'VERSION_BASE = "0.1.0"\n') + config_file = write_file(tmp_path / "config.yaml", 'version: "0.1.0"\n') + + subprocess.run(["git", "add", "."], cwd=tmp_path, check=True) + subprocess.run(["git", "commit", "-m", "initial commit"], cwd=tmp_path, check=True) + + # --- Step 1: Calculate version (mock) --- + new_version = "0.2.0" + + # --- Step 2: Update files --- + subprocess.run( + [sys.executable, str(UPDATE_SCRIPT), new_version, str(config_file.resolve()), str(version_file.resolve())], + cwd=tmp_path, + check=True + ) + + 
# --- Step 3: Commit updated files if needed --- + subprocess.run(["git", "add", str(config_file.resolve()), str(version_file.resolve())], cwd=tmp_path, check=True) + diff_result = subprocess.run(["git", "diff", "--cached", "--quiet"], cwd=tmp_path) + assert diff_result.returncode == 1, "There should be staged changes to commit" + subprocess.run(["git", "commit", "-m", f"chore: bump version to {new_version}"], cwd=tmp_path, check=True) + + # --- Step 4: Tag version --- + tag_name = f"v{new_version}" + subprocess.run(["git", "tag", "-a", tag_name, "-m", f"Release {new_version}"], cwd=tmp_path, check=True) + tags = subprocess.run(["git", "tag"], cwd=tmp_path, capture_output=True, text=True, check=True).stdout + assert tag_name in tags + + # --- Step 5: Bump dev version --- + result = subprocess.run( + [sys.executable, str(BUMP_DEV_SCRIPT), str(version_file.resolve())], + cwd=tmp_path, + capture_output=True, + text=True, + check=True + ) + dev_version = result.stdout.strip() + assert dev_version.endswith("+dev") + assert dev_version.count("+dev") == 1 + content = version_file.read_text() + assert f'VERSION_BASE = "{dev_version}"' in content