mirror of
https://github.com/Akkudoktor-EOS/EOS.git
synced 2025-11-21 04:46:31 +00:00
chore: automate development version and release generation (#772)
Some checks failed
Bump Version / Bump Version Workflow (push) Has been cancelled
docker-build / platform-excludes (push) Has been cancelled
docker-build / build (push) Has been cancelled
docker-build / merge (push) Has been cancelled
pre-commit / pre-commit (push) Has been cancelled
Run Pytest on Pull Request / test (push) Has been cancelled
This change introduces a GitHub Action to automate release creation, including proper tagging and the automatic addition of a development marker to the version. A hash is also appended to development versions to make their state easier to distinguish. Tests and release documentation have been updated to reflect the revised release workflow. Several files now retrieve the current version dynamically. The test --full-run option has been renamed to --finalize to make clear that it is meant for commit finalization testing.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
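For illustration, a minimal sketch of how such a development version could be composed (a hypothetical helper; VERSION_BASE and the "+dev" marker follow tests/test_version.py below, while the real wiring lives in src/akkudoktoreos/core/version.py and may differ):

def dev_version(version_base: str, tree_hash: str) -> str:
    # Hypothetical sketch, not the shipped implementation.
    # Development versions carry a "+dev" marker; a short source-tree
    # hash is appended so the working state is easy to distinguish.
    if version_base.endswith("+dev"):
        return f"{version_base}.{tree_hash[:7]}"  # e.g. "0.2.0+dev.a1b2c3d" (format assumed)
    return version_base  # release versions stay untouched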
@@ -1,3 +1,4 @@
import hashlib
import json
import logging
import os
@@ -7,6 +8,7 @@ import sys
import tempfile
import time
from contextlib import contextmanager
from fnmatch import fnmatch
from http import HTTPStatus
from pathlib import Path
from typing import Generator, Optional, Union
@@ -21,12 +23,14 @@ from loguru import logger
from xprocess import ProcessStarter, XProcess

from akkudoktoreos.config.config import ConfigEOS, get_config
from akkudoktoreos.core.version import _version_hash, version
from akkudoktoreos.server.server import get_default_host

# -----------------------------------------------
# Adapt pytest logging handling to Loguru logging
# -----------------------------------------------


@pytest.fixture
def caplog(caplog: LogCaptureFixture):
    """Propagate Loguru logs to the pytest caplog handler."""
@@ -88,7 +92,7 @@ def disable_debug_logging(scope="session", autouse=True):

def pytest_addoption(parser):
    parser.addoption(
        "--full-run", action="store_true", default=False, help="Run with all optimization tests."
        "--finalize", action="store_true", default=False, help="Run with all tests."
    )
    parser.addoption(
        "--check-config-side-effect",
@@ -105,8 +109,8 @@ def pytest_addoption(parser):


@pytest.fixture
def is_full_run(request):
    yield bool(request.config.getoption("--full-run"))
def is_finalize(request):
    yield bool(request.config.getoption("--finalize"))


@pytest.fixture(autouse=True)
@@ -123,6 +127,12 @@ def is_system_test(request):
    yield bool(request.config.getoption("--system-test"))


@pytest.fixture
def is_ci() -> bool:
    """Returns True if running on GitHub Actions CI, False otherwise."""
    return os.getenv("CI") == "true"


@pytest.fixture
def prediction_eos():
    from akkudoktoreos.prediction.prediction import get_prediction
@@ -528,6 +538,25 @@ def server_setup_for_function(xprocess) -> Generator[dict[str, Union[str, int]],
    yield result


# --------------------------------------
# Provide version and hash check support
# --------------------------------------


@pytest.fixture(scope="session")
def version_and_hash() -> Generator[dict[str, Optional[str]], None, None]:
"""Return version info as in in version.py and calculate current hash.
|
||||
|
||||
Runs once per test session.
|
||||
"""
|
||||
info = version()
|
||||
info["hash_current"] = _version_hash()
|
||||
|
||||
yield info
|
||||
|
||||
# After all tests
|
||||
|
||||
|
||||
# ------------------------------
|
||||
# Provide pytest timezone change
|
||||
# ------------------------------
|
||||
|
||||
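A usage sketch for the new session fixture (a hypothetical test, not part of this commit): version() supplies the recorded version info, and the fixture adds the freshly computed tree hash under "hash_current":

def test_version_hash_is_computed(version_and_hash):
    # "hash_current" is filled in by the fixture via _version_hash()
    assert version_and_hash["hash_current"] is not None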
@@ -120,15 +120,6 @@ def test_singleton_behavior(config_eos, config_default_dirs):
    assert instance1.general.config_file_path == initial_cfg_file


def test_default_config_path(config_eos, config_default_dirs):
    """Test that the default config file path is computed correctly."""
    _, _, config_default_dir_default, _ = config_default_dirs

    expected_path = config_default_dir_default.joinpath("default.config.json")
    assert config_eos.config_default_file_path == expected_path
    assert config_eos.config_default_file_path.is_file()


def test_config_file_priority(config_default_dirs):
    """Test config file priority.
@@ -1,12 +1,11 @@
import hashlib
import json
import os
import shutil
import subprocess
import sys
import tempfile
from fnmatch import fnmatch
from pathlib import Path
from typing import Optional

import pytest
@@ -18,43 +17,6 @@ DIR_SRC = DIR_PROJECT_ROOT / "src"

HASH_FILE = DIR_BUILD / ".sphinx_hash.json"

# Allowed file suffixes to consider
ALLOWED_SUFFIXES = {".py", ".md", ".json"}

# Directory patterns to exclude (glob-like)
EXCLUDED_DIR_PATTERNS = {"*_autosum", "*__pycache__"}


def is_excluded_dir(path: Path) -> bool:
    """Check whether a directory should be excluded based on name patterns."""
    return any(fnmatch(path.name, pattern) for pattern in EXCLUDED_DIR_PATTERNS)


def hash_tree(paths: list[Path], suffixes=ALLOWED_SUFFIXES) -> str:
    """Return SHA256 hash for files under `paths`.

    Restricted by suffix, excluding excluded directory patterns.
    """
    h = hashlib.sha256()

    for root in paths:
        if not root.exists():
            continue
        for p in sorted(root.rglob("*")):
            # Skip excluded directories
            if p.is_dir() and is_excluded_dir(p):
                continue

            # Skip files inside excluded directories
            if any(is_excluded_dir(parent) for parent in p.parents):
                continue

            # Hash only allowed file types
            if p.is_file() and p.suffix.lower() in suffixes:
                h.update(p.read_bytes())

    return h.hexdigest()


def find_sphinx_build() -> str:
    venv = os.getenv("VIRTUAL_ENV")
@@ -69,15 +31,12 @@ def find_sphinx_build() -> str:


@pytest.fixture(scope="session")
def sphinx_changed() -> bool:
    """Returns True if any watched files have changed since last run.
def sphinx_changed(version_and_hash) -> Optional[str]:
    """Returns new hash if any watched files have changed since last run.

    Hash is stored in .sphinx_hash.json.
    """
    # Directories whose changes should trigger rebuilding docs
    watched_paths = [Path("docs"), Path("src")]

    current_hash = hash_tree(watched_paths)
    new_hash = None

    # Load previous hash
    try:
@@ -86,13 +45,12 @@ def sphinx_changed() -> bool:
    except Exception:
        previous_hash = None

    changed = (previous_hash != current_hash)
    changed = (previous_hash != version_and_hash["hash_current"])

    # Update stored hash
    HASH_FILE.parent.mkdir(parents=True, exist_ok=True)
    HASH_FILE.write_text(json.dumps({"hash": current_hash}, indent=2))
    if changed:
        new_hash = version_and_hash["hash_current"]

    return changed
    return new_hash


class TestSphinxDocumentation:
@@ -120,17 +78,17 @@ class TestSphinxDocumentation:
        if DIR_BUILD_DOCS.exists():
            shutil.rmtree(DIR_BUILD_DOCS)

    def test_sphinx_build(self, sphinx_changed: bool, is_full_run: bool):
    def test_sphinx_build(self, sphinx_changed: Optional[str], is_finalize: bool):
        """Build Sphinx documentation and ensure no major warnings appear in the build output."""
        if not is_full_run:
            pytest.skip("Skipping Sphinx test — not full run")
        # Ensure docs folder exists
        if not DIR_DOCS.exists():
            pytest.skip(f"Skipping Sphinx build test - docs folder not present: {DIR_DOCS}")

        if not sphinx_changed:
            pytest.skip(f"Skipping Sphinx build — no relevant file changes detected: {HASH_FILE}")

        # Ensure docs folder exists
        if not Path("docs").exists():
            pytest.skip(f"Skipping Sphinx build test - docs folder not present: {DIR_DOCS}")
        if not is_finalize:
            pytest.skip("Skipping Sphinx test — not full run")

        # Clean directories
        self._cleanup_autosum_dirs()
@@ -176,3 +134,7 @@ class TestSphinxDocumentation:
        ]

        assert not bad_lines, f"Sphinx build contained errors:\n" + "\n".join(bad_lines)

        # Update stored hash
        HASH_FILE.parent.mkdir(parents=True, exist_ok=True)
        HASH_FILE.write_text(json.dumps({"hash": sphinx_changed}, indent=2))
@@ -102,6 +102,9 @@ IGNORE_LOCATIONS = [
    # functools
    r"\.partial$",

    # fnmatch
    r"\.fnmatch$",
]

# ---------------------------------------------------------------------------
@@ -50,7 +50,7 @@ def test_optimize(
    fn_out: str,
    ngen: int,
    config_eos: ConfigEOS,
    is_full_run: bool,
    is_finalize: bool,
):
    """Test optimierung_ems."""
    # Test parameters
@@ -107,8 +107,8 @@ def test_optimize(

    genetic_optimization = GeneticOptimization(fixed_seed=fixed_seed)

    # Activate with pytest --full-run
    if ngen > 10 and not is_full_run:
    # Activate with pytest --finalize
    if ngen > 10 and not is_finalize:
        pytest.skip()

    visualize_filename = str((DIR_TESTDATA / f"new_{fn_out}").with_suffix(".pdf"))
119 tests/test_version.py Normal file
@@ -0,0 +1,119 @@
# tests/test_version.py
import subprocess
import sys
from pathlib import Path

import pytest
import yaml

DIR_PROJECT_ROOT = Path(__file__).parent.parent
GET_VERSION_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "get_version.py"
BUMP_DEV_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "bump_dev_version.py"
UPDATE_SCRIPT = DIR_PROJECT_ROOT / "scripts" / "update_version.py"


# --- Helper to create test files ---
def write_file(path: Path, content: str):
    path.write_text(content, encoding="utf-8")
    return path


# --- 1️⃣ Test get_version.py ---
def test_get_version_prints_non_empty():
    result = subprocess.run(
        [sys.executable, str(GET_VERSION_SCRIPT)],
        capture_output=True,
        text=True,
        check=True
    )
    version = result.stdout.strip()
    assert version, "get_version.py should print a non-empty version"
    assert len(version.split(".")) >= 3, "Version should have at least MAJOR.MINOR.PATCH"


# --- 2️⃣ Test update_version.py on multiple file types ---
def test_update_version_multiple_formats(tmp_path):
    py_file = write_file(tmp_path / "version.py", '__version__ = "0.1.0"\n')
    yaml_file = write_file(tmp_path / "config.yaml", 'version: "0.1.0"\n')
    json_file = write_file(tmp_path / "package.json", '{"version": "0.1.0"}\n')

    new_version = "0.2.0"
    files = [py_file, yaml_file, json_file]

    subprocess.run(
        [sys.executable, str(UPDATE_SCRIPT), new_version] + [str(f.resolve()) for f in files],
        check=True
    )

    # Verify updates
    assert f'__version__ = "{new_version}"' in py_file.read_text()
    assert yaml.safe_load(yaml_file.read_text())["version"] == new_version
    assert f'"version": "{new_version}"' in json_file.read_text()


# --- 3️⃣ Test bump_dev_version.py ---
def test_bump_dev_version_appends_dev(tmp_path):
    version_file = write_file(tmp_path / "version.py", 'VERSION_BASE = "0.2.0"\n')

    result = subprocess.run(
        [sys.executable, str(BUMP_DEV_SCRIPT), str(version_file.resolve())],
        capture_output=True,
        text=True,
        check=True
    )
    new_version = result.stdout.strip()
    assert new_version == "0.2.0+dev"

    content = version_file.read_text()
    assert f'VERSION_BASE = "{new_version}"' in content


# --- 4️⃣ Full workflow simulation with git ---
def test_workflow_git(tmp_path):
    # Create git repo
    subprocess.run(["git", "init"], cwd=tmp_path, check=True)
    subprocess.run(["git", "config", "user.name", "test"], cwd=tmp_path, check=True)
    subprocess.run(["git", "config", "user.email", "test@test.com"], cwd=tmp_path, check=True)

    # Create files
    version_file = write_file(tmp_path / "version.py", 'VERSION_BASE = "0.1.0"\n')
    config_file = write_file(tmp_path / "config.yaml", 'version: "0.1.0"\n')

    subprocess.run(["git", "add", "."], cwd=tmp_path, check=True)
    subprocess.run(["git", "commit", "-m", "initial commit"], cwd=tmp_path, check=True)

    # --- Step 1: Calculate version (mock) ---
    new_version = "0.2.0"

    # --- Step 2: Update files ---
    subprocess.run(
        [sys.executable, str(UPDATE_SCRIPT), new_version, str(config_file.resolve()), str(version_file.resolve())],
        cwd=tmp_path,
        check=True
    )

    # --- Step 3: Commit updated files if needed ---
    subprocess.run(["git", "add", str(config_file.resolve()), str(version_file.resolve())], cwd=tmp_path, check=True)
    diff_result = subprocess.run(["git", "diff", "--cached", "--quiet"], cwd=tmp_path)
    assert diff_result.returncode == 1, "There should be staged changes to commit"
    subprocess.run(["git", "commit", "-m", f"chore: bump version to {new_version}"], cwd=tmp_path, check=True)

    # --- Step 4: Tag version ---
    tag_name = f"v{new_version}"
    subprocess.run(["git", "tag", "-a", tag_name, "-m", f"Release {new_version}"], cwd=tmp_path, check=True)
    tags = subprocess.run(["git", "tag"], cwd=tmp_path, capture_output=True, text=True, check=True).stdout
    assert tag_name in tags

    # --- Step 5: Bump dev version ---
    result = subprocess.run(
        [sys.executable, str(BUMP_DEV_SCRIPT), str(version_file.resolve())],
        cwd=tmp_path,
        capture_output=True,
        text=True,
        check=True
    )
    dev_version = result.stdout.strip()
    assert dev_version.endswith("+dev")
    assert dev_version.count("+dev") == 1
    content = version_file.read_text()
    assert f'VERSION_BASE = "{dev_version}"' in content
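The contract of scripts/bump_dev_version.py can be read off the tests above: rewrite VERSION_BASE in the given file with a single "+dev" suffix and print the result. A minimal sketch consistent with those assertions (the shipped script may differ):

# Sketch of bump_dev_version.py; behavior inferred from tests/test_version.py.
import re
import sys
from pathlib import Path


def bump(path: Path) -> str:
    content = path.read_text(encoding="utf-8")
    match = re.search(r'VERSION_BASE = "([^"]+)"', content)
    if match is None:
        raise SystemExit(f"No VERSION_BASE found in {path}")
    base = match.group(1)
    # Append the marker only once so repeated runs stay idempotent
    new_version = base if base.endswith("+dev") else base + "+dev"
    path.write_text(
        content.replace(match.group(0), f'VERSION_BASE = "{new_version}"'),
        encoding="utf-8",
    )
    return new_version


if __name__ == "__main__":
    print(bump(Path(sys.argv[1])))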