diff --git a/.gitignore b/.gitignore index f2221b5b9..35821a7f7 100644 --- a/.gitignore +++ b/.gitignore @@ -30,6 +30,7 @@ !src/ !tests/ !typings/ +!libs/ !.vscode/ !.claude/ @@ -93,6 +94,7 @@ !src/** !tests/** !typings/** +!libs/** !.vscode/** !.claude/** diff --git a/Makefile b/Makefile index 38f24acf6..8f340f3af 100644 --- a/Makefile +++ b/Makefile @@ -23,6 +23,17 @@ VERSION ?= TAG ?= BUMP ?= CREATE_BRANCHES ?= 1 +PR_ACTION ?= status +PR_BASE ?= main +PR_HEAD ?= +PR_NUMBER ?= +PR_TITLE ?= +PR_BODY ?= +PR_DRAFT ?= 0 +PR_MERGE_METHOD ?= squash +PR_AUTO ?= 0 +PR_DELETE_BRANCH ?= 0 +PR_CHECKS_STRICT ?= 0 Q := @ ifdef VERBOSE @@ -121,7 +132,7 @@ if [ -n "$$residual_venvs" ]; then \ fi endef -.PHONY: help setup upgrade build check security format docs test validate typings clean release release-ci +.PHONY: help setup upgrade build check security format docs test validate typings clean release release-ci pr help: ## Show simple workspace verbs $(Q)echo "FLEXT Workspace" @@ -141,6 +152,7 @@ help: ## Show simple workspace verbs $(Q)echo " validate Run validate gates (FIX=1 auto-fix, VALIDATE_SCOPE=workspace for repo-level)" $(Q)echo " release Interactive workspace release orchestration" $(Q)echo " release-ci Non-interactive release run for CI/tag workflows" + $(Q)echo " pr Manage PRs for selected projects" $(Q)echo " typings Stub supply-chain + typing report (PROJECT/PROJECTS to scope)" $(Q)echo " clean Clean all projects" $(Q)echo "" @@ -158,8 +170,13 @@ help: ## Show simple workspace verbs $(Q)echo " INTERACTIVE=1|0 Release prompt mode" $(Q)echo " DRY_RUN=1 Print plan, do not tag/push" $(Q)echo " PUSH=1 Push release commit/tag" - $(Q)echo " VERSION=0.10.0 TAG=v0.10.0 BUMP=patch Release controls" + $(Q)echo " VERSION= TAG=v BUMP=patch Release controls" $(Q)echo " CREATE_BRANCHES=1|0 Create release branches in workspace + projects" + $(Q)echo " PR_ACTION=status|create|view|checks|merge|close" + $(Q)echo " PR_BASE=main PR_HEAD= PR_NUMBER= PR_DRAFT=0|1" + $(Q)echo " 
PR_TITLE='title' PR_BODY='body' PR_MERGE_METHOD=squash|merge|rebase" + $(Q)echo " PR_AUTO=0|1 PR_DELETE_BRANCH=0|1" + $(Q)echo " PR_CHECKS_STRICT=0|1 checks action strict failure toggle" $(Q)echo " DEPS_REPORT=0 Skip dependency report after upgrade/typings" $(Q)echo "" $(Q)echo "Examples:" @@ -171,7 +188,9 @@ help: ## Show simple workspace verbs $(Q)echo " make test PROJECT=flext-api PYTEST_ARGS=\"-k unit\" FAIL_FAST=1" $(Q)echo " make validate VALIDATE_SCOPE=workspace" $(Q)echo " make release BUMP=minor" - $(Q)echo " make release-ci VERSION=0.10.0 TAG=v0.10.0 RELEASE_PHASE=all" + $(Q)echo " make release-ci VERSION=0.11.0 TAG=v0.11.0 RELEASE_PHASE=all" + $(Q)echo " make pr PROJECT=flext-core PR_ACTION=status" + $(Q)echo " make pr PROJECT=flext-core PR_ACTION=create PR_TITLE='release: 0.11.0-dev'" $(Q)echo " NOTE: External projects (not in .gitmodules) require manual clone." setup: ## Install all projects into workspace .venv @@ -186,6 +205,7 @@ setup: ## Install all projects into workspace .venv $(Q)$(ENFORCE_WORKSPACE_VENV) $(Q)$(ENSURE_SELECTED_PROJECTS) $(Q)$(ENSURE_PROJECTS_EXIST) + $(Q)echo "Enforcing Python 3.13 version guards..."; python3.13 scripts/maintenance/enforce_python_version.py || exit 1 $(Q)$(AUTO_ADJUST_SELECTED_PROJECTS) $(Q)echo "Modernizing pyproject.toml files..."; \ $(POETRY_ENV) python scripts/dependencies/modernize_pyproject.py --skip-check 2>&1 | grep -E "^Phase|Total:|✓|No semantic" || true; \ @@ -284,6 +304,7 @@ upgrade: ## Upgrade Python dependencies to latest via Poetry $(Q)$(ENFORCE_WORKSPACE_VENV) $(Q)$(ENSURE_SELECTED_PROJECTS) $(Q)$(ENSURE_PROJECTS_EXIST) + $(Q)echo "Enforcing Python 3.13 version guards..."; python3.13 scripts/maintenance/enforce_python_version.py || exit 1 $(Q)echo "Modernizing pyproject.toml files..."; \ $(POETRY_ENV) python scripts/dependencies/modernize_pyproject.py --skip-check 2>&1 | grep -E "^Phase|Total:|✓|No semantic" || true; \ echo "" @@ -400,12 +421,16 @@ build: ## Build/package all selected projects 
$(Q)$(ORCHESTRATOR) --verb build $(if $(filter 1,$(FAIL_FAST)),--fail-fast) $(SELECTED_PROJECTS) release: ## Interactive workspace release orchestration + $(Q)$(ENSURE_NO_PROJECT_CONFLICT) $(Q)$(ENFORCE_WORKSPACE_VENV) + $(Q)$(ENSURE_SELECTED_PROJECTS) + $(Q)$(ENSURE_PROJECTS_EXIST) $(Q)python scripts/release/run.py \ --root "$(CURDIR)" \ --phase "$(RELEASE_PHASE)" \ --interactive "$(INTERACTIVE)" \ --create-branches "$(CREATE_BRANCHES)" \ + --projects $(SELECTED_PROJECTS) \ $(if $(DRY_RUN),--dry-run "$(DRY_RUN)",) \ $(if $(PUSH),--push "$(PUSH)",) \ $(if $(VERSION),--version "$(VERSION)",) \ @@ -413,18 +438,41 @@ release: ## Interactive workspace release orchestration $(if $(BUMP),--bump "$(BUMP)",) release-ci: ## Non-interactive release run for CI/tag workflows + $(Q)$(ENSURE_NO_PROJECT_CONFLICT) $(Q)$(ENFORCE_WORKSPACE_VENV) + $(Q)$(ENSURE_SELECTED_PROJECTS) + $(Q)$(ENSURE_PROJECTS_EXIST) $(Q)python scripts/release/run.py \ --root "$(CURDIR)" \ --phase "$(RELEASE_PHASE)" \ --interactive 0 \ --create-branches 0 \ + --projects $(SELECTED_PROJECTS) \ $(if $(DRY_RUN),--dry-run "$(DRY_RUN)",) \ $(if $(PUSH),--push "$(PUSH)",) \ $(if $(VERSION),--version "$(VERSION)",) \ $(if $(TAG),--tag "$(TAG)",) \ $(if $(BUMP),--bump "$(BUMP)",) +pr: ## Manage pull requests for selected projects + $(Q)$(ENSURE_NO_PROJECT_CONFLICT) + $(Q)$(ENSURE_SELECTED_PROJECTS) + $(Q)$(ENSURE_PROJECTS_EXIST) + $(Q)$(ORCHESTRATOR) --verb pr \ + $(if $(filter 1,$(FAIL_FAST)),--fail-fast) \ + --make-arg "PR_ACTION=$(PR_ACTION)" \ + --make-arg "PR_BASE=$(PR_BASE)" \ + $(if $(PR_HEAD),--make-arg "PR_HEAD=$(PR_HEAD)",) \ + $(if $(PR_NUMBER),--make-arg "PR_NUMBER=$(PR_NUMBER)",) \ + $(if $(PR_TITLE),--make-arg "PR_TITLE=$(PR_TITLE)",) \ + $(if $(PR_BODY),--make-arg "PR_BODY=$(PR_BODY)",) \ + --make-arg "PR_DRAFT=$(PR_DRAFT)" \ + --make-arg "PR_MERGE_METHOD=$(PR_MERGE_METHOD)" \ + --make-arg "PR_AUTO=$(PR_AUTO)" \ + --make-arg "PR_DELETE_BRANCH=$(PR_DELETE_BRANCH)" \ + --make-arg 
"PR_CHECKS_STRICT=$(PR_CHECKS_STRICT)" \ + $(SELECTED_PROJECTS) + security: ## Run all security checks in all projects $(Q)$(ENSURE_NO_PROJECT_CONFLICT) $(Q)$(ENFORCE_WORKSPACE_VENV) @@ -470,6 +518,7 @@ ifeq ($(VALIDATE_SCOPE),workspace) $(Q)$(AUTO_ADJUST_SELECTED_PROJECTS) $(Q)mkdir -p .reports $(Q)echo "Running workspace validation (inventory + strict anti-drift gates)..." + $(Q)python3.13 scripts/maintenance/enforce_python_version.py --check || exit 1 $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/generate_scripts_inventory.py --root . $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/check_base_mk_sync.py $(Q)$(WORKSPACE_VENV)/bin/python scripts/github/lint_workflows.py --root . --report .reports/workflows/actionlint.json diff --git a/base.mk b/base.mk index 77541d100..cf69373ba 100644 --- a/base.mk +++ b/base.mk @@ -19,6 +19,17 @@ CHECK_GATES ?= VALIDATE_GATES ?= DOCS_PHASE ?= all AUTO_ADJUST ?= 1 +PR_ACTION ?= status +PR_BASE ?= main +PR_HEAD ?= +PR_NUMBER ?= +PR_TITLE ?= +PR_BODY ?= +PR_DRAFT ?= 0 +PR_MERGE_METHOD ?= squash +PR_AUTO ?= 0 +PR_DELETE_BRANCH ?= 0 +PR_CHECKS_STRICT ?= 0 PYTEST_REPORT_ARGS := -ra --durations=25 --durations-min=0.001 --tb=short PYTEST_DIAG_ARGS := -rA --durations=0 --tb=long --showlocals @@ -89,8 +100,8 @@ $(LINT_CACHE_DIR): $(Q)mkdir -p $(LINT_CACHE_DIR) # === SIMPLE VERB SURFACE === -.PHONY: help setup build check security format docs docs-base docs-sync-scripts test validate clean _preflight -STANDARD_VERBS := setup build check security format docs test validate clean +.PHONY: help setup build check security format docs docs-base docs-sync-scripts test validate clean pr _preflight +STANDARD_VERBS := setup build check security format docs test validate clean pr $(STANDARD_VERBS): _preflight define ENFORCE_WORKSPACE_VENV @@ -170,7 +181,15 @@ help: ## Show commands $(Q)echo " docs Build docs" $(Q)echo " test Run pytest only" $(Q)echo " validate Run validate gates only (use FIX=1 to auto-fix first)" + $(Q)echo " pr Manage this 
repository PR (default: status)" $(Q)echo " clean Clean build/test/type artifacts" + $(Q)echo "" + $(Q)echo "PR variables:" + $(Q)echo " PR_ACTION=status|create|view|checks|merge|close" + $(Q)echo " PR_BASE=main PR_HEAD= PR_NUMBER=" + $(Q)echo " PR_TITLE='title' PR_BODY='body' PR_DRAFT=0|1" + $(Q)echo " PR_MERGE_METHOD=squash|merge|rebase PR_AUTO=0|1 PR_DELETE_BRANCH=0|1" + $(Q)echo " PR_CHECKS_STRICT=0|1 (checks: fail command only when strict=1)" setup: ## Complete setup $(Q)if [ "$(CORE_STACK)" = "go" ]; then \ @@ -484,6 +503,21 @@ validate: ## Run validate gates (VALIDATE_GATES=complexity,docstring to select, $(POETRY) run interrogate $(SRC_DIR) --fail-under=$(DOCSTRING_MIN) --ignore-init-method --ignore-magic -q; \ fi +pr: ## Manage pull requests for this repository + $(Q)python3 "$(WORKSPACE_ROOT)/scripts/github/pr_manager.py" \ + --repo-root "$(CURDIR)" \ + --action "$(PR_ACTION)" \ + --base "$(PR_BASE)" \ + $(if $(PR_HEAD),--head "$(PR_HEAD)",) \ + $(if $(PR_NUMBER),--number "$(PR_NUMBER)",) \ + $(if $(PR_TITLE),--title "$(PR_TITLE)",) \ + $(if $(PR_BODY),--body "$(PR_BODY)",) \ + --draft "$(PR_DRAFT)" \ + --merge-method "$(PR_MERGE_METHOD)" \ + --auto "$(PR_AUTO)" \ + --delete-branch "$(PR_DELETE_BRANCH)" \ + --checks-strict "$(PR_CHECKS_STRICT)" + clean: ## Clean artifacts $(Q)if [ "$(CORE_STACK)" = "go" ]; then \ rm -f coverage.out coverage.html; \ diff --git a/flexcore b/flexcore index 7b12d4b7a..14f505685 160000 --- a/flexcore +++ b/flexcore @@ -1 +1 @@ -Subproject commit 7b12d4b7a062c04e8e4b803d50b19061e28b4cbb +Subproject commit 14f5056857ddf7a7d9ae5a658ccc4a52d513b8c5 diff --git a/flext-api b/flext-api index 37d68b5de..a7078d7dd 160000 --- a/flext-api +++ b/flext-api @@ -1 +1 @@ -Subproject commit 37d68b5dee4fa1ea5aaf7c72b20b438e946f676c +Subproject commit a7078d7ddb93c365f8729b23710379cd5e769d0c diff --git a/flext-auth b/flext-auth index 566608791..3a69c72cd 160000 --- a/flext-auth +++ b/flext-auth @@ -1 +1 @@ -Subproject commit 
56660879174d07332ff06edde280864e1b7aa33e +Subproject commit 3a69c72cd7c39473006a6c296b462579970c490d diff --git a/flext-cli b/flext-cli index 5e65b95d1..a07e51d8f 160000 --- a/flext-cli +++ b/flext-cli @@ -1 +1 @@ -Subproject commit 5e65b95d16eff8dbf19a4655263622b982b61d54 +Subproject commit a07e51d8f7a791a204af4db53e30ee4376ecda42 diff --git a/flext-core b/flext-core index a474573ba..22daaf349 160000 --- a/flext-core +++ b/flext-core @@ -1 +1 @@ -Subproject commit a474573ba3e518e8e7d1590800ae07ca4e2779da +Subproject commit 22daaf349bb6ef4c7328f8ab2b4ede026cfe4d52 diff --git a/flext-db-oracle b/flext-db-oracle index 0c02a456e..e1ad2f9a2 160000 --- a/flext-db-oracle +++ b/flext-db-oracle @@ -1 +1 @@ -Subproject commit 0c02a456eeac940679251c934bd858410affb4b7 +Subproject commit e1ad2f9a2d6bb530bef96a8042ff889e3f287646 diff --git a/flext-dbt-ldap b/flext-dbt-ldap index 6d5b62e7c..4887fb02f 160000 --- a/flext-dbt-ldap +++ b/flext-dbt-ldap @@ -1 +1 @@ -Subproject commit 6d5b62e7c0fc01caacbc90c9ed242836c1c0485b +Subproject commit 4887fb02f214faffeead37c74cc8003d9a10700b diff --git a/flext-dbt-ldif b/flext-dbt-ldif index 5ab5a2cb4..fe5655c64 160000 --- a/flext-dbt-ldif +++ b/flext-dbt-ldif @@ -1 +1 @@ -Subproject commit 5ab5a2cb40a26d9afbf1e2713b952c9053b847e7 +Subproject commit fe5655c642819c96932a13720d89e576eea625e0 diff --git a/flext-dbt-oracle b/flext-dbt-oracle index d36174673..15b14ae53 160000 --- a/flext-dbt-oracle +++ b/flext-dbt-oracle @@ -1 +1 @@ -Subproject commit d36174673293bf2748c8d681741523aa10e0aa6c +Subproject commit 15b14ae532f7059690fd945e3ebbd803e023b43d diff --git a/flext-dbt-oracle-wms b/flext-dbt-oracle-wms index dfa89830a..ef4cc2471 160000 --- a/flext-dbt-oracle-wms +++ b/flext-dbt-oracle-wms @@ -1 +1 @@ -Subproject commit dfa89830ab65cb622a87b633a7d156a98fd4e964 +Subproject commit ef4cc2471d25e0d691fb300c6bb35f6a037a848c diff --git a/flext-grpc b/flext-grpc index f9d5613e3..126a0dd3e 160000 --- a/flext-grpc +++ b/flext-grpc @@ -1 +1 @@ 
-Subproject commit f9d5613e3dc6ef5969d935063fab9adc52d5399b +Subproject commit 126a0dd3ebf578cd56c1391d9979551b26e2ca8b diff --git a/flext-ldap b/flext-ldap index 36ceb892b..f99e9e3c4 160000 --- a/flext-ldap +++ b/flext-ldap @@ -1 +1 @@ -Subproject commit 36ceb892b457785465a8d390f067daf52a2baa00 +Subproject commit f99e9e3c43a0fd8f746638c918d398ce8aaffdab diff --git a/flext-ldif b/flext-ldif index 291015f3d..f8e80e40f 160000 --- a/flext-ldif +++ b/flext-ldif @@ -1 +1 @@ -Subproject commit 291015f3d02ef45cd36d0cb5b20130461d697b3f +Subproject commit f8e80e40ff73c47b7dce0b9df8f0d95505f47a64 diff --git a/flext-meltano b/flext-meltano index 0a3a4386d..7fadb3747 160000 --- a/flext-meltano +++ b/flext-meltano @@ -1 +1 @@ -Subproject commit 0a3a4386d5e25d8a2b1f9896db76e3dc25dd2320 +Subproject commit 7fadb37478b1d2d629f07093a0ddb4e1d8a98d98 diff --git a/flext-observability b/flext-observability index af808de82..2d10a1f0e 160000 --- a/flext-observability +++ b/flext-observability @@ -1 +1 @@ -Subproject commit af808de828f7fbb29b3023ae122ec9b2122ec210 +Subproject commit 2d10a1f0e87502bf0c782e17bf37639272d967a9 diff --git a/flext-oracle-oic b/flext-oracle-oic index 7e6286dd5..cc2e16173 160000 --- a/flext-oracle-oic +++ b/flext-oracle-oic @@ -1 +1 @@ -Subproject commit 7e6286dd5f9b279e6fc2b6c8953ef722fc28137b +Subproject commit cc2e161734520bae4420c0796e85bb6e7b991db4 diff --git a/flext-oracle-wms b/flext-oracle-wms index 3ef7d7065..937391808 160000 --- a/flext-oracle-wms +++ b/flext-oracle-wms @@ -1 +1 @@ -Subproject commit 3ef7d7065a8a5f3120caa114c5e0272ecaf1038c +Subproject commit 9373918086b265969b728fd2f35aecffb71a18ef diff --git a/flext-plugin b/flext-plugin index 9c405c1e7..322310953 160000 --- a/flext-plugin +++ b/flext-plugin @@ -1 +1 @@ -Subproject commit 9c405c1e7ff81c88bef1887057c91e4666f268bf +Subproject commit 32231095315848dfa18676b6f21791a39a7eabe6 diff --git a/flext-quality b/flext-quality index e942a599e..d4dde3104 160000 --- a/flext-quality +++ b/flext-quality 
@@ -1 +1 @@ -Subproject commit e942a599e1da20e37272f75f89504d8d1dc8557a +Subproject commit d4dde3104bd714f3f46f3a5866663d7124917c57 diff --git a/flext-tap-ldap b/flext-tap-ldap index 7526648a8..8541c6c47 160000 --- a/flext-tap-ldap +++ b/flext-tap-ldap @@ -1 +1 @@ -Subproject commit 7526648a86c4ec48566a455b863263db8a5bfe3e +Subproject commit 8541c6c472aac1f3395e291d7d861b32068810f3 diff --git a/flext-tap-ldif b/flext-tap-ldif index 1e7855c4e..b479b6c9f 160000 --- a/flext-tap-ldif +++ b/flext-tap-ldif @@ -1 +1 @@ -Subproject commit 1e7855c4ed4a2e7ecd3f1362883fc72837f35bd5 +Subproject commit b479b6c9fc6c96e73e754bf15c0b5ff6a65ea941 diff --git a/flext-tap-oracle b/flext-tap-oracle index d6c524993..0fd876924 160000 --- a/flext-tap-oracle +++ b/flext-tap-oracle @@ -1 +1 @@ -Subproject commit d6c5249938df7419db4c11e0b4a51cbe4de6de8d +Subproject commit 0fd8769249bf3b7d4be3d8e379f4dd0f6b22540b diff --git a/flext-tap-oracle-oic b/flext-tap-oracle-oic index c9bc050ef..e4e3ae6f2 160000 --- a/flext-tap-oracle-oic +++ b/flext-tap-oracle-oic @@ -1 +1 @@ -Subproject commit c9bc050ef90ad765362744826368b32b347f45f2 +Subproject commit e4e3ae6f26fb8ce351936536aaa082734bb5203c diff --git a/flext-tap-oracle-wms b/flext-tap-oracle-wms index 288b0374c..c01b262ca 160000 --- a/flext-tap-oracle-wms +++ b/flext-tap-oracle-wms @@ -1 +1 @@ -Subproject commit 288b0374c7468b42ca926a715217bd1db0871d6b +Subproject commit c01b262ca838c14decb92410d2bc523d6c3f3fd6 diff --git a/flext-target-ldap b/flext-target-ldap index a65f13fb2..45bcf0b8b 160000 --- a/flext-target-ldap +++ b/flext-target-ldap @@ -1 +1 @@ -Subproject commit a65f13fb2e111d4cb977136ac10c62c71d4c169b +Subproject commit 45bcf0b8bb366faad7ed7090613e627b49828bab diff --git a/flext-target-ldif b/flext-target-ldif index a2e176503..a4181c289 160000 --- a/flext-target-ldif +++ b/flext-target-ldif @@ -1 +1 @@ -Subproject commit a2e176503df1321b3f3d4437a6febf6860d93ced +Subproject commit a4181c28995ea0fb3ccdb453c1f9d0f1ed4d5a20 diff --git 
a/flext-target-oracle b/flext-target-oracle index 07228747f..bf05d77a2 160000 --- a/flext-target-oracle +++ b/flext-target-oracle @@ -1 +1 @@ -Subproject commit 07228747ff6c41ebb9eaaea4f25dee51a8b47c72 +Subproject commit bf05d77a27fb4a3eac8b7e74996deee6968f7e37 diff --git a/flext-target-oracle-oic b/flext-target-oracle-oic index 52514a169..efc7d1354 160000 --- a/flext-target-oracle-oic +++ b/flext-target-oracle-oic @@ -1 +1 @@ -Subproject commit 52514a1699da146e03ffcecfcb0bd46b9daedb3d +Subproject commit efc7d1354fdd4e355d4667b3f2c7cab031133221 diff --git a/flext-target-oracle-wms b/flext-target-oracle-wms index c2ec5e67b..2ddd84389 160000 --- a/flext-target-oracle-wms +++ b/flext-target-oracle-wms @@ -1 +1 @@ -Subproject commit c2ec5e67bf9b841a915721b810018ec34e730802 +Subproject commit 2ddd8438953882740460a86d5c8953fe5e456419 diff --git a/flext-web b/flext-web index 3b1787fe9..3d3de1889 160000 --- a/flext-web +++ b/flext-web @@ -1 +1 @@ -Subproject commit 3b1787fe9bc889ead2eb2b85d4736b34f96c6d0c +Subproject commit 3d3de188943e99ffffff382f53f36588ac068e06 diff --git a/libs/__init__.py b/libs/__init__.py new file mode 100644 index 000000000..9eb54b1de --- /dev/null +++ b/libs/__init__.py @@ -0,0 +1,10 @@ +from __future__ import annotations + +__all__ = [ + "discovery", + "git", + "paths", + "reporting", + "selection", + "subprocess", +] diff --git a/libs/discovery.py b/libs/discovery.py new file mode 100644 index 000000000..85b31be68 --- /dev/null +++ b/libs/discovery.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +import re +from dataclasses import dataclass +from pathlib import Path + + +@dataclass(frozen=True) +class ProjectInfo: + path: Path + name: str + kind: str + + +def _is_git_project(path: Path) -> bool: + return (path / ".git").exists() + + +def _submodule_names(workspace_root: Path) -> set[str]: + gitmodules = workspace_root / ".gitmodules" + if not gitmodules.exists(): + return set() + try: + content = gitmodules.read_text(encoding="utf-8") 
+ except OSError: + return set() + return set(re.findall(r"^\s*path\s*=\s*(.+?)\s*$", content, re.MULTILINE)) + + +def discover_projects(workspace_root: Path) -> list[ProjectInfo]: + projects: list[ProjectInfo] = [] + submodules = _submodule_names(workspace_root) + for entry in sorted(workspace_root.iterdir(), key=lambda value: value.name): + if not entry.is_dir() or entry.name == "cmd" or entry.name.startswith("."): + continue + if not _is_git_project(entry): + continue + if not (entry / "Makefile").exists(): + continue + if not (entry / "pyproject.toml").exists() and not (entry / "go.mod").exists(): + continue + kind = "submodule" if entry.name in submodules else "external" + projects.append(ProjectInfo(path=entry, name=entry.name, kind=kind)) + return projects diff --git a/libs/git.py b/libs/git.py new file mode 100644 index 000000000..ff152f993 --- /dev/null +++ b/libs/git.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from pathlib import Path + +from libs.subprocess import run_capture + + +def current_branch(repo_root: Path) -> str: + return run_capture(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=repo_root) + + +def tag_exists(repo_root: Path, tag: str) -> bool: + value = run_capture(["git", "tag", "-l", tag], cwd=repo_root) + return value.strip() == tag diff --git a/libs/paths.py b/libs/paths.py new file mode 100644 index 000000000..46bc4f25b --- /dev/null +++ b/libs/paths.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from pathlib import Path + + +def workspace_root(path: str | Path = ".") -> Path: + return Path(path).resolve() + + +def repo_root_from_script(script_file: str | Path) -> Path: + return Path(script_file).resolve().parents[1] diff --git a/libs/reporting.py b/libs/reporting.py new file mode 100644 index 000000000..a2f57a5a4 --- /dev/null +++ b/libs/reporting.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from pathlib import Path + + +def reports_root(workspace_root: Path) -> Path: + return workspace_root 
/ ".reports" + + +def ensure_report_dir(workspace_root: Path, *parts: str) -> Path: + path = reports_root(workspace_root).joinpath(*parts) + path.mkdir(parents=True, exist_ok=True) + return path diff --git a/libs/selection.py b/libs/selection.py new file mode 100644 index 000000000..d44082f58 --- /dev/null +++ b/libs/selection.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from pathlib import Path + +from libs.discovery import ProjectInfo, discover_projects + + +def filter_projects(projects: list[ProjectInfo], kind: str) -> list[ProjectInfo]: + if kind == "all": + return list(projects) + return [project for project in projects if project.kind == kind] + + +def resolve_projects(workspace_root: Path, names: list[str]) -> list[ProjectInfo]: + projects = discover_projects(workspace_root) + if not names: + return sorted(projects, key=lambda project: project.name) + + by_name = {project.name: project for project in projects} + missing = [name for name in names if name not in by_name] + if missing: + missing_text = ", ".join(sorted(missing)) + raise RuntimeError(f"unknown projects: {missing_text}") + + resolved = [by_name[name] for name in names] + return sorted(resolved, key=lambda project: project.name) diff --git a/libs/subprocess.py b/libs/subprocess.py new file mode 100644 index 000000000..cec83ca2d --- /dev/null +++ b/libs/subprocess.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +import shlex +import subprocess +from pathlib import Path + + +def run_checked(command: list[str], cwd: Path | None = None) -> None: + result = subprocess.run(command, cwd=cwd, check=False) + if result.returncode != 0: + cmd = shlex.join(command) + raise RuntimeError(f"command failed ({result.returncode}): {cmd}") + + +def run_capture(command: list[str], cwd: Path | None = None) -> str: + result = subprocess.run( + command, + cwd=cwd, + capture_output=True, + text=True, + check=False, + ) + if result.returncode != 0: + cmd = shlex.join(command) + detail = 
(result.stderr or result.stdout).strip() + raise RuntimeError(f"command failed ({result.returncode}): {cmd}: {detail}") + return result.stdout.strip() diff --git a/pyproject.toml b/pyproject.toml index 694225e57..f5e90275b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -529,6 +529,7 @@ types-tabulate = "^0.9.0.20241207" ] python_files = [ "*_test.py", + "*_tests.py", "test_*.py", ] python_functions = [ diff --git a/scripts/core/skill_validate.py b/scripts/core/skill_validate.py index a603630e4..661d64887 100644 --- a/scripts/core/skill_validate.py +++ b/scripts/core/skill_validate.py @@ -12,6 +12,11 @@ import time from pathlib import Path +if str(Path(__file__).resolve().parents[2]) not in sys.path: + sys.path.insert(0, str(Path(__file__).resolve().parents[2])) + +from libs.discovery import discover_projects as ssot_discover_projects + try: import yaml # type: ignore[import-untyped] except ImportError: @@ -198,43 +203,13 @@ def discover_projects(root: Path) -> dict[str, object]: "root": "."} """ - gitmodules = root / ".gitmodules" - gitmodules_text = "" - if gitmodules.exists(): - try: - gitmodules_text = gitmodules.read_text(encoding="utf-8") - except OSError as exc: - msg = f"Cannot read {gitmodules}" - raise SkillInfraError(msg) from exc - - flext_projects: list[str] = [] - for line in gitmodules_text.splitlines(): - stripped = line.strip() - if "path = flext-" not in stripped: - continue - name = stripped.split("path = ", 1)[-1].strip() - if not name: - continue - if (root / name / "pyproject.toml").is_file(): - flext_projects.append(name) - - external_projects: list[str] = [] - for child in sorted(root.iterdir(), key=lambda p: p.name): - if not child.is_dir(): - continue - name = child.name - pyproject = child / "pyproject.toml" - if not pyproject.is_file(): - continue - if f"path = {name}" in gitmodules_text: - continue - try: - pyproject_text = pyproject.read_text(encoding="utf-8") - except OSError as exc: - msg = f"Cannot read {pyproject}" - raise 
SkillInfraError(msg) from exc - if "flext-core" in pyproject_text or "flext_core" in pyproject_text: - external_projects.append(name) + discovered = ssot_discover_projects(root) + flext_projects = [ + project.name for project in discovered if project.kind == "submodule" + ] + external_projects = [ + project.name for project in discovered if project.kind == "external" + ] return { "flext": unique_sorted(flext_projects), diff --git a/scripts/core/stub_supply_chain.py b/scripts/core/stub_supply_chain.py index 1fd9024e3..ba32a0481 100644 --- a/scripts/core/stub_supply_chain.py +++ b/scripts/core/stub_supply_chain.py @@ -16,6 +16,11 @@ from dataclasses import dataclass from pathlib import Path +if str(Path(__file__).resolve().parents[2]) not in sys.path: + sys.path.insert(0, str(Path(__file__).resolve().parents[2])) + +from libs.selection import resolve_projects + MISSING_IMPORT_RE = re.compile(r"Cannot find module `([^`]+)` \[missing-import\]") MYPY_HINT_RE = re.compile(r'note: Hint: "python3 -m pip install ([^"]+)"') MYPY_STUB_RE = re.compile(r'Library stubs not installed for "([^"]+)"') @@ -65,13 +70,12 @@ def run_cmd( def discover_projects(root: Path) -> list[Path]: - projects: list[Path] = [] - for entry in sorted(root.iterdir()): - if not entry.is_dir(): - continue - if (entry / "pyproject.toml").exists() and (entry / "src").is_dir(): - projects.append(entry) - return projects + return [ + project.path + for project in resolve_projects(root, names=[]) + if (project.path / "pyproject.toml").exists() + and (project.path / "src").is_dir() + ] def load_pyproject(project_dir: Path) -> dict[str, object]: diff --git a/scripts/dependencies/dependency_detection.py b/scripts/dependencies/dependency_detection.py index 8d932fa21..a31bbf6e9 100644 --- a/scripts/dependencies/dependency_detection.py +++ b/scripts/dependencies/dependency_detection.py @@ -12,12 +12,19 @@ from __future__ import annotations import json +import os import re import subprocess +import sys import 
tomllib from pathlib import Path from typing import Any +if str(Path(__file__).resolve().parents[2]) not in sys.path: + sys.path.insert(0, str(Path(__file__).resolve().parents[2])) + +from libs.selection import resolve_projects + # Mypy output patterns for typing library detection (aligned with stub_supply_chain) MYPY_HINT_RE = re.compile(r'note: Hint: "python3 -m pip install ([^"]+)"') MYPY_STUB_RE = re.compile(r'Library stubs not installed for "([^"]+)"') @@ -62,22 +69,16 @@ def discover_projects( workspace_root: Path, projects_filter: list[str] | None = None, ) -> list[Path]: - """Discover all Python projects under workspace (top-level dirs with pyproject.toml). - - Matches root Makefile / sync_dependencies: any dir with pyproject.toml, excluding SKIP_DIRS. - """ - projects: list[Path] = [] - for item in sorted(workspace_root.iterdir()): - if not item.is_dir(): - continue - if any(skip in item.name for skip in SKIP_DIRS): - continue - if not (item / "pyproject.toml").exists(): - continue - if projects_filter is not None and item.name not in projects_filter: - continue - projects.append(item) - return projects + projects = [ + project.path + for project in resolve_projects(workspace_root, names=[]) + if (project.path / "pyproject.toml").exists() + and not any(skip in project.name for skip in SKIP_DIRS) + ] + if projects_filter is not None: + filter_set = set(projects_filter) + projects = [path for path in projects if path.name in filter_set] + return sorted(projects) def run_deptry( @@ -146,7 +147,7 @@ def run_pip_check(workspace_root: Path, venv_bin: Path) -> tuple[list[str], int] capture_output=True, text=True, timeout=60, - env={**subprocess.os.environ, "VIRTUAL_ENV": str(venv_bin.parent)}, + env={**os.environ, "VIRTUAL_ENV": str(venv_bin.parent)}, ) out = (result.stdout or "").strip().splitlines() if result.stdout else [] return out, result.returncode @@ -219,9 +220,9 @@ def run_mypy_stub_hints( "--no-error-summary", ] env = { - **subprocess.os.environ, + 
**os.environ, "VIRTUAL_ENV": str(venv_bin.parent), - "PATH": f"{venv_bin}:{subprocess.os.environ.get('PATH', '')}", + "PATH": f"{venv_bin}:{os.environ.get('PATH', '')}", } result = subprocess.run( cmd, diff --git a/scripts/github/pr_manager.py b/scripts/github/pr_manager.py new file mode 100644 index 000000000..d1cb7f7b9 --- /dev/null +++ b/scripts/github/pr_manager.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import subprocess +from pathlib import Path + + +def _run_capture(command: list[str], cwd: Path) -> str: + result = subprocess.run( + command, + cwd=cwd, + capture_output=True, + text=True, + check=False, + ) + if result.returncode != 0: + detail = (result.stderr or result.stdout).strip() + raise RuntimeError( + f"command failed ({result.returncode}): {' '.join(command)}: {detail}" + ) + return result.stdout.strip() + + +def _run_stream(command: list[str], cwd: Path) -> int: + result = subprocess.run(command, cwd=cwd, check=False) + return result.returncode + + +def _current_branch(repo_root: Path) -> str: + return _run_capture(["git", "rev-parse", "--abbrev-ref", "HEAD"], repo_root) + + +def _open_pr_for_head(repo_root: Path, head: str) -> dict[str, object] | None: + raw = _run_capture( + [ + "gh", + "pr", + "list", + "--state", + "open", + "--head", + head, + "--json", + "number,title,state,baseRefName,headRefName,url,isDraft", + "--limit", + "1", + ], + repo_root, + ) + payload = json.loads(raw) + if not payload: + return None + first = payload[0] + if not isinstance(first, dict): + return None + return first + + +def _print_status(repo_root: Path, base: str, head: str) -> int: + pr = _open_pr_for_head(repo_root, head) + print(f"repo={repo_root}") + print(f"base={base}") + print(f"head={head}") + if pr is None: + print("status=no-open-pr") + return 0 + print("status=open") + print(f"pr_number={pr.get('number')}") + print(f"pr_title={pr.get('title')}") + print(f"pr_url={pr.get('url')}") + 
print(f"pr_state={pr.get('state')}") + print(f"pr_draft={pr.get('isDraft')}") + return 0 + + +def _selector(pr_number: str, head: str) -> str: + return pr_number if pr_number else head + + +def _create_pr( + repo_root: Path, + base: str, + head: str, + title: str, + body: str, + draft: int, +) -> int: + existing = _open_pr_for_head(repo_root, head) + if existing is not None: + print(f"status=already-open") + print(f"pr_url={existing.get('url')}") + return 0 + + command = [ + "gh", + "pr", + "create", + "--base", + base, + "--head", + head, + "--title", + title, + "--body", + body, + ] + if draft == 1: + command.append("--draft") + + created = _run_capture(command, repo_root) + print("status=created") + print(f"pr_url={created}") + return 0 + + +def _merge_pr( + repo_root: Path, + selector: str, + method: str, + auto: int, + delete_branch: int, +) -> int: + command = ["gh", "pr", "merge", selector] + merge_flag = { + "merge": "--merge", + "rebase": "--rebase", + "squash": "--squash", + }.get(method, "--squash") + command.append(merge_flag) + if auto == 1: + command.append("--auto") + if delete_branch == 1: + command.append("--delete-branch") + exit_code = _run_stream(command, repo_root) + if exit_code == 0: + print("status=merged") + return exit_code + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--repo-root", type=Path, default=Path(".")) + _ = parser.add_argument( + "--action", + default="status", + choices=["status", "create", "view", "checks", "merge", "close"], + ) + _ = parser.add_argument("--base", default="main") + _ = parser.add_argument("--head", default="") + _ = parser.add_argument("--number", default="") + _ = parser.add_argument("--title", default="") + _ = parser.add_argument("--body", default="") + _ = parser.add_argument("--draft", type=int, default=0) + _ = parser.add_argument("--merge-method", default="squash") + _ = parser.add_argument("--auto", type=int, default=0) + _ = 
parser.add_argument("--delete-branch", type=int, default=0) + _ = parser.add_argument("--checks-strict", type=int, default=0) + return parser.parse_args() + + +def main() -> int: + args = _parse_args() + repo_root = args.repo_root.resolve() + head = args.head or _current_branch(repo_root) + base = args.base + selector = _selector(args.number, head) + + if args.action == "status": + return _print_status(repo_root, base, head) + + if args.action == "create": + title = args.title or f"chore: sync {head}" + body = args.body or "Automated PR managed by scripts/github/pr_manager.py" + return _create_pr(repo_root, base, head, title, body, args.draft) + + if args.action == "view": + return _run_stream(["gh", "pr", "view", selector], repo_root) + + if args.action == "checks": + exit_code = _run_stream(["gh", "pr", "checks", selector], repo_root) + if exit_code != 0 and args.checks_strict == 0: + print("status=checks-nonblocking") + return 0 + return exit_code + + if args.action == "merge": + return _merge_pr( + repo_root, + selector, + args.merge_method, + args.auto, + args.delete_branch, + ) + + if args.action == "close": + return _run_stream(["gh", "pr", "close", selector], repo_root) + + raise RuntimeError(f"unknown action: {args.action}") + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/github/sync_workflows.py b/scripts/github/sync_workflows.py index b6ba76846..7bf1dcb72 100644 --- a/scripts/github/sync_workflows.py +++ b/scripts/github/sync_workflows.py @@ -7,7 +7,12 @@ import sys from dataclasses import dataclass from pathlib import Path -from subprocess import CalledProcessError, run + +REPO_ROOT = Path(__file__).resolve().parents[2] +if str(REPO_ROOT) not in sys.path: + sys.path.insert(0, str(REPO_ROOT)) + +from libs.selection import resolve_projects GENERATED_HEADER = "# Generated by scripts/github/sync_workflows.py - DO NOT EDIT\n" MANAGED_FILES = {"ci.yml"} @@ -22,33 +27,10 @@ class Operation: def _discover_projects(workspace_root: 
Path) -> list[tuple[str, Path]]: - discover_script = workspace_root / "scripts" / "maintenance" / "_discover.py" - command = [ - sys.executable, - str(discover_script), - "--workspace-root", - str(workspace_root), - "--kind", - "all", - "--format", - "json", + return [ + (project.name, project.path) + for project in resolve_projects(workspace_root, names=[]) ] - try: - result = run(command, check=True, capture_output=True, text=True) - except CalledProcessError as exc: - message = (exc.stderr or exc.stdout or str(exc)).strip() - raise RuntimeError(f"project discovery failed: {message}") from exc - payload = json.loads(result.stdout) - projects: list[tuple[str, Path]] = [] - for item in payload.get("projects", []): - if not isinstance(item, dict): - continue - name = item.get("name") - path_value = item.get("path") - if not isinstance(name, str) or not isinstance(path_value, str): - continue - projects.append((name, Path(path_value).resolve())) - return projects def _render_template(template_path: Path) -> str: diff --git a/scripts/maintenance/_discover.py b/scripts/maintenance/_discover.py index 07fa9429c..0b1174499 100644 --- a/scripts/maintenance/_discover.py +++ b/scripts/maintenance/_discover.py @@ -4,49 +4,14 @@ import argparse import json -import re import sys -from dataclasses import dataclass from pathlib import Path +REPO_ROOT = Path(__file__).resolve().parents[2] +if str(REPO_ROOT) not in sys.path: + sys.path.insert(0, str(REPO_ROOT)) -@dataclass(frozen=True) -class ProjectInfo: - path: Path - name: str - kind: str - - -def _is_git_project(path: Path) -> bool: - return (path / ".git").exists() - - -def _submodule_names(workspace_root: Path) -> set[str]: - gitmodules = workspace_root / ".gitmodules" - if not gitmodules.exists(): - return set() - try: - content = gitmodules.read_text(encoding="utf-8") - except OSError: - return set() - return set(re.findall(r"^\s*path\s*=\s*(.+?)\s*$", content, re.MULTILINE)) - - -def _discover(workspace_root: Path) -> 
list[ProjectInfo]: - projects: list[ProjectInfo] = [] - submodules = _submodule_names(workspace_root) - for entry in sorted(workspace_root.iterdir(), key=lambda value: value.name): - if not entry.is_dir() or entry.name == "cmd" or entry.name.startswith("."): - continue - if not _is_git_project(entry): - continue - if not (entry / "Makefile").exists(): - continue - if not (entry / "pyproject.toml").exists() and not (entry / "go.mod").exists(): - continue - kind = "submodule" if entry.name in submodules else "external" - projects.append(ProjectInfo(path=entry, name=entry.name, kind=kind)) - return projects +from libs.discovery import discover_projects def main() -> int: @@ -60,7 +25,7 @@ def main() -> int: _ = parser.add_argument("--workspace-root", type=Path, default=Path.cwd()) args = parser.parse_args() - projects = _discover(args.workspace_root.resolve()) + projects = discover_projects(args.workspace_root.resolve()) if args.kind != "all": projects = [p for p in projects if p.kind == args.kind] diff --git a/scripts/maintenance/enforce_python_version.py b/scripts/maintenance/enforce_python_version.py new file mode 100644 index 000000000..6de692b78 --- /dev/null +++ b/scripts/maintenance/enforce_python_version.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python3 +# Owner-Skill: .claude/skills/workspace-maintenance/SKILL.md +"""Enforce Python version constraints across all workspace projects. + +Creates .python-version files and injects conftest.py version guards +to prevent venv creation with wrong Python interpreter. 
+ +Usage:: + + python scripts/maintenance/enforce_python_version.py [--check] [--verbose] + +Modes: + (default) Apply: create .python-version, inject conftest guards + --check Verify: exit non-zero if any project is missing guards +""" + +from __future__ import annotations + +import argparse +import re +import sys +from pathlib import Path + +if str(Path(__file__).resolve().parents[2]) not in sys.path: + sys.path.insert(0, str(Path(__file__).resolve().parents[2])) + +from libs.selection import resolve_projects + +ROOT = Path(__file__).resolve().parents[2] +REQUIRED_MINOR = 13 +PYTHON_VERSION_CONTENT = f"3.{REQUIRED_MINOR}\n" + +# Marker comment used to detect if the guard is already injected +GUARD_MARKER = "# PYTHON_VERSION_GUARD" + +# The guard block injected into conftest.py files +GUARD_BLOCK = f"""\ +{GUARD_MARKER} — Do not remove. Managed by scripts/maintenance/enforce_python_version.py +import sys as _sys + +if _sys.version_info[:2] != (3, {REQUIRED_MINOR}): + _v = f"{{_sys.version_info.major}}.{{_sys.version_info.minor}}.{{_sys.version_info.micro}}" + raise RuntimeError( + f"\\n{{'=' * 72}}\\n" + f"FATAL: Python {{_v}} detected — this project requires Python 3.{REQUIRED_MINOR}.\\n" + f"\\n" + f"The virtual environment was created with the WRONG Python interpreter.\\n" + f"\\n" + f"Fix:\\n" + f" 1. rm -rf .venv\\n" + f" 2. poetry env use python3.{REQUIRED_MINOR}\\n" + f" 3. 
poetry install\\n" + f"\\n" + f"Or use the workspace Makefile:\\n" + f" make setup PROJECT=\\n" + f"{{'=' * 72}}\\n" + ) +del _sys +{GUARD_MARKER}_END +""" + + +def _discover_projects(workspace_root: Path) -> list[Path]: + return [ + project.path + for project in resolve_projects(workspace_root, names=[]) + if (project.path / "pyproject.toml").exists() + ] + + +def _ensure_python_version_file( + project: Path, *, check_only: bool, verbose: bool +) -> bool: + """Ensure .python-version exists with correct content.""" + pv_file = project / ".python-version" + if pv_file.exists(): + content = pv_file.read_text(encoding="utf-8").strip() + if content == f"3.{REQUIRED_MINOR}": + if verbose: + print(f" ✓ .python-version OK: {project.name}") + return True + if check_only: + print(f" ✗ .python-version WRONG ({content}): {project.name}") + return False + if verbose: + print( + f" ↻ .python-version FIXED ({content} → 3.{REQUIRED_MINOR}): {project.name}" + ) + else: + if check_only: + print(f" ✗ .python-version MISSING: {project.name}") + return False + if verbose: + print(f" + .python-version CREATED: {project.name}") + + _ = pv_file.write_text(PYTHON_VERSION_CONTENT, encoding="utf-8") + return True + + +def _has_guard(content: str) -> bool: + """Check if conftest.py already has the version guard.""" + return GUARD_MARKER in content + + +def _remove_existing_guard(content: str) -> str: + """Remove existing guard block (for replacement).""" + pattern = re.compile( + rf"^{re.escape(GUARD_MARKER)}.*?^{re.escape(GUARD_MARKER)}_END\n?", + re.MULTILINE | re.DOTALL, + ) + return pattern.sub("", content) + + +def _inject_guard(content: str) -> str: + """Inject version guard after the module docstring, before other imports.""" + # Remove any existing guard first + content = _remove_existing_guard(content) + + # Find insertion point: after module docstring, before first import + # Strategy: find the end of the docstring block, insert guard there + lines = content.split("\n") + 
insert_idx = 0 + + # Skip shebang + if lines and lines[0].startswith("#!"): + insert_idx = 1 + + # Skip leading comments + while insert_idx < len(lines) and lines[insert_idx].startswith("#"): + insert_idx += 1 + + # Skip blank lines + while insert_idx < len(lines) and not lines[insert_idx].strip(): + insert_idx += 1 + + # Skip docstring (triple-quoted) + if insert_idx < len(lines): + line = lines[insert_idx].strip() + if line.startswith('"""') or line.startswith("'''"): + quote = line[:3] + # Check if single-line docstring + if line.count(quote) >= 2 and len(line) > 3: + insert_idx += 1 + else: + # Multi-line docstring — find closing quotes + insert_idx += 1 + while insert_idx < len(lines) and quote not in lines[insert_idx]: + insert_idx += 1 + if insert_idx < len(lines): + insert_idx += 1 + + # Skip blank lines after docstring + while insert_idx < len(lines) and not lines[insert_idx].strip(): + insert_idx += 1 + + # Skip __future__ imports (must come before guard) + while insert_idx < len(lines) and lines[insert_idx].strip().startswith( + "from __future__" + ): + insert_idx += 1 + + # Skip blank lines after __future__ + while insert_idx < len(lines) and not lines[insert_idx].strip(): + insert_idx += 1 + + # Insert guard + before = "\n".join(lines[:insert_idx]) + after = "\n".join(lines[insert_idx:]) + + if before and not before.endswith("\n"): + before += "\n" + + return f"{before}{GUARD_BLOCK}\n{after}" + + +def _ensure_conftest_guard(project: Path, *, check_only: bool, verbose: bool) -> bool: + """Ensure tests/conftest.py has the Python version guard.""" + conftest = project / "tests" / "conftest.py" + + if not conftest.exists(): + if verbose: + print(f" - No tests/conftest.py: {project.name} (skipped)") + return True # Not a failure — project might not have tests + + content = conftest.read_text(encoding="utf-8") + + if _has_guard(content): + if verbose: + print(f" ✓ conftest.py guard OK: {project.name}") + return True + + if check_only: + print(f" ✗ 
conftest.py guard MISSING: {project.name}") + return False + + new_content = _inject_guard(content) + _ = conftest.write_text(new_content, encoding="utf-8") + if verbose: + print(f" + conftest.py guard INJECTED: {project.name}") + return True + + +def main(argv: list[str] | None = None) -> int: + """Run enforcement.""" + parser = argparse.ArgumentParser(description="Enforce Python version constraints") + _ = parser.add_argument( + "--check", action="store_true", help="Check mode (no writes)" + ) + _ = parser.add_argument( + "--verbose", "-v", action="store_true", help="Verbose output" + ) + args = parser.parse_args(argv) + + projects = _discover_projects(ROOT) + all_ok = True + mode = "Checking" if args.check else "Enforcing" + + print(f"{mode} Python 3.{REQUIRED_MINOR} for {len(projects)} projects...") + + # Workspace root .python-version + if not _ensure_python_version_file( + ROOT, check_only=args.check, verbose=args.verbose + ): + all_ok = False + + for project in projects: + if not _ensure_python_version_file( + project, check_only=args.check, verbose=args.verbose + ): + all_ok = False + if not _ensure_conftest_guard( + project, check_only=args.check, verbose=args.verbose + ): + all_ok = False + + if all_ok: + print(f"✓ All {len(projects)} projects enforce Python 3.{REQUIRED_MINOR}") + return 0 + + if args.check: + print(f"✗ Some projects missing Python 3.{REQUIRED_MINOR} enforcement") + print(f" Run: python scripts/maintenance/enforce_python_version.py") + return 1 + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/release/build.py b/scripts/release/build.py index 095194063..bc0a2ee0f 100644 --- a/scripts/release/build.py +++ b/scripts/release/build.py @@ -11,7 +11,7 @@ if str(SCRIPTS_ROOT) not in sys.path: sys.path.insert(0, str(SCRIPTS_ROOT)) -from release.shared import discover_projects, workspace_root +from release.shared import resolve_projects, workspace_root def _parse_args() -> argparse.Namespace: @@ -19,6 +19,7 @@ 
def _parse_args() -> argparse.Namespace: _ = parser.add_argument("--root", type=Path, default=Path(".")) _ = parser.add_argument("--version", required=True) _ = parser.add_argument("--output-dir", type=Path, required=True) + _ = parser.add_argument("--projects", nargs="*", default=[]) return parser.parse_args() @@ -38,12 +39,10 @@ def main() -> int: output_dir.mkdir(parents=True, exist_ok=True) report_path = output_dir / "build-report.json" - projects = discover_projects(root) + projects = resolve_projects(root, args.projects) targets = [ ("root", root), - ("algar-oud-mig", root / "algar-oud-mig"), *[(project.name, project.path) for project in projects], - ("gruponos-meltano-native", root / "gruponos-meltano-native"), ] seen: set[str] = set() diff --git a/scripts/release/changelog.py b/scripts/release/changelog.py index 4d01ea592..c056b9584 100644 --- a/scripts/release/changelog.py +++ b/scripts/release/changelog.py @@ -25,13 +25,14 @@ def _parse_args() -> argparse.Namespace: def _update_changelog(existing: str, version: str, tag: str) -> str: date = datetime.now(UTC).date().isoformat() + heading = f"## {version} - " section = ( - f"## {version} - {date}\n\n" + f"{heading}{date}\n\n" f"- Workspace release tag: `{tag}`\n" "- Status: Alpha, non-production\n\n" f"Full notes: `docs/releases/{tag}.md`\n\n" ) - if section in existing: + if heading in existing: return existing marker = "# Changelog\n\n" if marker in existing: diff --git a/scripts/release/notes.py b/scripts/release/notes.py index 50741f647..0fa8c5471 100644 --- a/scripts/release/notes.py +++ b/scripts/release/notes.py @@ -9,7 +9,7 @@ if str(SCRIPTS_ROOT) not in sys.path: sys.path.insert(0, str(SCRIPTS_ROOT)) -from release.shared import discover_projects, run_capture, workspace_root +from release.shared import resolve_projects, run_capture, workspace_root def _parse_args() -> argparse.Namespace: @@ -18,6 +18,7 @@ def _parse_args() -> argparse.Namespace: _ = parser.add_argument("--tag", required=True) _ = 
parser.add_argument("--output", type=Path, required=True) _ = parser.add_argument("--version", default="") + _ = parser.add_argument("--projects", nargs="*", default=[]) return parser.parse_args() @@ -56,7 +57,7 @@ def main() -> int: previous = _previous_tag(root, args.tag) changes = _collect_changes(root, previous, args.tag) - projects = discover_projects(root) + projects = resolve_projects(root, args.projects) version = args.version or args.tag.removeprefix("v") lines: list[str] = [ @@ -70,7 +71,7 @@ def main() -> int: "## Scope", "", f"- Workspace release version: {version}", - f"- Projects packaged: {len(projects) + 2}", + f"- Projects packaged: {len(projects) + 1}", "", "## Projects impacted", "", @@ -79,9 +80,7 @@ def main() -> int: f"- {name}" for name in [ "root", - "algar-oud-mig", *[project.name for project in projects], - "gruponos-meltano-native", ] ) lines.extend([ diff --git a/scripts/release/run.py b/scripts/release/run.py index acaad58bf..36758acf4 100644 --- a/scripts/release/run.py +++ b/scripts/release/run.py @@ -2,9 +2,9 @@ from __future__ import annotations import argparse -import re from pathlib import Path import sys +import tomllib SCRIPTS_ROOT = Path(__file__).resolve().parents[1] if str(SCRIPTS_ROOT) not in sys.path: @@ -12,8 +12,8 @@ from release.shared import ( bump_version, - discover_projects, parse_semver, + resolve_projects, run_capture, run_checked, workspace_root, @@ -31,17 +31,21 @@ def _parse_args() -> argparse.Namespace: _ = parser.add_argument("--push", type=int, default=0) _ = parser.add_argument("--dry-run", type=int, default=0) _ = parser.add_argument("--create-branches", type=int, default=1) + _ = parser.add_argument("--projects", nargs="*", default=[]) return parser.parse_args() def _current_version(root: Path) -> str: pyproject = root / "pyproject.toml" - content = pyproject.read_text(encoding="utf-8") - match = re.search(r'^version\s*=\s*"(?P[^"]+)"', content, flags=re.M) - if not match: + content = 
pyproject.read_bytes() + data = tomllib.loads(content.decode("utf-8")) + project = data.get("project") + if not isinstance(project, dict): + raise RuntimeError("unable to detect [project] section from pyproject.toml") + version = project.get("version") + if not isinstance(version, str) or not version: raise RuntimeError("unable to detect version from pyproject.toml") - value = match.group("version") - return value.removesuffix("-dev") + return version.removesuffix("-dev") def _resolve_version(args: argparse.Namespace, root: Path) -> str: @@ -71,18 +75,18 @@ def _resolve_tag(args: argparse.Namespace, version: str) -> str: return f"v{version}" -def _create_release_branches(root: Path, version: str) -> None: +def _create_release_branches( + root: Path, version: str, selected_projects: list[Path] +) -> None: branch = f"release/{version}" run_checked(["git", "checkout", "-B", branch], cwd=root) - for project in discover_projects(root): - run_checked(["git", "checkout", "-B", branch], cwd=project.path) - for extra in ("algar-oud-mig", "gruponos-meltano-native"): - project_root = root / extra - if project_root.exists(): - run_checked(["git", "checkout", "-B", branch], cwd=project_root) + for project_path in selected_projects: + run_checked(["git", "checkout", "-B", branch], cwd=project_path) -def _phase_version(root: Path, version: str, dry_run: bool) -> None: +def _phase_version( + root: Path, version: str, dry_run: bool, project_names: list[str] +) -> None: command = [ "python", "scripts/release/version.py", @@ -92,6 +96,8 @@ def _phase_version(root: Path, version: str, dry_run: bool) -> None: version, "--check" if dry_run else "--apply", ] + if project_names: + command.extend(["--projects", *project_names]) run_checked(command, cwd=root) @@ -99,43 +105,48 @@ def _phase_validate(root: Path) -> None: run_checked(["make", "validate", "VALIDATE_SCOPE=workspace"], cwd=root) -def _phase_build(root: Path, version: str) -> None: +def _phase_build(root: Path, version: str, 
project_names: list[str]) -> None: output = root / ".reports" / "release" / f"v{version}" - run_checked( - [ - "python", - "scripts/release/build.py", - "--root", - str(root), - "--version", - version, - "--output-dir", - str(output), - ], - cwd=root, - ) + command = [ + "python", + "scripts/release/build.py", + "--root", + str(root), + "--version", + version, + "--output-dir", + str(output), + ] + if project_names: + command.extend(["--projects", *project_names]) + run_checked(command, cwd=root) def _phase_publish( - root: Path, version: str, tag: str, push: bool, dry_run: bool + root: Path, + version: str, + tag: str, + push: bool, + dry_run: bool, + project_names: list[str], ) -> None: notes = root / ".reports" / "release" / tag / "RELEASE_NOTES.md" notes.parent.mkdir(parents=True, exist_ok=True) - run_checked( - [ - "python", - "scripts/release/notes.py", - "--root", - str(root), - "--tag", - tag, - "--version", - version, - "--output", - str(notes), - ], - cwd=root, - ) + command = [ + "python", + "scripts/release/notes.py", + "--root", + str(root), + "--tag", + tag, + "--version", + version, + "--output", + str(notes), + ] + if project_names: + command.extend(["--projects", *project_names]) + run_checked(command, cwd=root) if not dry_run: run_checked( [ @@ -164,6 +175,9 @@ def _phase_publish( def main() -> int: args = _parse_args() root = workspace_root(args.root) + selected_projects = resolve_projects(root, args.projects) + selected_project_names = [project.name for project in selected_projects] + selected_project_paths = [project.path for project in selected_projects] version = _resolve_version(args, root) tag = _resolve_tag(args, version) phases = ( @@ -175,22 +189,30 @@ def main() -> int: _ = print(f"release_version={version}") _ = print(f"release_tag={tag}") _ = print(f"phases={','.join(phases)}") + _ = print(f"projects={','.join(selected_project_names)}") if args.create_branches == 1 and args.dry_run == 0: - _create_release_branches(root, version) + 
_create_release_branches(root, version, selected_project_paths)
 
     for phase in phases:
         if phase == "validate":
             _phase_validate(root)
             continue
         if phase == "version":
-            _phase_version(root, version, args.dry_run == 1)
+            _phase_version(root, version, args.dry_run == 1, selected_project_names)
             continue
         if phase == "build":
-            _phase_build(root, version)
+            _phase_build(root, version, selected_project_names)
             continue
         if phase == "publish":
-            _phase_publish(root, version, tag, args.push == 1, args.dry_run == 1)
+            _phase_publish(
+                root,
+                version,
+                tag,
+                args.push == 1,
+                args.dry_run == 1,
+                selected_project_names,
+            )
             continue
         raise RuntimeError(f"invalid phase: {phase}")
diff --git a/scripts/release/shared.py b/scripts/release/shared.py
index 0598b719c..a1abb23c9 100644
--- a/scripts/release/shared.py
+++ b/scripts/release/shared.py
@@ -2,56 +2,40 @@
 # Owner-Skill: .claude/skills/scripts-maintenance/SKILL.md
 from __future__ import annotations
 
-import json
 import re
-import subprocess
 import sys
-from dataclasses import dataclass
 from pathlib import Path
 
+REPO_ROOT = Path(__file__).resolve().parents[2]
+if str(REPO_ROOT) not in sys.path:
+    sys.path.insert(0, str(REPO_ROOT))
+
+from libs.discovery import ProjectInfo
+from libs.paths import workspace_root as _workspace_root
+from libs.selection import resolve_projects as _resolve_projects
+from libs.subprocess import run_capture as _run_capture
+from libs.subprocess import run_checked as _run_checked
+
 SEMVER_RE = re.compile(
     r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)$"
 )
 
 
-@dataclass(frozen=True)
-class Project:
-    name: str
-    path: Path
+Project = ProjectInfo
 
 
 def workspace_root(path: str | Path = ".") -> Path:
-    return Path(path).resolve()
-
-
-def discover_projects(root: Path) -> list[Project]:
-    discover = root / "scripts" / "maintenance" / "_discover.py"
-    command = [
-        sys.executable,
-        str(discover),
-        "--workspace-root",
-        str(root),
-        "--kind",
-        "all",
-        "--format",
-        "json",
-    ]
-    result = 
subprocess.run(command, capture_output=True, text=True, check=False) - if result.returncode != 0: - msg = (result.stderr or result.stdout).strip() - raise RuntimeError(f"project discovery failed: {msg}") - payload = json.loads(result.stdout) - projects: list[Project] = [] - for item in payload.get("projects", []): - if not isinstance(item, dict): - continue - name = item.get("name") - path_value = item.get("path") - if not isinstance(name, str) or not isinstance(path_value, str): - continue - projects.append(Project(name=name, path=Path(path_value).resolve())) - return sorted(projects, key=lambda project: project.name) + return _workspace_root(path) + + +def resolve_projects(root: Path, names: list[str]) -> list[Project]: + try: + return _resolve_projects(root, names) + except RuntimeError as exc: + raise RuntimeError( + str(exc).replace("unknown projects", "unknown release projects") + ) from exc def parse_semver(version: str) -> tuple[int, int, int]: @@ -77,18 +61,8 @@ def bump_version(current_version: str, bump: str) -> str: def run_checked(command: list[str], cwd: Path | None = None) -> None: - result = subprocess.run(command, cwd=cwd, check=False) - if result.returncode != 0: - cmd = " ".join(command) - raise RuntimeError(f"command failed ({result.returncode}): {cmd}") + _run_checked(command, cwd=cwd) def run_capture(command: list[str], cwd: Path | None = None) -> str: - result = subprocess.run( - command, cwd=cwd, capture_output=True, text=True, check=False - ) - if result.returncode != 0: - cmd = " ".join(command) - detail = (result.stderr or result.stdout).strip() - raise RuntimeError(f"command failed ({result.returncode}): {cmd}: {detail}") - return result.stdout.strip() + return _run_capture(command, cwd=cwd) diff --git a/scripts/release/version.py b/scripts/release/version.py index 48f497756..3851de0f2 100644 --- a/scripts/release/version.py +++ b/scripts/release/version.py @@ -3,49 +3,50 @@ import argparse from pathlib import Path +import re import sys 
SCRIPTS_ROOT = Path(__file__).resolve().parents[1]
 if str(SCRIPTS_ROOT) not in sys.path:
     sys.path.insert(0, str(SCRIPTS_ROOT))
 
-from release.shared import discover_projects, parse_semver, workspace_root
+from release.shared import parse_semver, resolve_projects, workspace_root
 
 
 def _replace_version(content: str, version: str) -> tuple[str, bool]:
-    old = 'version = "0.10.0-dev"'
-    new = f'version = "{version}"'
-    if old in content:
-        return content.replace(old, new), True
-
-    marker = 'version = "'
-    start = content.find(marker)
-    if start < 0:
+    project_match = re.search(r"(?ms)^\[project\]\n(?P<body>.*?)(?:^\[|\Z)", content)
+    if not project_match:
         return content, False
-    value_start = start + len(marker)
-    value_end = content.find('"', value_start)
-    if value_end < 0:
+
+    body = project_match.group("body")
+    version_match = re.search(r'(?m)^version\s*=\s*"(?P<value>[^"]+)"\s*$', body)
+    if not version_match:
         return content, False
-    current = content[value_start:value_end]
+    current = version_match.group("value")
     current_clean = current.removesuffix("-dev")
     _ = parse_semver(current_clean)
     if current == version:
         return content, False
-    updated = content[:value_start] + version + content[value_end:]
-    return updated, True
+
+    replacement = f'version = "{version}"'
+    updated_body = re.sub(
+        r'(?m)^version\s*=\s*"[^"]+"\s*$',
+        replacement,
+        body,
+        count=1,
+    )
+    start, end = project_match.span("body")
+    updated = content[:start] + updated_body + content[end:]
+    return updated, updated != content
 
 
-def _version_files(root: Path) -> list[Path]:
+def _version_files(root: Path, project_names: list[str]) -> list[Path]:
     files: list[Path] = [root / "pyproject.toml"]
-    for project in discover_projects(root):
+    for project in resolve_projects(root, project_names):
         pyproject = project.path / "pyproject.toml"
         if pyproject.exists():
             files.append(pyproject)
-    for extra in ("algar-oud-mig", "gruponos-meltano-native"):
-        pyproject = root / extra / "pyproject.toml"
-        if pyproject.exists():
-            
files.append(pyproject) dedup = sorted({path.resolve() for path in files}) return dedup @@ -54,6 +55,7 @@ def _parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser() _ = parser.add_argument("--root", type=Path, default=Path(".")) _ = parser.add_argument("--version", required=True) + _ = parser.add_argument("--projects", nargs="*", default=[]) _ = parser.add_argument("--apply", action="store_true") _ = parser.add_argument("--check", action="store_true") return parser.parse_args() @@ -65,7 +67,7 @@ def main() -> int: _ = parse_semver(args.version) changed = 0 - for file_path in _version_files(root): + for file_path in _version_files(root, args.projects): content = file_path.read_text(encoding="utf-8") updated, did_change = _replace_version(content, args.version) if did_change: diff --git a/tests/unit/scripts/core/skill_validate_tests.py b/tests/unit/scripts/core/skill_validate_tests.py new file mode 100644 index 000000000..08561aed7 --- /dev/null +++ b/tests/unit/scripts/core/skill_validate_tests.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import importlib.util +import sys +from pathlib import Path +from typing import Any + + +def load_module() -> Any: + module_path = ( + Path(__file__).resolve().parents[4] / "scripts" / "core" / "skill_validate.py" + ) + spec = importlib.util.spec_from_file_location("skill_validate", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def _create_project(root: Path, name: str) -> None: + project = root / name + _ = project.mkdir(parents=True) + _ = (project / ".git").mkdir() + _ = (project / "Makefile").write_text("all:\n\t@true\n", encoding="utf-8") + _ = (project / "pyproject.toml").write_text( + "[project]\nname='demo'\nversion='0.1.0'\n", encoding="utf-8" + ) + + +def test_discover_projects_uses_ssot_submodule_and_external(tmp_path: Path) -> 
None: + mod = load_module() + _create_project(tmp_path, "flext-core") + _create_project(tmp_path, "algar-oud-mig") + _ = (tmp_path / ".gitmodules").write_text( + '[submodule "flext-core"]\n\tpath = flext-core\n\turl = git@github.com:flext-sh/flext-core.git\n', + encoding="utf-8", + ) + + discovered = mod.discover_projects(tmp_path) + + assert discovered["flext"] == ["flext-core"] + assert discovered["external"] == ["algar-oud-mig"] + assert discovered["root"] == "." diff --git a/tests/scripts/dependencies/test_modernize_pyproject.py b/tests/unit/scripts/dependencies/modernize_pyproject_tests.py similarity index 91% rename from tests/scripts/dependencies/test_modernize_pyproject.py rename to tests/unit/scripts/dependencies/modernize_pyproject_tests.py index 44de7a950..31037a79c 100644 --- a/tests/scripts/dependencies/test_modernize_pyproject.py +++ b/tests/unit/scripts/dependencies/modernize_pyproject_tests.py @@ -10,7 +10,7 @@ def load_module() -> Any: module_path = ( - Path(__file__).resolve().parents[3] + Path(__file__).resolve().parents[4] / "scripts" / "dependencies" / "modernize_pyproject.py" @@ -26,7 +26,7 @@ def load_module() -> Any: def write_pyproject(project_dir: Path, content: str) -> Path: pyproject = project_dir / "pyproject.toml" - pyproject.write_text(content, encoding="utf-8") + _ = pyproject.write_text(content, encoding="utf-8") return pyproject @@ -73,7 +73,7 @@ def test_audit_exit_codes_reflect_violations( project_dir = tmp_path / "pkg" project_dir.mkdir() - write_pyproject( + _ = write_pyproject( project_dir, """ [build-system] @@ -87,13 +87,16 @@ def test_audit_exit_codes_reflect_violations( + "\n", ) - write_pyproject( + _ = write_pyproject( tmp_path, """ [project] name = "workspace" version = "0.1.0" +[tool.pytest.ini_options] +addopts = ["--strict-config", "--strict-markers", "--tb=short", "-p no:sugar", "-q", "-ra"] + [tool.bandit] skips = ["B404", "B603", "B607", "B105", "B608"] """.strip() @@ -107,7 +110,7 @@ def 
test_audit_exit_codes_reflect_violations( ) assert mod.main() == 1 - write_pyproject( + _ = write_pyproject( project_dir, """ [build-system] @@ -135,7 +138,7 @@ def test_audit_exit_codes_reflect_violations( ) (project_dir / "src" / "pkg").mkdir(parents=True) - write_pyproject( + _ = write_pyproject( tmp_path, """ [project] @@ -202,13 +205,16 @@ def test_bandit_skips_are_loaded_from_root_ssot( project_dir = root_dir / "pkg" project_dir.mkdir(parents=True) - write_pyproject( + _ = write_pyproject( root_dir, """ [project] name = "workspace" version = "0.1.0" +[tool.pytest.ini_options] +addopts = ["--strict-config", "--strict-markers", "--tb=short", "-p no:sugar", "-q", "-ra"] + [tool.bandit] skips = ["B105", "B999"] """.strip() diff --git a/tests/scripts/dependencies/test_sync_internal_deps.py b/tests/unit/scripts/dependencies/sync_internal_deps_tests.py similarity index 99% rename from tests/scripts/dependencies/test_sync_internal_deps.py rename to tests/unit/scripts/dependencies/sync_internal_deps_tests.py index 7b94f98a9..5be73a590 100644 --- a/tests/scripts/dependencies/test_sync_internal_deps.py +++ b/tests/unit/scripts/dependencies/sync_internal_deps_tests.py @@ -11,7 +11,7 @@ def load_module() -> Any: module_path = ( - Path(__file__).resolve().parents[3] + Path(__file__).resolve().parents[4] / "scripts" / "dependencies" / "sync_internal_deps.py" diff --git a/tests/unit/scripts/github/pr_manager_tests.py b/tests/unit/scripts/github/pr_manager_tests.py new file mode 100644 index 000000000..462e31ac4 --- /dev/null +++ b/tests/unit/scripts/github/pr_manager_tests.py @@ -0,0 +1,153 @@ +from __future__ import annotations + +import importlib.util +import json +import sys +from pathlib import Path +from typing import Any + +import pytest + + +def _load_module(module_name: str, relative_path: str) -> Any: + module_path = Path(__file__).resolve().parents[4] / relative_path + spec = importlib.util.spec_from_file_location(module_name, module_path) + assert spec is not None 
+ assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def test_selector_prefers_number() -> None: + mod = _load_module("pr_manager_selector", "scripts/github/pr_manager.py") + assert mod._selector("123", "feature/branch") == "123" + assert mod._selector("", "feature/branch") == "feature/branch" + + +def test_status_reports_no_open_pr( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + mod = _load_module("pr_manager_status", "scripts/github/pr_manager.py") + + def _fake_capture(command: list[str], _cwd: Path) -> str: + if command[:3] == ["gh", "pr", "list"]: + return "[]" + raise AssertionError(command) + + monkeypatch.setattr(mod, "_run_capture", _fake_capture) + + exit_code = mod._print_status(Path("/tmp/repo"), "main", "0.11.0-dev") + output = capsys.readouterr().out + assert exit_code == 0 + assert "status=no-open-pr" in output + + +def test_create_skips_when_existing_open_pr( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + mod = _load_module("pr_manager_create_existing", "scripts/github/pr_manager.py") + + def _fake_open(_repo_root: Path, _head: str) -> dict[str, object] | None: + return {"url": "https://example.com/pr/1"} + + monkeypatch.setattr(mod, "_open_pr_for_head", _fake_open) + + exit_code = mod._create_pr( + Path("/tmp/repo"), + "main", + "0.11.0-dev", + "title", + "body", + 0, + ) + output = capsys.readouterr().out + assert exit_code == 0 + assert "status=already-open" in output + + +def test_open_pr_for_head_parses_payload(monkeypatch: pytest.MonkeyPatch) -> None: + mod = _load_module("pr_manager_open_payload", "scripts/github/pr_manager.py") + payload = [{"number": 5, "url": "https://example.com/pr/5"}] + + def _fake_capture(_command: list[str], _cwd: Path) -> str: + return json.dumps(payload) + + monkeypatch.setattr(mod, "_run_capture", _fake_capture) + pr = 
mod._open_pr_for_head(Path("/tmp/repo"), "0.11.0-dev") + assert pr is not None + assert pr.get("number") == 5 + + +def test_checks_action_nonblocking_by_default( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + mod = _load_module("pr_manager_checks_nonblocking", "scripts/github/pr_manager.py") + + def _fake_current_branch(_repo_root: Path) -> str: + return "0.11.0-dev" + + def _fake_run_stream(_command: list[str], _cwd: Path) -> int: + return 8 + + monkeypatch.setattr(mod, "_current_branch", _fake_current_branch) + monkeypatch.setattr(mod, "_run_stream", _fake_run_stream) + monkeypatch.setattr( + mod, + "_parse_args", + lambda: mod.argparse.Namespace( + repo_root=Path("."), + action="checks", + base="main", + head="", + number="", + title="", + body="", + draft=0, + merge_method="squash", + auto=0, + delete_branch=0, + checks_strict=0, + ), + ) + + exit_code = mod.main() + output = capsys.readouterr().out + assert exit_code == 0 + assert "status=checks-nonblocking" in output + + +def test_checks_action_strict_mode_returns_failure( + monkeypatch: pytest.MonkeyPatch, +) -> None: + mod = _load_module("pr_manager_checks_strict", "scripts/github/pr_manager.py") + + def _fake_current_branch(_repo_root: Path) -> str: + return "0.11.0-dev" + + def _fake_run_stream(_command: list[str], _cwd: Path) -> int: + return 8 + + monkeypatch.setattr(mod, "_current_branch", _fake_current_branch) + monkeypatch.setattr(mod, "_run_stream", _fake_run_stream) + monkeypatch.setattr( + mod, + "_parse_args", + lambda: mod.argparse.Namespace( + repo_root=Path("."), + action="checks", + base="main", + head="", + number="", + title="", + body="", + draft=0, + merge_method="squash", + auto=0, + delete_branch=0, + checks_strict=1, + ), + ) + + assert mod.main() == 8 diff --git a/tests/scripts/github/test_sync_workflows.py b/tests/unit/scripts/github/sync_workflows_tests.py similarity index 98% rename from tests/scripts/github/test_sync_workflows.py rename to 
tests/unit/scripts/github/sync_workflows_tests.py index 283d05f2b..66440f699 100644 --- a/tests/scripts/github/test_sync_workflows.py +++ b/tests/unit/scripts/github/sync_workflows_tests.py @@ -9,7 +9,7 @@ def load_module() -> Any: module_path = ( - Path(__file__).resolve().parents[3] / "scripts" / "github" / "sync_workflows.py" + Path(__file__).resolve().parents[4] / "scripts" / "github" / "sync_workflows.py" ) spec = importlib.util.spec_from_file_location("sync_workflows", module_path) assert spec is not None diff --git a/tests/scripts/maintenance/test_discover.py b/tests/unit/scripts/maintenance/_discover_tests.py similarity index 96% rename from tests/scripts/maintenance/test_discover.py rename to tests/unit/scripts/maintenance/_discover_tests.py index 127426901..fd1931b0a 100644 --- a/tests/scripts/maintenance/test_discover.py +++ b/tests/unit/scripts/maintenance/_discover_tests.py @@ -12,7 +12,7 @@ def load_module() -> Any: module_path = ( - Path(__file__).resolve().parents[3] / "scripts" / "maintenance" / "_discover.py" + Path(__file__).resolve().parents[4] / "scripts" / "maintenance" / "_discover.py" ) spec = importlib.util.spec_from_file_location("_discover", module_path) assert spec is not None diff --git a/tests/unit/scripts/maintenance/enforce_python_version_tests.py b/tests/unit/scripts/maintenance/enforce_python_version_tests.py new file mode 100644 index 000000000..5095b1b74 --- /dev/null +++ b/tests/unit/scripts/maintenance/enforce_python_version_tests.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import importlib.util +import sys +from pathlib import Path +from typing import Any + + +def load_module() -> Any: + module_path = ( + Path(__file__).resolve().parents[4] + / "scripts" + / "maintenance" + / "enforce_python_version.py" + ) + spec = importlib.util.spec_from_file_location("enforce_python_version", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + 
sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def _create_project(root: Path, name: str) -> None: + project = root / name + _ = project.mkdir(parents=True) + _ = (project / ".git").mkdir() + _ = (project / "Makefile").write_text("all:\n\t@true\n", encoding="utf-8") + _ = (project / "pyproject.toml").write_text( + "[project]\nname='demo'\nversion='0.1.0'\n", encoding="utf-8" + ) + + +def test_discover_projects_includes_external_projects(tmp_path: Path) -> None: + mod = load_module() + _create_project(tmp_path, "flext-core") + _create_project(tmp_path, "algar-oud-mig") + _ = (tmp_path / ".gitmodules").write_text( + '[submodule "flext-core"]\n\tpath = flext-core\n\turl = git@github.com:flext-sh/flext-core.git\n', + encoding="utf-8", + ) + + projects = mod._discover_projects(tmp_path) + names = [project.name for project in projects] + + assert names == ["algar-oud-mig", "flext-core"] diff --git a/tests/unit/scripts/release/release_scripts_tests.py b/tests/unit/scripts/release/release_scripts_tests.py new file mode 100644 index 000000000..14ea36603 --- /dev/null +++ b/tests/unit/scripts/release/release_scripts_tests.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +import importlib.util +import re +import sys +from pathlib import Path +from typing import Any + + +def _load_module(relative_path: str, module_name: str) -> Any: + module_path = Path(__file__).resolve().parents[4] / relative_path + spec = importlib.util.spec_from_file_location(module_name, module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def test_replace_version_updates_only_project_version() -> None: + mod = _load_module("scripts/release/version.py", "release_version") + content = """ +[project] +name = "demo" +version = "0.11.0-dev" + +[tool.poetry.dependencies] +python = ">=3.13,<4.0" +flext-core = 
"0.11.0-dev" +""".strip() + + updated, did_change = mod._replace_version(content, "0.11.0") + + assert did_change is True + assert 'version = "0.11.0"' in updated + assert 'flext-core = "0.11.0-dev"' in updated + + +def test_update_changelog_is_idempotent_by_version_heading() -> None: + mod = _load_module("scripts/release/changelog.py", "release_changelog") + first = mod._update_changelog("# Changelog\n\n", "0.11.0", "v0.11.0") + second = mod._update_changelog(first, "0.11.0", "v0.11.0") + + assert first == second + assert len(re.findall(r"^## 0\.11\.0 - ", second, flags=re.M)) == 1 diff --git a/tests/unit/scripts/release/release_shared_and_run_tests.py b/tests/unit/scripts/release/release_shared_and_run_tests.py new file mode 100644 index 000000000..bfc5292c5 --- /dev/null +++ b/tests/unit/scripts/release/release_shared_and_run_tests.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import importlib.util +import sys +from pathlib import Path +from types import SimpleNamespace +from typing import Any + +import pytest + + +def _load_module(module_name: str, relative_path: str) -> Any: + module_path = Path(__file__).resolve().parents[4] / relative_path + spec = importlib.util.spec_from_file_location(module_name, module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def test_resolve_projects_uses_auto_discovery(monkeypatch: pytest.MonkeyPatch) -> None: + shared = _load_module("release_shared", "scripts/release/shared.py") + + def _fake_resolve(_root: Path, _names: list[str]) -> list[SimpleNamespace]: + return [ + SimpleNamespace(name="external-tool"), + SimpleNamespace(name="flext-api"), + ] + + monkeypatch.setattr(shared, "_resolve_projects", _fake_resolve) + + projects = shared.resolve_projects(Path("/tmp/ws"), []) + assert [project.name for project in projects] == ["external-tool", "flext-api"] + + +def 
test_resolve_projects_rejects_unknown(monkeypatch: pytest.MonkeyPatch) -> None: + shared = _load_module("release_shared_unknown", "scripts/release/shared.py") + + def _fake_resolve(_root: Path, _names: list[str]) -> list[object]: + raise RuntimeError("unknown projects: missing-project") + + monkeypatch.setattr(shared, "_resolve_projects", _fake_resolve) + + with pytest.raises(RuntimeError, match="unknown release projects"): + _ = shared.resolve_projects(Path("/tmp/ws"), ["missing-project"]) + + +def test_current_version_reads_project_table(tmp_path: Path) -> None: + run_mod = _load_module("release_run", "scripts/release/run.py") + pyproject = tmp_path / "pyproject.toml" + _ = pyproject.write_text( + """[tool.sample] +version = "999.999.999" + +[project] +name = "demo" +version = "0.10.0-dev" +""", + encoding="utf-8", + ) + + assert run_mod._current_version(tmp_path) == "0.10.0"