diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..1a77b18f --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,48 @@ +# Generated by scripts/github/sync_workflows.py - DO NOT EDIT +name: CI + +on: + pull_request: + push: + branches: + - main + workflow_dispatch: + +permissions: + contents: read + +jobs: + ci: + name: ci + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 + + - name: Setup Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 + with: + python-version: "3.13" + + - name: Install Poetry + uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a + with: + virtualenvs-create: false + installer-parallel: true + + - name: Setup (advisory) + continue-on-error: true + run: make setup + + - name: Check (advisory) + continue-on-error: true + run: make check + + - name: Test (advisory) + continue-on-error: true + run: make test + + - name: Validate (advisory) + continue-on-error: true + run: make validate diff --git a/README.md b/README.md index 709659f7..ff5cc5fb 100644 --- a/README.md +++ b/README.md @@ -1,139 +1,27 @@ -# flext-cli +# FLEXT CLI - +Framework de linha de comando para construir interfaces operacionais padronizadas no portfolio FLEXT. -- [🚀 Key Features](#-key-features) -- [📦 Installation](#-installation) -- [🛠️ Usage](#-usage) - - [Basic CLI Application](#basic-cli-application) - - [File Operations](#file-operations) - - [Interactive Prompts](#interactive-prompts) - - [Tables and Formatting](#tables-and-formatting) -- [🏗️ Architecture](#-architecture) -- [🤝 Contributing](#-contributing) -- [📄 License](#-license) +Descricao oficial atual: "FLEXT CLI - Developer Command Line Interface". 
- +## O que este projeto entrega -[![Python 3.13+](https://img.shields.io/badge/python-3.13+-blue.svg)](https://www.python.org/downloads/) -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +- Define estrutura de comandos, opcoes e validacoes de uso. +- Padroniza experiencia em terminal para equipes tecnicas. +- Acelera criacao de CLIs consistentes entre projetos. -**flext-cli** is the foundational command-line interface library for the FLEXT ecosystem. It provides robust CLI primitives, abstracting underlying libraries like Click and Rich to ensure consistent interaction patterns, strict type safety, and seamless integration with `flext-core`. +## Contexto operacional -**Reviewed**: 2026-02-17 | **Version**: 0.10.0-dev +- Entrada: comandos e parametros do operador. +- Saida: execucao de rotinas com retorno padrao. +- Dependencias: flext-core e modulos de dominio chamados pela CLI. -Part of the [FLEXT](https://github.com/flext-sh/flext) ecosystem. +## Estado atual e risco de adocao -## 🚀 Key Features +- Qualidade: **Alpha** +- Uso recomendado: **Nao produtivo** +- Nivel de estabilidade: em maturacao funcional e tecnica, sujeito a mudancas de contrato sem garantia de retrocompatibilidade. -- **Robust CLI Framework**: Typesafe abstractions over `Click` and `Rich` for building complex commands. -- **File Operations**: Comprehensive support for reading and writing JSON, YAML, and CSV files with Pydantic validation. -- **Rich Output**: Pre-configured formatters and table styling powered by `Rich` and `Tabulate`. -- **Interactive Prompts**: Safe, validated user input handling for text, confirmations, and choices. -- **Configuration Management**: Strong configuration with Pydantic models and environment variable support. 
-- **Authentication Flow**: Built-in support for secure credential management and `flext-auth` integration. -- **Railway Oriented**: All operations return `FlextResult[T]` for predictable error handling. +## Diretriz para uso nesta fase -## 📦 Installation - -To install `flext-cli`: - -```bash -pip install flext-cli -``` - -Or with Poetry: - -```bash -poetry add flext-cli -``` - -## 🛠️ Usage - -### Basic CLI Application - -Create type-safe CLI commands with minimal boilerplate. - -```python -from flext_cli import FlextCli, FlextResult as r - -cli = FlextCli() - -@cli.command() -def greet(name: str): - cli.formatters.print(f"Hello, {name}!", style="green bold") - -if __name__ == "__main__": - cli.run() -``` - -### File Operations - -Safely read and write structured data files. - -```python -from flext_cli import FlextCli - -cli = FlextCli() - -# Write JSON -data = {"app": "myapp", "version": "1.0.0"} -cli.file_tools.write_json_file("config.json", data) - -# Read JSON -result = cli.file_tools.read_json_file("config.json") -if result.is_success: - config = result.unwrap() - cli.formatters.print(f"Config loaded: {config}") -``` - -### Interactive Prompts - -Securely collect user input with validation. - -```python -from flext_cli import FlextCli - -cli = FlextCli() - -if cli.prompts.confirm("Do you want to continue?", default=True).unwrap(): - username = cli.prompts.prompt_text("Username:").unwrap() - cli.formatters.print(f"Welcome back, {username}!") -``` - -### Tables and Formatting - -Display data beautifully using `Tabulate` and `Rich`. 
- -```python -from flext_cli import FlextCli - -cli = FlextCli() - -users = [ - {"name": "Alice", "role": "Admin"}, - {"name": "Bob", "role": "User"}, -] - -cli.output.format_data( - data={"users": users}, - format_type="table" -).map(lambda table: cli.formatters.print(table)) -``` - -## 🏗️ Architecture - -`flext-cli` abstracts direct dependencies (Click, Rich) into clean service layers, ensuring that your CLI logic remains decoupled from specific libraries. It strictly adheres to FLEXT architectural patterns: - -- **Models**: Pydantic models for strictly typed data structures. -- **Services**: All functionality is exposed via `FlextService` implementations. -- **Results**: Every operation returns a `FlextResult`, enforcing explicit error handling. - -## 🤝 Contributing - -We welcome contributions! Please see our [Contributing Guide](docs/development.md) for details on setting up your environment and submitting pull requests. - -## 📄 License - -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. +Aplicar este projeto somente em desenvolvimento, prova de conceito e homologacao controlada, com expectativa de ajustes frequentes ate maturidade de release. 
diff --git a/poetry.lock b/poetry.lock index 78fc5927..66ccb95f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -66,15 +66,15 @@ files = [ [[package]] name = "autoflake" -version = "2.3.2" +version = "2.3.3" description = "Removes unused imports and unused variables" optional = true python-versions = ">=3.10" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "autoflake-2.3.2-py3-none-any.whl", hash = "sha256:4270b06ad5eb754d6b1b3cea51f195dab85f35a55afdb05c5d7bc96679dbf866"}, - {file = "autoflake-2.3.2.tar.gz", hash = "sha256:73d3b22bad89034879f7a4871c279c8d189b3f2c0b9d9e274b8e5b468c17f9a0"}, + {file = "autoflake-2.3.3-py3-none-any.whl", hash = "sha256:a51a3412aff16135ee5b3ec25922459fef10c1f23ce6d6c4977188df859e8b53"}, + {file = "autoflake-2.3.3.tar.gz", hash = "sha256:c24809541e23999f7a7b0d2faadf15deb0bc04cdde49728a2fd943a0c8055504"}, ] [package.dependencies] @@ -1127,20 +1127,19 @@ files = [ [[package]] name = "isort" -version = "7.0.0" +version = "8.0.0" description = "A Python utility / library to sort Python imports." 
optional = true python-versions = ">=3.10.0" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1"}, - {file = "isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187"}, + {file = "isort-8.0.0-py3-none-any.whl", hash = "sha256:184916a933041c7cf718787f7e52064f3c06272aff69a5cb4dc46497bd8911d9"}, + {file = "isort-8.0.0.tar.gz", hash = "sha256:fddea59202f231e170e52e71e3510b99c373b6e571b55d9c7b31b679c0fed47c"}, ] [package.extras] colors = ["colorama"] -plugins = ["setuptools"] [[package]] name = "ldap3" @@ -2000,22 +1999,22 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "4.0.4" +version = "4.0.5" description = "python code static checker" optional = true python-versions = ">=3.10.0" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "pylint-4.0.4-py3-none-any.whl", hash = "sha256:63e06a37d5922555ee2c20963eb42559918c20bd2b21244e4ef426e7c43b92e0"}, - {file = "pylint-4.0.4.tar.gz", hash = "sha256:d9b71674e19b1c36d79265b5887bf8e55278cbe236c9e95d22dc82cf044fdbd2"}, + {file = "pylint-4.0.5-py3-none-any.whl", hash = "sha256:00f51c9b14a3b3ae08cff6b2cdd43f28165c78b165b628692e428fb1f8dc2cf2"}, + {file = "pylint-4.0.5.tar.gz", hash = "sha256:8cd6a618df75deb013bd7eb98327a95f02a6fb839205a6bbf5456ef96afb317c"}, ] [package.dependencies] astroid = ">=4.0.2,<=4.1.dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} -isort = ">=5,<5.13 || >5.13,<8" +isort = ">=5,<5.13 || >5.13,<9" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2" tomlkit = ">=0.10.1" @@ -2570,31 +2569,31 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.15.1" +version = "0.15.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "ruff-0.15.1-py3-none-linux_armv6l.whl", hash = "sha256:b101ed7cf4615bda6ffe65bdb59f964e9f4a0d3f85cbf0e54f0ab76d7b90228a"}, - {file = "ruff-0.15.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:939c995e9277e63ea632cc8d3fae17aa758526f49a9a850d2e7e758bfef46602"}, - {file = "ruff-0.15.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d83466455fdefe60b8d9c8df81d3c1bbb2115cede53549d3b522ce2bc703899"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9457e3c3291024866222b96108ab2d8265b477e5b1534c7ddb1810904858d16"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92c92b003e9d4f7fbd33b1867bb15a1b785b1735069108dfc23821ba045b29bc"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fe5c41ab43e3a06778844c586251eb5a510f67125427625f9eb2b9526535779"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66a6dd6df4d80dc382c6484f8ce1bcceb55c32e9f27a8b94c32f6c7331bf14fb"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a4a42cbb8af0bda9bcd7606b064d7c0bc311a88d141d02f78920be6acb5aa83"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab064052c31dddada35079901592dfba2e05f5b1e43af3954aafcbc1096a5b2"}, - {file = "ruff-0.15.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5631c940fe9fe91f817a4c2ea4e81f47bee3ca4aa646134a24374f3c19ad9454"}, - {file = "ruff-0.15.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:68138a4ba184b4691ccdc39f7795c66b3c68160c586519e7e8444cf5a53e1b4c"}, - {file = "ruff-0.15.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:518f9af03bfc33c03bdb4cb63fabc935341bb7f54af500f92ac309ecfbba6330"}, - {file = "ruff-0.15.1-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:da79f4d6a826caaea95de0237a67e33b81e6ec2e25fc7e1993a4015dffca7c61"}, - {file = "ruff-0.15.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3dd86dccb83cd7d4dcfac303ffc277e6048600dfc22e38158afa208e8bf94a1f"}, - {file = "ruff-0.15.1-py3-none-win32.whl", hash = "sha256:660975d9cb49b5d5278b12b03bb9951d554543a90b74ed5d366b20e2c57c2098"}, - {file = "ruff-0.15.1-py3-none-win_amd64.whl", hash = "sha256:c820fef9dd5d4172a6570e5721704a96c6679b80cf7be41659ed439653f62336"}, - {file = "ruff-0.15.1-py3-none-win_arm64.whl", hash = "sha256:5ff7d5f0f88567850f45081fac8f4ec212be8d0b963e385c3f7d0d2eb4899416"}, - {file = "ruff-0.15.1.tar.gz", hash = "sha256:c590fe13fb57c97141ae975c03a1aedb3d3156030cabd740d6ff0b0d601e203f"}, + {file = "ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d"}, + {file = "ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e"}, + {file = "ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87"}, + {file = "ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9"}, + {file = "ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80"}, + {file = "ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f"}, + {file = "ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77"}, + {file = "ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea"}, + {file = 
"ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a"}, + {file = "ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956"}, + {file = "ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4"}, + {file = "ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de"}, + {file = "ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c"}, + {file = "ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8"}, + {file = "ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f"}, + {file = "ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5"}, + {file = "ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e"}, + {file = "ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342"}, ] [[package]] @@ -2664,15 +2663,15 @@ doc = ["sphinx"] [[package]] name = "stevedore" -version = "5.6.0" +version = "5.7.0" description = "Manage dynamic plugins for Python applications" optional = true python-versions = ">=3.10" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820"}, - {file = "stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945"}, + {file = "stevedore-5.7.0-py3-none-any.whl", hash = 
"sha256:fd25efbb32f1abb4c9e502f385f0018632baac11f9ee5d1b70f88cc5e22ad4ed"}, + {file = "stevedore-5.7.0.tar.gz", hash = "sha256:31dd6fe6b3cbe921e21dcefabc9a5f1cf848cf538a1f27543721b8ca09948aa3"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 9c3b95f4..47ed997e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,10 @@ [build-system] build-backend = "poetry.core.masonry.api" -requires = [ - "poetry-core>=2", -] +requires = [ "poetry-core>=2.0", "poetry-core>=2" ] [project] name = "flext-cli" -version = "0.10.0-dev" +version = "0.10.0" description = "FLEXT CLI - Developer Command Line Interface" readme = "README.md" keywords = [ @@ -18,17 +16,18 @@ keywords = [ "typed", ] license = "MIT" -maintainers = [ { email = "team@flext.sh", name = "FLEXT Team" } ] -authors = [ { email = "team@flext.sh", name = "FLEXT Team" } ] +maintainers = [ {email = "team@flext.sh", name = "FLEXT Team"} ] +authors = [ {email = "team@flext.sh", name = "FLEXT Team"} ] requires-python = ">=3.13,<3.14" classifiers = [ - "Development Status :: 5 - Production/Stable", + "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.13", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", + "Development Status :: 5 - Production/Stable", ] dependencies = [ "attrs>=25.4,<26", @@ -42,7 +41,7 @@ dependencies = [ "pyyaml>=6", "rich>=14.2,<15", "tabulate>=0.9", - "typer>=0.12" + "typer>=0.12", ] optional-dependencies.dev = [ "autoflake>=2.3.1", @@ -117,23 +116,23 @@ group.dev.dependencies.types-lxml = "^2025.8.25" group.dev.dependencies.types-pyyaml = "^6.0.12.20250809" group.dev.dependencies.types-tabulate = "^0.9.0.20241207" group.dev.dependencies.types-toml = "^0.10.8.20240310" -packages = [ { from = "src", include = "flext_cli" } ] +packages = [ {from = "src", include = "flext_cli"} ] [tool.ruff] extend = 
"../ruff-shared.toml" [tool.deptry] +extend_exclude = [ + "examples", +] known_first_party = [ "flext_cli", "flext_core", ] -extend_exclude = [ - "examples", -] +package_module_name_map.pyyaml = "yaml" pep621_dev_dependency_groups = [ "dev", ] -package_module_name_map.pyyaml = "yaml" per_rule_ignores.DEP002 = [ # CLI tools # Type stubs @@ -155,7 +154,7 @@ per_rule_ignores.DEP002 = [ "cachetools", "pluggy", "prompt-toolkit", - "pydantic-core" + "pydantic-core", ] [tool.pytest] @@ -206,9 +205,9 @@ ini_options.testpaths = [ ] [tool.coverage] -run.source = [ "src/flext_cli" ] report.fail_under = 100 report.precision = 2 +run.source = [ "src/flext_cli" ] [tool.mypy] check_untyped_defs = true @@ -222,14 +221,15 @@ incremental = false mypy_path = "src:$MYPYPATH:../typings" namespace_packages = true no_implicit_optional = true +overrides = [ {module = "flext_cli.services.cmd", ignore_errors = true} ] python_version = "3.13" warn_redundant_casts = true warn_return_any = true warn_unused_configs = true warn_unused_ignores = true -overrides = [ { module = "flext_cli.services.cmd", ignore_errors = true } ] [tool.pyrefly] +project-excludes = [ "**/*_pb2*.py", "**/*_pb2_grpc*.py" ] search-path = [ "../typings", "../typings/generated", diff --git a/scripts/core/generate_scripts_inventory.py b/scripts/core/generate_scripts_inventory.py index 89aa968c..fc42d16d 100644 --- a/scripts/core/generate_scripts_inventory.py +++ b/scripts/core/generate_scripts_inventory.py @@ -9,8 +9,8 @@ from pathlib import Path -def _artifact_path(directory: str, slug: str) -> Path: - return Path(".sisyphus") / directory / f"scripts-infra--json--{slug}.json" +def _artifact_path(slug: str) -> Path: + return Path(".reports") / f"scripts-infra--json--{slug}.json" def main() -> int: @@ -38,9 +38,9 @@ def main() -> int: external = {"generated_at": datetime.now(UTC).isoformat(), "candidates": []} outputs = { - _artifact_path("reports", "scripts-inventory"): inventory, - _artifact_path("reports", 
"scripts-wiring"): wiring, - _artifact_path("reports", "external-scripts-candidates"): external, + _artifact_path("scripts-inventory"): inventory, + _artifact_path("scripts-wiring"): wiring, + _artifact_path("external-scripts-candidates"): external, } for path, payload in outputs.items(): path.parent.mkdir(parents=True, exist_ok=True) diff --git a/scripts/dependencies/sync_internal_deps.py b/scripts/dependencies/sync_internal_deps.py index 77c11556..3f1fc85f 100644 --- a/scripts/dependencies/sync_internal_deps.py +++ b/scripts/dependencies/sync_internal_deps.py @@ -15,6 +15,10 @@ from pathlib import Path GIT_BIN = shutil.which("git") or "git" +GIT_REF_RE = re.compile(r"^[A-Za-z0-9][A-Za-z0-9._/-]{0,127}$") +GITHUB_REPO_URL_RE = re.compile( + r"^(?:git@github\.com:[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+(?:\.git)?|https://github\.com/[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+(?:\.git)?)$" +) def _run_git(args: list[str], cwd: Path) -> subprocess.CompletedProcess[str]: @@ -23,6 +27,20 @@ def _run_git(args: list[str], cwd: Path) -> subprocess.CompletedProcess[str]: ) +def _validate_git_ref(ref_name: str) -> str: + if not GIT_REF_RE.fullmatch(ref_name): + error_msg = f"invalid git ref: {ref_name!r}" + raise RuntimeError(error_msg) + return ref_name + + +def _validate_repo_url(repo_url: str) -> str: + if not GITHUB_REPO_URL_RE.fullmatch(repo_url): + error_msg = f"invalid repository URL: {repo_url!r}" + raise RuntimeError(error_msg) + return repo_url + + def _ssh_to_https(url: str) -> str: if url.startswith("git@github.com:"): return f"https://github.com/{url.removeprefix('git@github.com:')}" @@ -76,9 +94,41 @@ def _resolve_ref(project_root: Path) -> str: return "main" +def _is_relative_to(path: Path, parent: Path) -> bool: + try: + path.relative_to(parent) + except ValueError: + return False + return True + + +def _workspace_root_from_env(project_root: Path) -> Path | None: + env_root = os.getenv("FLEXT_WORKSPACE_ROOT") + if not env_root: + return None + candidate = 
Path(env_root).expanduser().resolve() + if not candidate.exists() or not candidate.is_dir(): + return None + if _is_relative_to(project_root, candidate): + return candidate + return None + + +def _workspace_root_from_parents(project_root: Path) -> Path | None: + for candidate in (project_root, *project_root.parents): + if (candidate / ".gitmodules").exists(): + return candidate + return None + + def _is_workspace_mode(project_root: Path) -> tuple[bool, Path | None]: if os.getenv("FLEXT_STANDALONE") == "1": return False, None + + env_workspace_root = _workspace_root_from_env(project_root) + if env_workspace_root is not None: + return True, env_workspace_root + superproject = _run_git( ["rev-parse", "--show-superproject-working-tree"], project_root ) @@ -86,11 +136,46 @@ def _is_workspace_mode(project_root: Path) -> tuple[bool, Path | None]: value = superproject.stdout.strip() if value: return True, Path(value) - if (project_root / ".gitmodules").exists(): - return True, project_root + heuristic_workspace_root = _workspace_root_from_parents(project_root) + if heuristic_workspace_root is not None: + return True, heuristic_workspace_root + return False, None +def _owner_from_remote_url(remote_url: str) -> str | None: + patterns = ( + r"^git@github\.com:(?P[^/]+)/[^/]+(?:\.git)?$", + r"^https://github\.com/(?P[^/]+)/[^/]+(?:\.git)?$", + r"^http://github\.com/(?P[^/]+)/[^/]+(?:\.git)?$", + ) + for pattern in patterns: + match = re.match(pattern, remote_url) + if match: + return match.group("owner") + return None + + +def _infer_owner_from_origin(project_root: Path) -> str | None: + remote = _run_git(["config", "--get", "remote.origin.url"], project_root) + if remote.returncode != 0: + return None + return _owner_from_remote_url(remote.stdout.strip()) + + +def _synthesized_repo_map( + owner: str, repo_names: set[str] +) -> dict[str, dict[str, str]]: + result: dict[str, dict[str, str]] = {} + for repo_name in sorted(repo_names): + ssh_url = 
f"git@github.com:{owner}/{repo_name}.git" + result[repo_name] = { + "ssh_url": ssh_url, + "https_url": _ssh_to_https(ssh_url), + } + return result + + def _ensure_symlink(target: Path, source: Path) -> None: target.parent.mkdir(parents=True, exist_ok=True) if target.is_symlink() and target.resolve() == source.resolve(): @@ -104,8 +189,15 @@ def _ensure_symlink(target: Path, source: Path) -> None: def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: + safe_repo_url = _validate_repo_url(repo_url) + safe_ref_name = _validate_git_ref(ref_name) dep_path.parent.mkdir(parents=True, exist_ok=True) if not (dep_path / ".git").exists(): + if dep_path.exists() or dep_path.is_symlink(): + if dep_path.is_dir() and not dep_path.is_symlink(): + shutil.rmtree(dep_path) + else: + dep_path.unlink() cloned = subprocess.run( [ GIT_BIN, @@ -113,8 +205,8 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: "--depth", "1", "--branch", - ref_name, - repo_url, + safe_ref_name, + safe_repo_url, str(dep_path), ], text=True, @@ -131,7 +223,7 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: "1", "--branch", "main", - repo_url, + safe_repo_url, str(dep_path), ], text=True, @@ -142,7 +234,7 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: error_msg = f"clone failed for {dep_path.name}: {fallback.stderr.strip()}" raise RuntimeError(error_msg) print( - f"[sync-deps] warning: {dep_path.name} missing ref '{ref_name}', using 'main'" + f"[sync-deps] warning: {dep_path.name} missing ref '{safe_ref_name}', using 'main'" ) return @@ -151,9 +243,9 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: error_msg = f"fetch failed for {dep_path.name}: {fetch.stderr.strip()}" raise RuntimeError(error_msg) - checkout = _run_git(["checkout", ref_name], dep_path) + checkout = _run_git(["checkout", safe_ref_name], dep_path) if checkout.returncode == 0: - _run_git(["pull", 
"--ff-only", "origin", ref_name], dep_path) + _run_git(["pull", "--ff-only", "origin", safe_ref_name], dep_path) return fallback_checkout = _run_git(["checkout", "main"], dep_path) @@ -162,7 +254,7 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: raise RuntimeError(error_msg) _run_git(["pull", "--ff-only", "origin", "main"], dep_path) print( - f"[sync-deps] warning: {dep_path.name} missing ref '{ref_name}', using 'main'" + f"[sync-deps] warning: {dep_path.name} missing ref '{safe_ref_name}', using 'main'" ) @@ -215,11 +307,21 @@ def _main() -> int: repo_map = {**_parse_repo_map(map_file), **repo_map} else: if not map_file.exists(): - error_msg = ( - "missing flext-repo-map.toml for standalone dependency resolution" + owner = _infer_owner_from_origin(project_root) + if owner is None: + error_msg = ( + "missing flext-repo-map.toml for standalone dependency resolution " + "and unable to infer GitHub owner from remote.origin.url" + ) + raise RuntimeError(error_msg) + repo_map = _synthesized_repo_map( + owner, {dep_path.name for dep_path in deps.values()} ) - raise RuntimeError(error_msg) - repo_map = _parse_repo_map(map_file) + print( + f"[sync-deps] warning: using synthesized standalone repo map for owner '{owner}'" + ) + else: + repo_map = _parse_repo_map(map_file) ref_name = _resolve_ref(project_root) force_https = ( diff --git a/scripts/github/lint_workflows.py b/scripts/github/lint_workflows.py new file mode 100644 index 00000000..98f29af3 --- /dev/null +++ b/scripts/github/lint_workflows.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import shutil +import subprocess +from pathlib import Path + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument( + "--report", + type=Path, + default=Path(".reports/workflows/actionlint.json"), + ) + _ = 
parser.add_argument("--strict", type=int, default=0) + return parser.parse_args() + + +def main() -> int: + args = _parse_args() + root = args.root.resolve() + report = args.report if args.report.is_absolute() else root / args.report + report.parent.mkdir(parents=True, exist_ok=True) + + actionlint = shutil.which("actionlint") + if actionlint is None: + payload = { + "status": "skipped", + "reason": "actionlint not installed", + } + report.write_text( + json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + _ = print(f"wrote: {report}") + return 0 + + result = subprocess.run( + [actionlint], + cwd=root, + capture_output=True, + text=True, + check=False, + ) + payload = { + "status": "ok" if result.returncode == 0 else "fail", + "exit_code": result.returncode, + "stdout": result.stdout, + "stderr": result.stderr, + } + report.write_text( + json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + _ = print(f"wrote: {report}") + if result.returncode != 0: + _ = print(result.stdout) + _ = print(result.stderr) + if args.strict == 1: + return result.returncode + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/github/sync_workflows.py b/scripts/github/sync_workflows.py new file mode 100644 index 00000000..b6ba7684 --- /dev/null +++ b/scripts/github/sync_workflows.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python3 +# Owner-Skill: .claude/skills/rules-github/SKILL.md +from __future__ import annotations + +import argparse +import json +import sys +from dataclasses import dataclass +from pathlib import Path +from subprocess import CalledProcessError, run + +GENERATED_HEADER = "# Generated by scripts/github/sync_workflows.py - DO NOT EDIT\n" +MANAGED_FILES = {"ci.yml"} + + +@dataclass(frozen=True) +class Operation: + project: str + path: str + action: str + reason: str + + +def _discover_projects(workspace_root: Path) -> list[tuple[str, Path]]: + discover_script = workspace_root / "scripts" / 
"maintenance" / "_discover.py" + command = [ + sys.executable, + str(discover_script), + "--workspace-root", + str(workspace_root), + "--kind", + "all", + "--format", + "json", + ] + try: + result = run(command, check=True, capture_output=True, text=True) + except CalledProcessError as exc: + message = (exc.stderr or exc.stdout or str(exc)).strip() + raise RuntimeError(f"project discovery failed: {message}") from exc + payload = json.loads(result.stdout) + projects: list[tuple[str, Path]] = [] + for item in payload.get("projects", []): + if not isinstance(item, dict): + continue + name = item.get("name") + path_value = item.get("path") + if not isinstance(name, str) or not isinstance(path_value, str): + continue + projects.append((name, Path(path_value).resolve())) + return projects + + +def _render_template(template_path: Path) -> str: + body = template_path.read_text(encoding="utf-8") + if body.startswith(GENERATED_HEADER): + return body + return GENERATED_HEADER + body + + +def _resolve_source_workflow( + workspace_root: Path, source_workflow: Path | None +) -> Path: + if source_workflow is not None: + candidate = ( + source_workflow + if source_workflow.is_absolute() + else (workspace_root / source_workflow) + ).resolve() + if candidate.exists(): + return candidate + raise RuntimeError(f"missing source workflow: {candidate}") + + default_source = (workspace_root / ".github" / "workflows" / "ci.yml").resolve() + if default_source.exists(): + return default_source + raise RuntimeError(f"missing source workflow: {default_source}") + + +def _sync_project( + *, + project_name: str, + project_root: Path, + rendered_template: str, + apply: bool, + prune: bool, +) -> list[Operation]: + operations: list[Operation] = [] + workflows_dir = project_root / ".github" / "workflows" + destination = workflows_dir / "ci.yml" + + if destination.exists(): + current = destination.read_text(encoding="utf-8") + if current != rendered_template: + if apply: + _ = 
destination.write_text(rendered_template, encoding="utf-8") + operations.append( + Operation( + project=project_name, + path=str(destination.relative_to(project_root)), + action="update", + reason="force overwrite ci.yml", + ) + ) + else: + operations.append( + Operation( + project=project_name, + path=str(destination.relative_to(project_root)), + action="noop", + reason="already synced", + ) + ) + else: + if apply: + workflows_dir.mkdir(parents=True, exist_ok=True) + _ = destination.write_text(rendered_template, encoding="utf-8") + operations.append( + Operation( + project=project_name, + path=str(destination.relative_to(project_root)), + action="create", + reason="missing ci.yml", + ) + ) + + if prune and workflows_dir.exists(): + candidates = sorted(workflows_dir.glob("*.yml")) + sorted( + workflows_dir.glob("*.yaml") + ) + for path in candidates: + if path.name in MANAGED_FILES: + continue + if apply: + path.unlink() + operations.append( + Operation( + project=project_name, + path=str(path.relative_to(project_root)), + action="prune", + reason="remove non-canonical workflow", + ) + ) + + return operations + + +def _write_report(report_path: Path, mode: str, operations: list[Operation]) -> None: + report_path.parent.mkdir(parents=True, exist_ok=True) + by_action: dict[str, int] = {} + for operation in operations: + by_action[operation.action] = by_action.get(operation.action, 0) + 1 + payload = { + "mode": mode, + "summary": by_action, + "operations": [ + { + "project": operation.project, + "path": operation.path, + "action": operation.action, + "reason": operation.reason, + } + for operation in operations + ], + } + _ = report_path.write_text( + json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + + +def _parse_args(argv: list[str]) -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--workspace-root", default=".", type=Path) + _ = parser.add_argument("--source-workflow", type=Path) + _ = 
parser.add_argument( + "--report", + default=".reports/workflows/sync.json", + type=Path, + ) + _ = parser.add_argument("--apply", action="store_true") + _ = parser.add_argument("--prune", action="store_true") + return parser.parse_args(argv) + + +def main(argv: list[str] | None = None) -> int: + args = _parse_args(sys.argv[1:] if argv is None else argv) + workspace_root = args.workspace_root.resolve() + report = ( + args.report if args.report.is_absolute() else (workspace_root / args.report) + ) + + source_workflow = _resolve_source_workflow(workspace_root, args.source_workflow) + + projects = _discover_projects(workspace_root) + rendered_template = _render_template(source_workflow) + operations: list[Operation] = [] + + for project_name, project_root in projects: + operations.extend( + _sync_project( + project_name=project_name, + project_root=project_root, + rendered_template=rendered_template, + apply=args.apply, + prune=args.prune, + ) + ) + + mode = "apply" if args.apply else "dry-run" + _write_report(report.resolve(), mode, operations) + print(f"Wrote: {report}") + for operation in operations: + print( + f"[{operation.project}] {operation.action}: {operation.path} ({operation.reason})" + ) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/maintenance/_discover.py b/scripts/maintenance/_discover.py index 0a49e55f..07fa9429 100644 --- a/scripts/maintenance/_discover.py +++ b/scripts/maintenance/_discover.py @@ -3,6 +3,7 @@ from __future__ import annotations import argparse +import json import re import sys from dataclasses import dataclass @@ -53,7 +54,9 @@ def main() -> int: _ = parser.add_argument( "--kind", choices=("submodule", "external", "all"), default="all" ) - _ = parser.add_argument("--format", choices=("human", "makefile"), default="human") + _ = parser.add_argument( + "--format", choices=("human", "makefile", "json"), default="human" + ) _ = parser.add_argument("--workspace-root", type=Path, 
default=Path.cwd()) args = parser.parse_args() @@ -65,6 +68,23 @@ def main() -> int: print(" ".join(project.name for project in projects)) return 0 + if args.format == "json": + payload = { + "workspace_root": str(args.workspace_root.resolve()), + "kind": args.kind, + "count": len(projects), + "projects": [ + { + "name": project.name, + "kind": project.kind, + "path": str(project.path.resolve()), + } + for project in projects + ], + } + print(json.dumps(payload, indent=2, sort_keys=True)) + return 0 + for project in projects: print(project.name) return 0 diff --git a/scripts/release/build.py b/scripts/release/build.py new file mode 100644 index 00000000..09519406 --- /dev/null +++ b/scripts/release/build.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import subprocess +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import discover_projects, workspace_root + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--version", required=True) + _ = parser.add_argument("--output-dir", type=Path, required=True) + return parser.parse_args() + + +def _run_make(project_path: Path, verb: str) -> tuple[int, str]: + command = ["make", "-C", str(project_path), verb] + result = subprocess.run(command, capture_output=True, text=True, check=False) + output = (result.stdout + "\n" + result.stderr).strip() + return result.returncode, output + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + output_dir = ( + args.output_dir if args.output_dir.is_absolute() else root / args.output_dir + ) + output_dir.mkdir(parents=True, exist_ok=True) + report_path = output_dir / "build-report.json" + + projects = discover_projects(root) + 
targets = [ + ("root", root), + ("algar-oud-mig", root / "algar-oud-mig"), + *[(project.name, project.path) for project in projects], + ("gruponos-meltano-native", root / "gruponos-meltano-native"), + ] + + seen: set[str] = set() + unique_targets: list[tuple[str, Path]] = [] + for name, path in targets: + if name in seen: + continue + seen.add(name) + if not path.exists(): + continue + unique_targets.append((name, path)) + + records: list[dict[str, str | int]] = [] + failures = 0 + for name, path in unique_targets: + code, output = _run_make(path, "build") + if code != 0: + failures += 1 + log = output_dir / f"build-{name}.log" + log.write_text(output + "\n", encoding="utf-8") + records.append({ + "project": name, + "path": str(path), + "exit_code": code, + "log": str(log), + }) + _ = print(f"[{name}] build exit={code}") + + report = { + "version": args.version, + "total": len(records), + "failures": failures, + "records": records, + } + report_path.write_text( + json.dumps(report, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + _ = print(f"report: {report_path}") + return 1 if failures else 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/changelog.py b/scripts/release/changelog.py new file mode 100644 index 00000000..4d01ea59 --- /dev/null +++ b/scripts/release/changelog.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from datetime import UTC, datetime +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import workspace_root + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--version", required=True) + _ = parser.add_argument("--tag", required=True) + _ = parser.add_argument("--notes", type=Path, 
required=True) + _ = parser.add_argument("--apply", action="store_true") + return parser.parse_args() + + +def _update_changelog(existing: str, version: str, tag: str) -> str: + date = datetime.now(UTC).date().isoformat() + section = ( + f"## {version} - {date}\n\n" + f"- Workspace release tag: `{tag}`\n" + "- Status: Alpha, non-production\n\n" + f"Full notes: `docs/releases/{tag}.md`\n\n" + ) + if section in existing: + return existing + marker = "# Changelog\n\n" + if marker in existing: + return existing.replace(marker, marker + section, 1) + return "# Changelog\n\n" + section + existing + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + changelog_path = root / "docs" / "CHANGELOG.md" + latest_path = root / "docs" / "releases" / "latest.md" + tagged_notes_path = root / "docs" / "releases" / f"{args.tag}.md" + notes_path = args.notes if args.notes.is_absolute() else root / args.notes + + notes_text = notes_path.read_text(encoding="utf-8") + existing = ( + changelog_path.read_text(encoding="utf-8") + if changelog_path.exists() + else "# Changelog\n\n" + ) + updated = _update_changelog(existing, args.version, args.tag) + + if args.apply: + changelog_path.parent.mkdir(parents=True, exist_ok=True) + _ = changelog_path.write_text(updated, encoding="utf-8") + latest_path.parent.mkdir(parents=True, exist_ok=True) + _ = latest_path.write_text(notes_text, encoding="utf-8") + _ = tagged_notes_path.write_text(notes_text, encoding="utf-8") + + _ = print(f"changelog: {changelog_path}") + _ = print(f"latest: {latest_path}") + _ = print(f"release_notes: {tagged_notes_path}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/notes.py b/scripts/release/notes.py new file mode 100644 index 00000000..50741f64 --- /dev/null +++ b/scripts/release/notes.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from pathlib import Path +import sys + +SCRIPTS_ROOT 
def _parse_args() -> argparse.Namespace:
    """Parse CLI arguments for the release-notes generator."""
    parser = argparse.ArgumentParser()
    _ = parser.add_argument("--root", type=Path, default=Path("."))
    _ = parser.add_argument("--tag", required=True)
    _ = parser.add_argument("--output", type=Path, required=True)
    _ = parser.add_argument("--version", default="")
    return parser.parse_args()


def _tag_exists(root: Path, tag: str) -> bool:
    """Return True when *tag* resolves to an existing git tag in *root*."""
    try:
        _ = run_capture(["git", "rev-parse", "--verify", f"refs/tags/{tag}"], cwd=root)
    except RuntimeError:
        return False
    return True


def _previous_tag(root: Path, tag: str) -> str:
    """Return the tag preceding *tag* in version order, or "" when none exists."""
    listing = run_capture(["git", "tag", "--sort=-v:refname"], cwd=root)
    tags = [line.strip() for line in listing.splitlines() if line.strip()]
    if tag in tags:
        position = tags.index(tag)
        if position + 1 < len(tags):
            return tags[position + 1]
    # Tag not created yet (or it is the oldest): fall back to the newest other tag.
    for candidate in tags:
        if candidate != tag:
            return candidate
    return ""


def _collect_changes(root: Path, previous: str, tag: str) -> str:
    """Return a bulleted git log between *previous* and *tag* (HEAD if untagged)."""
    target = tag if _tag_exists(root, tag) else "HEAD"
    rev = f"{previous}..{target}" if previous else target
    return run_capture(["git", "log", "--pretty=format:- %h %s (%an)", rev], cwd=root)


def main() -> int:
    """Generate the markdown release notes document for a workspace tag."""
    args = _parse_args()
    root = workspace_root(args.root)
    output_path = args.output if args.output.is_absolute() else root / args.output
    output_path.parent.mkdir(parents=True, exist_ok=True)

    previous = _previous_tag(root, args.tag)
    changes = _collect_changes(root, previous, args.tag)
    projects = discover_projects(root)

    version = args.version or args.tag.removeprefix("v")
    lines: list[str] = [
        f"# Release {args.tag}",
        "",
        "## Status",
        "",
        "- Quality: Alpha",
        "- Usage: Non-production",
        "",
        "## Scope",
        "",
        f"- Workspace release version: {version}",
        f"- Projects packaged: {len(projects) + 2}",
        "",
        "## Projects impacted",
        "",
    ]
    lines.extend(
        f"- {name}"
        for name in [
            "root",
            "algar-oud-mig",
            *[project.name for project in projects],
            "gruponos-meltano-native",
        ]
    )
    lines.extend([
        "",
        "## Changes since last tag",
        "",
        changes or "- Initial tagged release",
        "",
        "## Verification",
        "",
        "- make release-ci RELEASE_PHASE=all",
        "- make validate VALIDATE_SCOPE=workspace",
        "- make build",
    ])

    _ = output_path.write_text("\n".join(lines).rstrip() + "\n", encoding="utf-8")
    _ = print(f"wrote: {output_path}")
    return 0
{len(projects) + 2}", + "", + "## Projects impacted", + "", + ] + lines.extend( + f"- {name}" + for name in [ + "root", + "algar-oud-mig", + *[project.name for project in projects], + "gruponos-meltano-native", + ] + ) + lines.extend([ + "", + "## Changes since last tag", + "", + changes or "- Initial tagged release", + "", + "## Verification", + "", + "- make release-ci RELEASE_PHASE=all", + "- make validate VALIDATE_SCOPE=workspace", + "- make build", + ]) + + output_path.write_text("\n".join(lines).rstrip() + "\n", encoding="utf-8") + _ = print(f"wrote: {output_path}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/run.py b/scripts/release/run.py new file mode 100644 index 00000000..acaad58b --- /dev/null +++ b/scripts/release/run.py @@ -0,0 +1,202 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import re +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import ( + bump_version, + discover_projects, + parse_semver, + run_capture, + run_checked, + workspace_root, +) + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--phase", default="all") + _ = parser.add_argument("--version", default="") + _ = parser.add_argument("--tag", default="") + _ = parser.add_argument("--bump", default="") + _ = parser.add_argument("--interactive", type=int, default=1) + _ = parser.add_argument("--push", type=int, default=0) + _ = parser.add_argument("--dry-run", type=int, default=0) + _ = parser.add_argument("--create-branches", type=int, default=1) + return parser.parse_args() + + +def _current_version(root: Path) -> str: + pyproject = root / "pyproject.toml" + content = pyproject.read_text(encoding="utf-8") + match = 
re.search(r'^version\s*=\s*"(?P[^"]+)"', content, flags=re.M) + if not match: + raise RuntimeError("unable to detect version from pyproject.toml") + value = match.group("version") + return value.removesuffix("-dev") + + +def _resolve_version(args: argparse.Namespace, root: Path) -> str: + if args.version: + _ = parse_semver(args.version) + return args.version + + current = _current_version(root) + if args.bump: + return bump_version(current, args.bump) + + if args.interactive != 1: + return current + + print("Select version bump type: [major|minor|patch]") + bump = input("bump> ").strip().lower() + if bump not in {"major", "minor", "patch"}: + raise RuntimeError("invalid bump type") + return bump_version(current, bump) + + +def _resolve_tag(args: argparse.Namespace, version: str) -> str: + if args.tag: + if not args.tag.startswith("v"): + raise RuntimeError("tag must start with v") + return args.tag + return f"v{version}" + + +def _create_release_branches(root: Path, version: str) -> None: + branch = f"release/{version}" + run_checked(["git", "checkout", "-B", branch], cwd=root) + for project in discover_projects(root): + run_checked(["git", "checkout", "-B", branch], cwd=project.path) + for extra in ("algar-oud-mig", "gruponos-meltano-native"): + project_root = root / extra + if project_root.exists(): + run_checked(["git", "checkout", "-B", branch], cwd=project_root) + + +def _phase_version(root: Path, version: str, dry_run: bool) -> None: + command = [ + "python", + "scripts/release/version.py", + "--root", + str(root), + "--version", + version, + "--check" if dry_run else "--apply", + ] + run_checked(command, cwd=root) + + +def _phase_validate(root: Path) -> None: + run_checked(["make", "validate", "VALIDATE_SCOPE=workspace"], cwd=root) + + +def _phase_build(root: Path, version: str) -> None: + output = root / ".reports" / "release" / f"v{version}" + run_checked( + [ + "python", + "scripts/release/build.py", + "--root", + str(root), + "--version", + version, + 
"--output-dir", + str(output), + ], + cwd=root, + ) + + +def _phase_publish( + root: Path, version: str, tag: str, push: bool, dry_run: bool +) -> None: + notes = root / ".reports" / "release" / tag / "RELEASE_NOTES.md" + notes.parent.mkdir(parents=True, exist_ok=True) + run_checked( + [ + "python", + "scripts/release/notes.py", + "--root", + str(root), + "--tag", + tag, + "--version", + version, + "--output", + str(notes), + ], + cwd=root, + ) + if not dry_run: + run_checked( + [ + "python", + "scripts/release/changelog.py", + "--root", + str(root), + "--version", + version, + "--tag", + tag, + "--notes", + str(notes), + "--apply", + ], + cwd=root, + ) + tag_exists = run_capture(["git", "tag", "-l", tag], cwd=root) + if tag_exists.strip() != tag: + run_checked(["git", "tag", "-a", tag, "-m", f"release: {tag}"], cwd=root) + if push: + run_checked(["git", "push", "origin", "HEAD"], cwd=root) + run_checked(["git", "push", "origin", tag], cwd=root) + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + version = _resolve_version(args, root) + tag = _resolve_tag(args, version) + phases = ( + ["validate", "version", "build", "publish"] + if args.phase == "all" + else [part.strip() for part in args.phase.split(",") if part.strip()] + ) + + _ = print(f"release_version={version}") + _ = print(f"release_tag={tag}") + _ = print(f"phases={','.join(phases)}") + + if args.create_branches == 1 and args.dry_run == 0: + _create_release_branches(root, version) + + for phase in phases: + if phase == "validate": + _phase_validate(root) + continue + if phase == "version": + _phase_version(root, version, args.dry_run == 1) + continue + if phase == "build": + _phase_build(root, version) + continue + if phase == "publish": + _phase_publish(root, version, tag, args.push == 1, args.dry_run == 1) + continue + raise RuntimeError(f"invalid phase: {phase}") + + _ = print("release_run=ok") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff 
--git a/scripts/release/shared.py b/scripts/release/shared.py new file mode 100644 index 00000000..0598b719 --- /dev/null +++ b/scripts/release/shared.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +# Owner-Skill: .claude/skills/scripts-maintenance/SKILL.md +from __future__ import annotations + +import json +import re +import subprocess +import sys +from dataclasses import dataclass +from pathlib import Path + + +SEMVER_RE = re.compile( + r"^(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)$" +) + + +@dataclass(frozen=True) +class Project: + name: str + path: Path + + +def workspace_root(path: str | Path = ".") -> Path: + return Path(path).resolve() + + +def discover_projects(root: Path) -> list[Project]: + discover = root / "scripts" / "maintenance" / "_discover.py" + command = [ + sys.executable, + str(discover), + "--workspace-root", + str(root), + "--kind", + "all", + "--format", + "json", + ] + result = subprocess.run(command, capture_output=True, text=True, check=False) + if result.returncode != 0: + msg = (result.stderr or result.stdout).strip() + raise RuntimeError(f"project discovery failed: {msg}") + payload = json.loads(result.stdout) + projects: list[Project] = [] + for item in payload.get("projects", []): + if not isinstance(item, dict): + continue + name = item.get("name") + path_value = item.get("path") + if not isinstance(name, str) or not isinstance(path_value, str): + continue + projects.append(Project(name=name, path=Path(path_value).resolve())) + return sorted(projects, key=lambda project: project.name) + + +def parse_semver(version: str) -> tuple[int, int, int]: + match = SEMVER_RE.match(version) + if not match: + raise ValueError(f"invalid semver version: {version}") + return ( + int(match.group("major")), + int(match.group("minor")), + int(match.group("patch")), + ) + + +def bump_version(current_version: str, bump: str) -> str: + major, minor, patch = parse_semver(current_version) + if bump == "major": + return f"{major + 1}.0.0" + if bump == 
"minor": + return f"{major}.{minor + 1}.0" + if bump == "patch": + return f"{major}.{minor}.{patch + 1}" + raise ValueError(f"unsupported bump: {bump}") + + +def run_checked(command: list[str], cwd: Path | None = None) -> None: + result = subprocess.run(command, cwd=cwd, check=False) + if result.returncode != 0: + cmd = " ".join(command) + raise RuntimeError(f"command failed ({result.returncode}): {cmd}") + + +def run_capture(command: list[str], cwd: Path | None = None) -> str: + result = subprocess.run( + command, cwd=cwd, capture_output=True, text=True, check=False + ) + if result.returncode != 0: + cmd = " ".join(command) + detail = (result.stderr or result.stdout).strip() + raise RuntimeError(f"command failed ({result.returncode}): {cmd}: {detail}") + return result.stdout.strip() diff --git a/scripts/release/version.py b/scripts/release/version.py new file mode 100644 index 00000000..48f49775 --- /dev/null +++ b/scripts/release/version.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import discover_projects, parse_semver, workspace_root + + +def _replace_version(content: str, version: str) -> tuple[str, bool]: + old = 'version = "0.10.0-dev"' + new = f'version = "{version}"' + if old in content: + return content.replace(old, new), True + + marker = 'version = "' + start = content.find(marker) + if start < 0: + return content, False + value_start = start + len(marker) + value_end = content.find('"', value_start) + if value_end < 0: + return content, False + + current = content[value_start:value_end] + current_clean = current.removesuffix("-dev") + _ = parse_semver(current_clean) + if current == version: + return content, False + updated = content[:value_start] + version + content[value_end:] + return updated, True + + 
def _version_files(root: Path) -> list[Path]:
    """Collect every pyproject.toml whose version should be synchronized."""
    files: list[Path] = [root / "pyproject.toml"]
    for project in discover_projects(root):
        candidate = project.path / "pyproject.toml"
        if candidate.exists():
            files.append(candidate)
    for extra in ("algar-oud-mig", "gruponos-meltano-native"):
        candidate = root / extra / "pyproject.toml"
        if candidate.exists():
            files.append(candidate)
    # Resolve and dedupe; sorted for deterministic processing order.
    return sorted({path.resolve() for path in files})


def _parse_args() -> argparse.Namespace:
    """Parse CLI arguments for the version synchronizer."""
    parser = argparse.ArgumentParser()
    _ = parser.add_argument("--root", type=Path, default=Path("."))
    _ = parser.add_argument("--version", required=True)
    _ = parser.add_argument("--apply", action="store_true")
    _ = parser.add_argument("--check", action="store_true")
    return parser.parse_args()


def main() -> int:
    """Rewrite the version field across all workspace pyproject files."""
    args = _parse_args()
    root = workspace_root(args.root)
    _ = parse_semver(args.version)  # fail fast on malformed input

    changed = 0
    for file_path in _version_files(root):
        content = file_path.read_text(encoding="utf-8")
        updated, did_change = _replace_version(content, args.version)
        if not did_change:
            continue
        changed += 1
        # NOTE(review): in the source paste the indentation of the write and
        # the two prints below was ambiguous — confirm against the original
        # file that the update print belongs inside the apply branch.
        if args.apply:
            _ = file_path.write_text(updated, encoding="utf-8")
            _ = print(f"update: {file_path}")

    if args.check:
        _ = print(f"checked_version={args.version}")
    _ = print(f"files_changed={changed}")
    return 0