diff --git a/.claude/skills/scripts-architecture/SKILL.md b/.claude/skills/scripts-architecture/SKILL.md index 9666890e5..33e6d60b1 100644 --- a/.claude/skills/scripts-architecture/SKILL.md +++ b/.claude/skills/scripts-architecture/SKILL.md @@ -60,7 +60,7 @@ description: Architecture scripts — import analysis, violation detection, code ## Rules - Architecture scripts must not modify code without explicit `--fix` or `--apply` flag. -- Analysis output must go to `.sisyphus/reports/` using artifact naming contract. +- Analysis output must go to `.reports/` using artifact naming contract. - Standard quality gates run via Make verbs (`make check`, `make validate`); architecture scripts are implementation details behind Make. - Cross-project tests run via `make test` (or `make test FAIL_FAST=1` to stop on first failure). @@ -94,7 +94,7 @@ Why good: Canonical Make contract, consistent with CLAUDE.md. Good (internal — architecture analysis scripts behind Make): ```bash -python scripts/architecture/analyze_violations.py --output .sisyphus/reports/scripts-architecture--json--violations-latest.json +python scripts/architecture/analyze_violations.py --output .reports/scripts-architecture--json--violations-latest.json ``` Why acceptable: Direct script invocation for detailed architecture analysis. Make verbs are the recommended workflow for standard gates. 
diff --git a/.claude/skills/scripts-dependencies/SKILL.md b/.claude/skills/scripts-dependencies/SKILL.md index 9acd41ca3..81718a94d 100644 --- a/.claude/skills/scripts-dependencies/SKILL.md +++ b/.claude/skills/scripts-dependencies/SKILL.md @@ -70,7 +70,7 @@ make check PROJECT=flext-core # verify after dependency changes Good (internal — dependency analysis scripts): ```bash -python scripts/dependencies/analyze_dependencies.py --output .sisyphus/reports/scripts-dependencies--json--analysis-latest.json +python scripts/dependencies/analyze_dependencies.py --output .reports/scripts-dependencies--json--analysis-latest.json ``` Why good: Make verbs for standard workflow; artifact naming and structured output for detailed analysis. diff --git a/.claude/skills/scripts-infra/SKILL.md b/.claude/skills/scripts-infra/SKILL.md index 5d0543316..62f76f724 100644 --- a/.claude/skills/scripts-infra/SKILL.md +++ b/.claude/skills/scripts-infra/SKILL.md @@ -103,7 +103,7 @@ Why good: Uses shared lib for artifact naming, deterministic path construction. Bad: ```bash -REPORT=".sisyphus/reports/my_report.json" +REPORT=".reports/my_report.json" ``` Why bad: Hardcoded path bypasses artifact naming contract. 
diff --git a/.claude/skills/scripts-infra/validate_artifact_naming.py b/.claude/skills/scripts-infra/validate_artifact_naming.py index 893b1bdbc..46193a1fd 100644 --- a/.claude/skills/scripts-infra/validate_artifact_naming.py +++ b/.claude/skills/scripts-infra/validate_artifact_naming.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Owner-Skill: .claude/skills/scripts-infra/SKILL.md -"""Validate script-generated artifact naming under .sisyphus/.""" +"""Validate script-generated artifact naming under .reports/.""" from __future__ import annotations @@ -17,8 +17,8 @@ EXIT_INFRA = 3 ARTIFACT_PATTERN = re.compile(r"^[a-z][-a-z0-9]*--[a-z]+--[a-z][-a-z0-9]*\.[a-z]+$") -VALIDATED_TOP_DIRS = {"reports", "baselines"} -SKIPPED_TOP_DIRS = {"evidence", "plans", "drafts"} +VALIDATED_TOP_DIRS = {"."} +SKIPPED_TOP_DIRS = {"evidence", "plans", "drafts", "validation", "dependencies"} SKIPPED_FILES = {".gitkeep"} @@ -53,7 +53,7 @@ def validate_artifact_name(filename: str) -> bool: def parse_args(argv: list[str]) -> argparse.Namespace: parser = argparse.ArgumentParser( description=( - "Validate .sisyphus artifact files follow " + "Validate .reports artifact files follow " "----. naming contract." 
), ) @@ -74,33 +74,33 @@ def parse_args(argv: list[str]) -> argparse.Namespace: raise UsageError(msg) from exc -def should_validate(path: Path, sisyphus_root: Path) -> bool: +def should_validate(path: Path, reports_root: Path) -> bool: if not path.is_file(): return False if path.name in SKIPPED_FILES: return False try: - relative = path.relative_to(sisyphus_root) + relative = path.relative_to(reports_root) except ValueError: return False if not relative.parts: return False + if len(relative.parts) == 1: + return True top_dir = relative.parts[0] if top_dir in SKIPPED_TOP_DIRS: return False return top_dir in VALIDATED_TOP_DIRS -def collect_artifacts(sisyphus_root: Path) -> list[Path]: - if not sisyphus_root.exists(): +def collect_artifacts(reports_root: Path) -> list[Path]: + if not reports_root.exists(): return [] return sorted( - path - for path in sisyphus_root.rglob("*") - if should_validate(path, sisyphus_root) + path for path in reports_root.rglob("*") if should_validate(path, reports_root) ) @@ -130,9 +130,9 @@ def suggest_filename(filename: str) -> str: def validate( *, repo_root: Path, - sisyphus_root: Path, + reports_root: Path, ) -> list[NamingViolation]: - artifacts = collect_artifacts(sisyphus_root) + artifacts = collect_artifacts(reports_root) violations: list[NamingViolation] = [] eprint("Artifact Naming Validation") @@ -200,10 +200,10 @@ def run_main(argv: list[str]) -> int: msg = f"--root must point to an existing directory: {repo_root}" raise UsageError(msg) - sisyphus_root = repo_root / ".sisyphus" + reports_root = repo_root / ".reports" report_path = repo_root / ".claude" / "skills" / "scripts-infra" / "report.json" - violations = validate(repo_root=repo_root, sisyphus_root=sisyphus_root) + violations = validate(repo_root=repo_root, reports_root=reports_root) violation_count = len(violations) write_report(report_path, violations) eprint(f"Violations report: {report_path}") diff --git a/.claude/skills/scripts-infra/validate_gate_contract.py 
b/.claude/skills/scripts-infra/validate_gate_contract.py index e00a02d59..d2224acc5 100644 --- a/.claude/skills/scripts-infra/validate_gate_contract.py +++ b/.claude/skills/scripts-infra/validate_gate_contract.py @@ -19,7 +19,7 @@ r"^# Owner-Skill:\s+(.claude/skills/[a-z0-9][-a-z0-9]*/SKILL\.md)\s*$" ) ARTIFACT_NAME_RE = re.compile(r"[a-z][-a-z0-9]*--[a-z]+--[a-z][-a-z0-9]*\.[a-z]+") -SISYPHUS_PATH_RE = re.compile(r"\.sisyphus/(?:reports|baselines|evidence)/([^\s\"']+)") +REPORTS_PATH_RE = re.compile(r"\.reports/([^\s\"']+)") BASH_EXIT_RE = re.compile(r"^\s*exit\s+(\d+)") INTERACTIVE_PY_RE = re.compile(r"\binput\s*\(") INTERACTIVE_SH_RE = re.compile( @@ -259,7 +259,7 @@ def check_interactive( def check_artifact_naming(content: str) -> list[Violation]: violations: list[Violation] = [] for i, line in enumerate(content.splitlines(), 1): - for match in SISYPHUS_PATH_RE.finditer(line): + for match in REPORTS_PATH_RE.finditer(line): filename = Path(match.group(1)).name if "$" in filename or "*" in filename or "{" in filename: continue diff --git a/.claude/skills/scripts-security/SKILL.md b/.claude/skills/scripts-security/SKILL.md index d37b6ff23..d74441046 100644 --- a/.claude/skills/scripts-security/SKILL.md +++ b/.claude/skills/scripts-security/SKILL.md @@ -35,7 +35,7 @@ description: Security scripts — secrets management, vault operations, and secu - Security scripts must never log or print secrets to stdout/stderr. - All scripts must be non-interactive by default; interactive prompts require `--interactive` flag. - Secrets must be read from environment variables or encrypted vaults, never hardcoded. -- Security audit output must go to `.sisyphus/reports/` using artifact naming contract. +- Security audit output must go to `.reports/` using artifact naming contract. 
## Instructions @@ -65,7 +65,7 @@ make check PROJECT=flext-core # all 4 gates including sec Good (internal — security audit scripts): ```bash -python scripts/security/security_audit.py --output .sisyphus/reports/scripts-security--json--audit-latest.json +python scripts/security/security_audit.py --output .reports/scripts-security--json--audit-latest.json ``` Why good: Make verbs for standard security gates; artifact naming for detailed audits. diff --git a/.claude/skills/scripts-testing/SKILL.md b/.claude/skills/scripts-testing/SKILL.md index 186d75ab5..af000cc76 100644 --- a/.claude/skills/scripts-testing/SKILL.md +++ b/.claude/skills/scripts-testing/SKILL.md @@ -39,7 +39,7 @@ description: Testing scripts — pytest runners, test analysis, quality gates, s ## Rules - Test runners must support `--help` and be runnable from repo root. -- Test output must go to stdout; structured reports to `.sisyphus/reports/` via artifact naming. +- Test output must go to stdout; structured reports to `.reports/` via artifact naming. - Stress tests and distributed tests must be explicitly opt-in (not part of quick validation). ## Instructions diff --git a/.claude/skills/workspace-maintenance/SKILL.md b/.claude/skills/workspace-maintenance/SKILL.md index 2d94d92ae..c5434faba 100644 --- a/.claude/skills/workspace-maintenance/SKILL.md +++ b/.claude/skills/workspace-maintenance/SKILL.md @@ -32,7 +32,7 @@ description: Workspace-wide maintenance automation — hygiene checks, dependabo - All checks must be idempotent and safe by default (read-only unless `--apply`). - Mutations (cleanup, lock updates) require explicit `--apply` flag. - Scripts must discover `flext-*` projects with `pyproject.toml` for workspace iteration. -- Reports output to `.sisyphus/reports/workspace-maintenance--json--.json`. +- Reports output to `.reports/workspace-maintenance--json--.json`. - Exit 0 = all checks pass, exit 1 = violations found. 
- Each script must be standalone (stdlib + PyYAML only, no flext_core imports). @@ -49,7 +49,7 @@ description: Workspace-wide maintenance automation — hygiene checks, dependabo 1. Identify the maintenance concern (hygiene, dependabot, poetry, security). 2. Run standard gates first: `make check` and `make validate`. 3. Run specific maintenance checker with `--help` first, then default (dry-run) mode. -4. Review the JSON report in `.sisyphus/reports/` or the ANSI terminal output. +4. Review the JSON report in `.reports/` or the ANSI terminal output. 5. If fixes are needed, re-run with `--apply` to mutate state. 6. Verify: `make validate VALIDATE_SCOPE=workspace` for workspace-level inventory. diff --git a/.github/ci-template/ci.yml b/.github/ci-template/ci.yml new file mode 100644 index 000000000..ed1db360c --- /dev/null +++ b/.github/ci-template/ci.yml @@ -0,0 +1,60 @@ +name: CI + +on: + pull_request: + push: + branches: + - main + workflow_dispatch: + +permissions: + contents: read + +jobs: + ci: + name: ci + runs-on: ubuntu-latest + timeout-minutes: 120 + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: "1.24" + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: false + installer-parallel: true + + - name: Install workflow tools + shell: bash + run: | + set -euo pipefail + npm install -g markdownlint-cli + go install github.com/securego/gosec/v2/cmd/gosec@latest + go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest + + - name: Setup + run: make setup + + - name: Check + run: make check + + - name: Test + run: make test diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 
000000000..c75f22aac --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,47 @@ +name: CI + +on: + pull_request: + push: + branches: + - main + workflow_dispatch: + +permissions: + contents: read + +jobs: + ci: + name: ci + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 + + - name: Setup Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 + with: + python-version: "3.13" + + - name: Install Poetry + uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a + with: + virtualenvs-create: false + installer-parallel: true + + - name: Setup (advisory) + continue-on-error: true + run: make setup + + - name: Check (advisory) + continue-on-error: true + run: make check + + - name: Test (advisory) + continue-on-error: true + run: make test + + - name: Validate (advisory) + continue-on-error: true + run: make validate diff --git a/.github/workflows/docs_maintenance.yml b/.github/workflows/docs_maintenance.yml deleted file mode 100644 index d7828412a..000000000 --- a/.github/workflows/docs_maintenance.yml +++ /dev/null @@ -1,239 +0,0 @@ -name: Documentation Maintenance & Quality Assurance - -on: - schedule: - # Run weekly on Sundays at 00:00 UTC - - cron: "0 0 * * 0" - workflow_dispatch: - inputs: - check_external_links: - description: "Check external links (slower)" - required: false - default: false - type: boolean - pull_request: - paths: - - "**/*.md" - - ".github/workflows/docs_maintenance.yml" - -env: - PYTHON_VERSION: "3.13" - -jobs: - docs-audit: - name: Documentation Quality Audit - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - issues: write - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Full history for change tracking - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install 
dependencies - run: | - python -m pip install --upgrade pip - pip install requests beautifulsoup4 markdown - - - name: Run documentation audit - id: audit - run: | - make docs DOCS_PHASE=audit - - cp .reports/docs/audit-report.md docs_audit_report.md - - - name: Generate JSON report for metrics - run: | - cp .reports/docs/audit-summary.json docs_audit_report.json - - - name: Generate HTML dashboard - run: | - python - <<'PY' - from pathlib import Path - source = Path(".reports/docs/audit-report.md") - target = Path("docs_audit_report.html") - body = source.read_text(encoding="utf-8") if source.exists() else "No report" - html = "
" + body.replace("&", "&").replace("<", "<").replace(">", ">") + "
\n" - target.write_text(html, encoding="utf-8") - PY - - - name: Upload audit reports as artifacts - uses: actions/upload-artifact@v4 - with: - name: docs-audit-reports - path: | - docs_audit_report.md - docs_audit_report.json - docs_audit_report.html - retention-days: 30 - - - name: Create or update audit issue - if: github.event_name == 'schedule' - uses: actions/github-script@v7 - with: - script: | - const fs = require('fs'); - const reportContent = fs.readFileSync('docs_audit_report.md', 'utf8'); - - // Find existing audit issue - const issues = await github.rest.issues.listForRepo({ - owner: context.repo.owner, - repo: context.repo.repo, - state: 'open', - labels: ['documentation', 'maintenance', 'automated'] - }); - - const existingIssue = issues.data.find( - issue => issue.title.includes('Weekly Documentation Audit') - ); - - const issueBody = `## 📊 Weekly Documentation Audit Report - - ${reportContent} - - --- - - *This issue is automatically generated and updated weekly. Review the findings and address high-priority issues.* - - **Action Items:** - - [ ] Review and fix critical/high severity issues - - [ ] Update stale documentation (>90 days) - - [ ] Resolve broken links and missing images - - [ ] Add alt text to images for accessibility - - **Download Full Reports:** - - [Markdown Report](https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) - - [JSON Report](https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) - - [HTML Dashboard](https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) - `; - - if (existingIssue) { - // Update existing issue - await github.rest.issues.update({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: existingIssue.number, - body: issueBody - }); - } else { - // Create new issue - await github.rest.issues.create({ - owner: context.repo.owner, - repo: context.repo.repo, - title: `📚 Weekly 
Documentation Audit - ${new Date().toISOString().split('T')[0]}`, - body: issueBody, - labels: ['documentation', 'maintenance', 'automated'] - }); - } - - - name: Comment on PR with audit summary - if: github.event_name == 'pull_request' - uses: actions/github-script@v7 - with: - script: | - const fs = require('fs'); - const reportContent = fs.readFileSync('docs_audit_report.md', 'utf8'); - - // Extract summary statistics from report - const summaryMatch = reportContent.match(/Total Issues Found:\*\* (\d+)/); - const totalIssues = summaryMatch ? summaryMatch[1] : 'unknown'; - - const comment = `## 📊 Documentation Quality Audit - - **Total Issues Found:** ${totalIssues} - -
- View Full Audit Report - - ${reportContent} - -
- - --- - - Download detailed reports from the workflow artifacts. - `; - - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: comment - }); - - link-validation: - name: Validate Links - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install dependencies - run: | - pip install requests beautifulsoup4 - - - name: Validate internal links - run: | - make docs DOCS_PHASE=audit - cp .reports/docs/audit-report.md link_validation.md - - - name: Check for broken links - run: | - if grep -q "broken_link" link_validation.md; then - echo "::error::Broken links found in documentation" - exit 1 - fi - - style-check: - name: Style & Formatting Check - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Check markdown formatting - uses: DavidAnson/markdownlint-action@v1 - with: - config_file: ".markdownlint.json" - files: "**/*.md" - ignore: "node_modules" - - - name: Check for common issues - run: | - if git grep -nE " +$" -- "*.md"; then - echo "::warning::Found trailing whitespace in markdown files" - fi - - if python - <<'PY' - from pathlib import Path - - files = [p for p in Path('.').rglob('*.md') if '.git' not in p.parts] - found = False - for path in files: - text = path.read_text(encoding='utf-8', errors='ignore') - if '\n\n\n' in text: - print(path.as_posix()) - found = True - raise SystemExit(0 if found else 1) - PY - then - echo "::warning::Found multiple consecutive blank lines" - fi diff --git a/.github/workflows/docs_publish.yml b/.github/workflows/docs_publish.yml deleted file mode 100644 index 2d5c5410d..000000000 --- a/.github/workflows/docs_publish.yml +++ /dev/null @@ -1,56 +0,0 
@@ -name: Publish Docs Portal - -on: - release: - types: [published] - workflow_dispatch: - inputs: - force: - description: "Force docs publish deployment" - required: true - type: boolean - default: false - -permissions: - contents: read - -jobs: - build: - if: github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && inputs.force == true) - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.13" - - - name: Install docs dependencies - run: | - python -m pip install --upgrade pip - pip install mkdocs - - - name: Build root docs portal - run: | - make docs DOCS_PHASE=build - - - name: Upload pages artifact - uses: actions/upload-pages-artifact@v3 - with: - path: ./.reports/docs/site/ - - deploy: - needs: build - runs-on: ubuntu-latest - permissions: - pages: write - id-token: write - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - steps: - - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v4 diff --git a/.github/workflows/flx_comprehensive_tests.yml b/.github/workflows/flx_comprehensive_tests.yml deleted file mode 100644 index 0c9396959..000000000 --- a/.github/workflows/flx_comprehensive_tests.yml +++ /dev/null @@ -1,622 +0,0 @@ -name: FLEXT Comprehensive Tests - -on: - push: - branches: [main, develop, feature/*] - pull_request: - branches: [main, develop] - schedule: - # Run tests daily at 2 AM UTC - - cron: "0 2 * * *" - workflow_dispatch: - inputs: - test_type: - description: "Type of tests to run" - required: true - default: "all" - type: choice - options: - - all - - unit - - integration - - performance - - security - coverage_threshold: - description: "Coverage threshold percentage" - required: false - default: "80" - type: string - -env: - PYTHON_VERSION: "3.13" - POETRY_VERSION: "1.8.3" - COVERAGE_THRESHOLD: ${{ github.event.inputs.coverage_threshold 
|| '80' }} - -jobs: - # ============================================================================ - # SETUP AND VALIDATION - # ============================================================================ - setup: - name: Setup and Validation - runs-on: ubuntu-latest - outputs: - test-matrix: ${{ steps.test-matrix.outputs.matrix }} - projects: ${{ steps.projects.outputs.list }} - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - virtualenvs-create: true - - - name: Cache dependencies - uses: actions/cache@v4 - with: - path: .venv - key: venv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }} - restore-keys: | - venv-${{ runner.os }}-${{ env.PYTHON_VERSION }}- - - - name: Install dependencies - run: | - poetry install --with dev,test - poetry run pip install --upgrade pip - - - name: Validate flext_project structure - run: | - echo "🔍 Validating FLEXT flext_project structure..." - poetry run python -c " - import sys - from pathlib import Path - - required_dirs = [ - 'flext/src', 'flext/tests', - 'flext-database-oracle/src', 'flext-database-oracle/tests', - 'flext-http-oracle-wms/src', 'flext-http-oracle-wms/tests' - ] - - missing = [d for d in required_dirs if not Path(d).exists()] - if missing: - print(f'❌ Missing directories: {missing}') - sys.exit(1) - else: - print('✅ Project structure validated') - " - - - name: Generate test matrix - id: test-matrix - run: | - echo "🎯 Generating test execution matrix..." 
- - TEST_TYPE="${{ github.event.inputs.test_type || 'all' }}" - - if [ "$TEST_TYPE" = "all" ]; then - CATEGORIES='["unit", "integration", "performance", "security"]' - else - CATEGORIES='["${{ github.event.inputs.test_type }}"]' - fi - - MATRIX=$(cat << EOF - { - "include": [ - { - "flext_project": "flext-core", - "path": "flext/tests", - "categories": $CATEGORIES - }, - { - "flext_project": "flext-database-oracle", - "path": "flext-database-oracle/tests", - "categories": $CATEGORIES - }, - { - "flext_project": "flext-http-oracle-wms", - "path": "flext-http-oracle-wms/tests", - "categories": $CATEGORIES - } - ] - } - EOF - ) - - echo "matrix=$MATRIX" >> $GITHUB_OUTPUT - - - name: Discover projects - id: projects - run: | - PROJECTS=$(find . -name "tests" -type d | grep -E "(flext|dc-)" | head -10 | jq -R -s -c 'split("\n")[:-1]') - echo "list=$PROJECTS" >> $GITHUB_OUTPUT - - # ============================================================================ - # UNIT TESTS - # ============================================================================ - unit-tests: - name: Unit Tests - runs-on: ubuntu-latest - needs: setup - if: ${{ github.event.inputs.test_type == 'unit' || github.event.inputs.test_type == 'all' || github.event.inputs.test_type == '' }} - strategy: - fail-fast: false - matrix: - flext_project: - [ - { name: "flext-core", path: "flext/tests" }, - { - name: "flext-database-oracle", - path: "flext-database-oracle/tests", - }, - { - name: "flext-http-oracle-wms", - path: "flext-http-oracle-wms/tests", - }, - ] - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Cache dependencies - uses: actions/cache@v4 - with: - path: .venv - key: venv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - - 
name: Install dependencies - run: poetry install --with dev,test - - - name: Run unit tests - run: | - echo "🧪 Running unit tests for ${{ matrix.flext_project.name }}..." - poetry run python -m pytest ${{ matrix.flext_project.path }} \ - -m "unit" \ - --verbose \ - --tb=short \ - --color=yes \ - --durations=10 \ - --maxfail=5 \ - --cov=flext \ - --cov=wms \ - --cov=db \ - --cov-report=xml:coverage-${{ matrix.flext_project.name }}.xml \ - --cov-report=html:htmlcov-${{ matrix.flext_project.name }} \ - --cov-report=term-missing \ - --junit-xml=junit-${{ matrix.flext_project.name }}.xml \ - --html=report-${{ matrix.flext_project.name }}.html \ - --self-contained-html \ - || true - - - name: Upload test results - uses: actions/upload-artifact@v4 - if: always() - with: - name: unit-test-results-${{ matrix.flext_project.name }} - path: | - junit-*.xml - report-*.html - coverage-*.xml - htmlcov-*/ - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 - if: always() - with: - file: coverage-${{ matrix.flext_project.name }}.xml - flags: unit,${{ matrix.flext_project.name }} - name: ${{ matrix.flext_project.name }}-unit-coverage - - # ============================================================================ - # INTEGRATION TESTS - # ============================================================================ - integration-tests: - name: Integration Tests - runs-on: ubuntu-latest - needs: setup - if: ${{ github.event.inputs.test_type == 'integration' || github.event.inputs.test_type == 'all' || github.event.inputs.test_type == '' }} - - services: - postgres: - image: postgres:15 - env: - POSTGRES_PASSWORD: ${{ github.run_id }} - POSTGRES_DB: test_db - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - 
name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Install dependencies - run: poetry install --with dev,test - - - name: Set up test environment - run: | - echo "🔧 Setting up integration test environment..." - export DATABASE_URL="postgresql://postgres:${{ github.run_id }}@localhost:5432/test_db" - export TEST_MODE="integration" - - - name: Run integration tests - run: | - echo "🔗 Running integration tests..." - poetry run python -m pytest \ - -m "integration" \ - --verbose \ - --tb=short \ - --color=yes \ - --durations=10 \ - --maxfail=3 \ - --cov=flext \ - --cov-report=xml:coverage-integration.xml \ - --cov-report=term-missing \ - --junit-xml=junit-integration.xml \ - --html=report-integration.html \ - --self-contained-html \ - || true - - - name: Upload integration test results - uses: actions/upload-artifact@v4 - if: always() - with: - name: integration-test-results - path: | - junit-integration.xml - report-integration.html - coverage-integration.xml - - # ============================================================================ - # PERFORMANCE TESTS - # ============================================================================ - performance-tests: - name: Performance Tests - runs-on: ubuntu-latest - needs: setup - if: ${{ github.event.inputs.test_type == 'performance' || github.event.inputs.test_type == 'all' }} - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Install dependencies - run: poetry install --with dev,test - - - name: Run performance tests - run: | - echo "⚡ Running performance tests..." 
- poetry run python -m pytest \ - -m "performance" \ - --verbose \ - --tb=short \ - --color=yes \ - --benchmark-only \ - --benchmark-json=benchmark-results.json \ - --junit-xml=junit-performance.xml \ - || true - - - name: Upload performance results - uses: actions/upload-artifact@v4 - if: always() - with: - name: performance-test-results - path: | - benchmark-results.json - junit-performance.xml - - # ============================================================================ - # SECURITY TESTS - # ============================================================================ - security-tests: - name: Security Tests - runs-on: ubuntu-latest - needs: setup - if: ${{ github.event.inputs.test_type == 'security' || github.event.inputs.test_type == 'all' }} - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Install dependencies - run: poetry install --with dev,test - - - name: Run security scans - run: | - echo "🔒 Running security scans..." 
- - # Bandit security scan - poetry run bandit -r flext/ -f json -o bandit-report.json || true - - # Safety check for vulnerabilities - poetry run safety check --json --output safety-report.json || true - - # Security-focused tests - poetry run python -m pytest \ - -m "security" \ - --verbose \ - --tb=short \ - --junit-xml=junit-security.xml \ - || true - - - name: Upload security results - uses: actions/upload-artifact@v4 - if: always() - with: - name: security-test-results - path: | - bandit-report.json - safety-report.json - junit-security.xml - - # ============================================================================ - # CODE QUALITY CHECKS - # ============================================================================ - quality-checks: - name: Code Quality - runs-on: ubuntu-latest - needs: setup - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Install dependencies - run: poetry install --with dev,test - - - name: Run linting - run: | - echo "🔍 Running code quality checks..." - - - poetry run ruff check . 
--output-format=json --output-file=ruff-report.json || true - - - poetry run pyrefly flext/ --json-report pyrefly-report || true - - # Black formatting check - poetry run black --check --diff flext/ || true - - - name: Upload quality reports - uses: actions/upload-artifact@v4 - if: always() - with: - name: quality-check-results - path: | - ruff-report.json - pyrefly-report/ - - # ============================================================================ - # DEPLOYMENT TESTS - # ============================================================================ - deployment-tests: - name: Deployment Tests - runs-on: ubuntu-latest - needs: [unit-tests, integration-tests] - if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop' }} - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Install dependencies - run: poetry install --with dev,test - - - name: Test package build - run: | - echo "📦 Testing package builds..." - - # Test FLEXT core build - cd flext && poetry build && cd .. - - # Test client packages build - cd flext-database-oracle && poetry build && cd .. - cd flext-http-oracle-wms && poetry build && cd .. - - - name: Test installation - run: | - echo "💿 Testing package installation..." 
- - # Create fresh virtual environment - python -m venv test-env - source test-env/bin/activate - - # Install built packages - pip install flext/dist/*.whl - pip install flext-database-oracle/dist/*.whl - pip install flext-http-oracle-wms/dist/*.whl - - # Test imports - python -c "import flext; print('✅ FLEXT core imported successfully')" - python -c "import flext_database_oracle; print('✅ FLEXT Oracle imported successfully')" || true - python -c "import flext_http_oracle_wms; print('✅ FLEXT WMS imported successfully')" || true - - # ============================================================================ - # COMPREHENSIVE REPORT - # ============================================================================ - comprehensive-report: - name: Generate Comprehensive Report - runs-on: ubuntu-latest - needs: - [ - unit-tests, - integration-tests, - performance-tests, - security-tests, - quality-checks, - ] - if: always() - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Download all artifacts - uses: actions/download-artifact@v4 - with: - path: test-results/ - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - version: ${{ env.POETRY_VERSION }} - - - name: Install dependencies - run: poetry install --with dev,test - - - name: Generate comprehensive report - run: | - echo "📊 Generating comprehensive test report..." 
- poetry run python scripts/testing/run_all_tests.py || true - - - name: Upload comprehensive report - uses: actions/upload-artifact@v4 - if: always() - with: - name: comprehensive-test-report - path: | - reports/ - junit/ - test-results/ - - - name: Publish test results - uses: dorny/test-reporter@v1 - if: always() - with: - name: FLEXT Test Results - path: "test-results/**/*.xml" - reporter: java-junit - - - name: Comment PR with results - if: github.event_name == 'pull_request' - uses: actions/github-script@v7 - with: - script: | - const fs = require('fs'); - const path = require('path'); - - // Read test results and generate comment - let comment = '## 🧪 FLEXT Test Results\n\n'; - - // Add summary - comment += '### Summary\n'; - comment += '- ✅ Unit Tests: Completed\n'; - comment += '- 🔗 Integration Tests: Completed\n'; - comment += '- ⚡ Performance Tests: Completed\n'; - comment += '- 🔒 Security Tests: Completed\n'; - comment += '- 🔍 Quality Checks: Completed\n\n'; - - comment += '### Coverage\n'; - comment += `Target: ${process.env.COVERAGE_THRESHOLD}%\n\n`; - - comment += '### Artifacts\n'; - comment += '- 📊 [Comprehensive Report](../actions/runs/${{ github.run_id }})\n'; - comment += '- 📈 [Coverage Reports](../actions/runs/${{ github.run_id }})\n'; - comment += '- 🔒 [Security Scans](../actions/runs/${{ github.run_id }})\n'; - - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: comment - }); - - # ============================================================================ - # NOTIFICATION - # ============================================================================ - notify: - name: Notify Results - runs-on: ubuntu-latest - needs: [comprehensive-report] - if: always() && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') - - steps: - - name: Notify success - if: ${{ needs.comprehensive-report.result == 'success' }} - run: | - echo "✅ All FLEXT 
tests completed successfully!" - echo "🎉 Ready for deployment" - - - name: Notify failure - if: ${{ needs.comprehensive-report.result == 'failure' }} - run: | - echo "❌ Some FLEXT tests failed" - echo "🔧 Please review the test results and fix issues" - exit 1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..7ea974c84 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,140 @@ +name: Workspace Release + +on: + push: + tags: + - "v*" + workflow_dispatch: + inputs: + tag: + description: "Release tag (example: v0.10.0)" + required: true + type: string + +permissions: + contents: write + pull-requests: write + +concurrency: + group: release-${{ github.ref_name || inputs.tag }} + cancel-in-progress: false + +jobs: + release: + runs-on: ubuntu-latest + timeout-minutes: 180 + env: + FLEXT_USE_HTTPS: "1" + FLEXT_WORKSPACE_ROOT: ${{ github.workspace }} + steps: + - name: Resolve tag and version + id: release + shell: bash + run: | + set -euo pipefail + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + TAG="${{ inputs.tag }}" + else + TAG="${GITHUB_REF_NAME}" + fi + if [[ ! 
"$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "Invalid release tag: $TAG" >&2 + exit 1 + fi + VERSION="${TAG#v}" + echo "tag=$TAG" >> "$GITHUB_OUTPUT" + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + submodules: recursive + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: false + installer-parallel: true + + - name: Install system dependencies + shell: bash + run: | + set -euo pipefail + sudo apt-get update + sudo apt-get install -y libldap2-dev libsasl2-dev libssl-dev + + - name: Setup workspace + run: make setup + + - name: Run release-ci pipeline + run: | + make release-ci \ + RELEASE_PHASE=validate,version,build,publish \ + VERSION="${{ steps.release.outputs.version }}" \ + TAG="${{ steps.release.outputs.tag }}" \ + INTERACTIVE=0 \ + CREATE_BRANCHES=0 \ + PUSH=0 + + - name: Create or update GitHub Release + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + set -euo pipefail + TAG="${{ steps.release.outputs.tag }}" + NOTES=".reports/release/${TAG}/RELEASE_NOTES.md" + REPORT=".reports/release/v${{ steps.release.outputs.version }}/build-report.json" + + if gh release view "$TAG" >/dev/null 2>&1; then + gh release edit "$TAG" --notes-file "$NOTES" + gh release upload "$TAG" "$REPORT" --clobber + else + gh release create "$TAG" \ + "$REPORT" \ + --title "Release $TAG" \ + --notes-file "$NOTES" + fi + + - name: Open or update release PR to main + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + set -euo pipefail + TAG="${{ steps.release.outputs.tag }}" + VERSION="${{ steps.release.outputs.version }}" + BRANCH="release/${VERSION}-sync" + + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + git fetch origin main --tags + if git merge-base 
--is-ancestor "$TAG" origin/main; then + echo "tag $TAG already reachable from main" + exit 0 + fi + + git checkout -B "$BRANCH" "$TAG" + if ! git diff --quiet; then + git add docs/CHANGELOG.md docs/releases/latest.md docs/releases/${TAG}.md || true + if ! git diff --cached --quiet; then + git commit -m "docs: close changelog for ${TAG}" + fi + fi + + git push -u origin "$BRANCH" --force-with-lease + + if gh pr view "$BRANCH" >/dev/null 2>&1; then + PR_URL="$(gh pr view "$BRANCH" --json url --jq .url)" + else + PR_URL="$(gh pr create --base main --head "$BRANCH" --title "release: ${TAG}" --body "Automated release merge for ${TAG}.\n\n- release: ${TAG}\n- version: ${VERSION}\n- status: alpha, non-production")" + fi + + gh pr merge "$PR_URL" --auto --squash diff --git a/CLAUDE.md b/CLAUDE.md index 9be915622..1afb93d80 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -286,7 +286,7 @@ Strictness policy: - Validation reports are machine-readable JSON artifacts. - Dependency and typing reports live under `.reports/dependencies/` (produced by `make upgrade` and `make typings` unless `DEPS_REPORT=0`). Stub supply-chain report: `.reports/validate/stub-supply-chain.json`. -- Workspace validation artifacts (e.g. scripts inventory) may live under `.sisyphus/reports/` when produced by `make validate VALIDATE_SCOPE=workspace`. +- Workspace validation artifacts (e.g. scripts inventory) must live under `.reports/` when produced by `make validate VALIDATE_SCOPE=workspace`. - Skill-local reports remain under `.claude/skills//report.json` and `.claude/skills//fix-report.json`. - Reports must include explicit next actions (`TODO: make PROJECT= `) for every failed gate. 
diff --git a/Makefile b/Makefile index e683f95b2..38f24acf6 100644 --- a/Makefile +++ b/Makefile @@ -15,6 +15,14 @@ FAIL_FAST ?= JOBS ?= CHECK_GATES ?= VALIDATE_GATES ?= +RELEASE_PHASE ?= all +INTERACTIVE ?= 1 +DRY_RUN ?= +PUSH ?= +VERSION ?= +TAG ?= +BUMP ?= +CREATE_BRANCHES ?= 1 Q := @ ifdef VERBOSE @@ -113,7 +121,7 @@ if [ -n "$$residual_venvs" ]; then \ fi endef -.PHONY: help setup upgrade check security format docs test validate typings clean +.PHONY: help setup upgrade build check security format docs test validate typings clean release release-ci help: ## Show simple workspace verbs $(Q)echo "FLEXT Workspace" @@ -124,12 +132,15 @@ help: ## Show simple workspace verbs $(Q)echo "Core verbs:" $(Q)echo " setup Install all projects into workspace .venv, then run validate VALIDATE_SCOPE=workspace" $(Q)echo " upgrade Upgrade deps + modernize + dependency report (.reports/dependencies/)" + $(Q)echo " build Build/package all selected projects" $(Q)echo " check Run the 6 lint gates in all projects" $(Q)echo " security Run all security checks in all projects" $(Q)echo " format Run all formatting in all projects" $(Q)echo " docs Build docs in all projects" $(Q)echo " test Run tests only in all projects" $(Q)echo " validate Run validate gates (FIX=1 auto-fix, VALIDATE_SCOPE=workspace for repo-level)" + $(Q)echo " release Interactive workspace release orchestration" + $(Q)echo " release-ci Non-interactive release run for CI/tag workflows" $(Q)echo " typings Stub supply-chain + typing report (PROJECT/PROJECTS to scope)" $(Q)echo " clean Clean all projects" $(Q)echo "" @@ -143,15 +154,24 @@ help: ## Show simple workspace verbs $(Q)echo " VALIDATE_GATES=complexity,docstring Select validate gates (default: all)" $(Q)echo " VALIDATE_SCOPE=project|workspace Validate scope (default: project)" $(Q)echo " DOCS_PHASE=audit|fix|build|generate|validate|all" + $(Q)echo " RELEASE_PHASE=validate,version,build,publish|all" + $(Q)echo " INTERACTIVE=1|0 Release prompt mode" + $(Q)echo " 
DRY_RUN=1 Print plan, do not tag/push" + $(Q)echo " PUSH=1 Push release commit/tag" + $(Q)echo " VERSION=0.10.0 TAG=v0.10.0 BUMP=patch Release controls" + $(Q)echo " CREATE_BRANCHES=1|0 Create release branches in workspace + projects" $(Q)echo " DEPS_REPORT=0 Skip dependency report after upgrade/typings" $(Q)echo "" $(Q)echo "Examples:" $(Q)echo " make check PROJECT=flext-core" + $(Q)echo " make build" $(Q)echo " make typings PROJECT=flext-api" $(Q)echo " make check CHECK_GATES=lint,type" $(Q)echo " make validate PROJECTS=\"flext-core flext-api\" FIX=1" $(Q)echo " make test PROJECT=flext-api PYTEST_ARGS=\"-k unit\" FAIL_FAST=1" $(Q)echo " make validate VALIDATE_SCOPE=workspace" + $(Q)echo " make release BUMP=minor" + $(Q)echo " make release-ci VERSION=0.10.0 TAG=v0.10.0 RELEASE_PHASE=all" $(Q)echo " NOTE: External projects (not in .gitmodules) require manual clone." setup: ## Install all projects into workspace .venv @@ -178,7 +198,7 @@ setup: ## Install all projects into workspace .venv log_file="/tmp/flext-setup-$$proj.log"; \ start_ts=$$(date +%s); \ printf "[%2d/%2d] setup %s\n" $$step $$total_steps "$$proj"; \ - if python scripts/dependencies/sync_internal_deps.py --project-root "$$proj" >>"$$log_file" 2>&1; then \ + if FLEXT_WORKSPACE_ROOT="$(CURDIR)" python scripts/dependencies/sync_internal_deps.py --project-root "$$proj" >>"$$log_file" 2>&1; then \ :; \ else \ echo " sync ... failed"; \ @@ -218,7 +238,7 @@ setup: ## Install all projects into workspace .venv start_ts=$$(date +%s); \ root_lock_ok=0; \ printf "[%2d/%2d] setup %s\n" $$step $$total_steps "root"; \ - if ! python scripts/dependencies/sync_internal_deps.py --project-root . >"$$log_file" 2>&1; then \ + if ! FLEXT_WORKSPACE_ROOT="$(CURDIR)" python scripts/dependencies/sync_internal_deps.py --project-root . >"$$log_file" 2>&1; then \ echo " sync ... 
failed"; \ cat "$$log_file"; \ failed=$$((failed + 1)); \ @@ -275,7 +295,7 @@ upgrade: ## Upgrade Python dependencies to latest via Poetry log_file="/tmp/flext-upgrade-$$proj.log"; \ start_ts=$$(date +%s); \ printf "[%2d/%2d] upgrade %s\n" $$step $$total_steps "$$proj"; \ - if python scripts/dependencies/sync_internal_deps.py --project-root "$$proj" >>"$$log_file" 2>&1; then \ + if FLEXT_WORKSPACE_ROOT="$(CURDIR)" python scripts/dependencies/sync_internal_deps.py --project-root "$$proj" >>"$$log_file" 2>&1; then \ :; \ else \ echo " sync ... failed"; \ @@ -315,7 +335,7 @@ upgrade: ## Upgrade Python dependencies to latest via Poetry start_ts=$$(date +%s); \ root_update_ok=0; \ printf "[%2d/%2d] upgrade %s\n" $$step $$total_steps "root"; \ - if ! python scripts/dependencies/sync_internal_deps.py --project-root . >"$$log_file" 2>&1; then \ + if ! FLEXT_WORKSPACE_ROOT="$(CURDIR)" python scripts/dependencies/sync_internal_deps.py --project-root . >"$$log_file" 2>&1; then \ echo " sync ... failed"; \ cat "$$log_file"; \ failed=$$((failed + 1)); \ @@ -357,6 +377,8 @@ upgrade: ## Upgrade Python dependencies to latest via Poetry echo "Dependency report (deptry + pip check)..."; \ $(POETRY_ENV) python scripts/dependencies/detect_runtime_dev_deps.py -q --no-fail || true; \ fi + $(Q)echo "Syncing GitHub workflow templates..." 
+ $(Q)$(WORKSPACE_VENV)/bin/python scripts/github/sync_workflows.py --workspace-root "$(CURDIR)" --apply --prune --report .reports/workflows/sync.json check: ## Run lint gates in all projects (CHECK_GATES=lint,format,pyrefly,mypy,pyright,security) $(Q)$(ENSURE_NO_PROJECT_CONFLICT) @@ -370,6 +392,39 @@ check: ## Run lint gates in all projects (CHECK_GATES=lint,format,pyrefly,mypy,p $(if $(CHECK_GATES),--make-arg "CHECK_GATES=$(CHECK_GATES)") \ $(SELECTED_PROJECTS) +build: ## Build/package all selected projects + $(Q)$(ENSURE_NO_PROJECT_CONFLICT) + $(Q)$(ENFORCE_WORKSPACE_VENV) + $(Q)$(ENSURE_SELECTED_PROJECTS) + $(Q)$(ENSURE_PROJECTS_EXIST) + $(Q)$(ORCHESTRATOR) --verb build $(if $(filter 1,$(FAIL_FAST)),--fail-fast) $(SELECTED_PROJECTS) + +release: ## Interactive workspace release orchestration + $(Q)$(ENFORCE_WORKSPACE_VENV) + $(Q)python scripts/release/run.py \ + --root "$(CURDIR)" \ + --phase "$(RELEASE_PHASE)" \ + --interactive "$(INTERACTIVE)" \ + --create-branches "$(CREATE_BRANCHES)" \ + $(if $(DRY_RUN),--dry-run "$(DRY_RUN)",) \ + $(if $(PUSH),--push "$(PUSH)",) \ + $(if $(VERSION),--version "$(VERSION)",) \ + $(if $(TAG),--tag "$(TAG)",) \ + $(if $(BUMP),--bump "$(BUMP)",) + +release-ci: ## Non-interactive release run for CI/tag workflows + $(Q)$(ENFORCE_WORKSPACE_VENV) + $(Q)python scripts/release/run.py \ + --root "$(CURDIR)" \ + --phase "$(RELEASE_PHASE)" \ + --interactive 0 \ + --create-branches 0 \ + $(if $(DRY_RUN),--dry-run "$(DRY_RUN)",) \ + $(if $(PUSH),--push "$(PUSH)",) \ + $(if $(VERSION),--version "$(VERSION)",) \ + $(if $(TAG),--tag "$(TAG)",) \ + $(if $(BUMP),--bump "$(BUMP)",) + security: ## Run all security checks in all projects $(Q)$(ENSURE_NO_PROJECT_CONFLICT) $(Q)$(ENFORCE_WORKSPACE_VENV) @@ -413,10 +468,11 @@ ifeq ($(VALIDATE_SCOPE),workspace) $(Q)$(ENFORCE_WORKSPACE_VENV) $(Q)$(AUTO_SYNC_ALL_PROJECTS) $(Q)$(AUTO_ADJUST_SELECTED_PROJECTS) - $(Q)mkdir -p .sisyphus/reports + $(Q)mkdir -p .reports $(Q)echo "Running workspace validation 
(inventory + strict anti-drift gates)..." $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/generate_scripts_inventory.py --root . $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/check_base_mk_sync.py + $(Q)$(WORKSPACE_VENV)/bin/python scripts/github/lint_workflows.py --root . --report .reports/workflows/actionlint.json $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/skill_validate.py --skill scripts-validation --mode strict $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/skill_validate.py --skill rules-github --mode strict $(Q)$(WORKSPACE_VENV)/bin/python scripts/core/skill_validate.py --skill rules-docker --mode strict diff --git a/README.md b/README.md index 3e510b8e8..a32337344 100644 --- a/README.md +++ b/README.md @@ -1,190 +1,61 @@ -# FLEXT - Enterprise Data Integration Platform - - - -- [🚀 Key Features](#-key-features) -- [📦 Ecosystem Overview](#-ecosystem-overview) -- [🏗️ Architecture](#-architecture) -- [🚀 Quick Start](#-quick-start) -- [Installation](#installation) -- [Usage](#usage) - - [Basic Usage: LDIF Processing](#basic-usage-ldif-processing) - - [Railway-Oriented Error Handling](#railway-oriented-error-handling) -- [🛠️ Development](#-development) - - [Prerequisites](#prerequisites) - - [Workflow (Make)](#workflow-make) -- [🤝 Contributing](#-contributing) -- [📄 License](#-license) - - -[![Python 3.13+](https://img.shields.io/badge/python-3.13+-blue.svg)](https://www.python.org/downloads/) -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) - -**FLEXT** is a comprehensive, enterprise-grade data integration platform built with Python 3.13+ and modern architectural patterns. - -**Reviewed**: 2026-02-17 | **Version**: 0.10.0-dev - -Part of the [FLEXT](https://github.com/flext-sh/flext) ecosystem. 
- -## 🚀 Key Features - -- **Unified API**: Single facade pattern across all libraries with `flext-core` integration. -- **Type Safety**: Full Pydantic v2 integration with comprehensive validation and strict type checking. -- **Enterprise Patterns**: Implements CQRS, Railway-oriented programming, and Dependency Injection. -- **Extensible Architecture**: Plugin system built on robust `flext-core` abstractions. -- **RFC Compliant**: Full RFC 2849/4512 LDIF processing capabilities with quirk handling. -- **Production Ready**: Comprehensive testing, monitoring, and structured logging. - -## 📦 Ecosystem Overview - -FLEXT is composed of specialized libraries designed to work together or independently. - -| Library | Description | -| ------- | ----------- | -| **[flext-core](flext-core/)** | Core framework providing base patterns, dependency injection, and error handling. | -| **[flext-api](flext-api/)** | REST API framework with OpenAPI support and unified HTTP clients. | -| **[flext-auth](flext-auth/)** | Authentication and authorization services supporting multiple providers. | -| **[flext-ldif](flext-ldif/)** | High-performance, RFC-compliant LDIF processing and migration engine. | -| **[flext-ldap](flext-ldap/)** | Universal LDAP client operations and directory management. | -| **[flext-oracle](flext-db-oracle/)** | Enterprise Oracle database integration with SQLAlchemy 2.0. | -| **[flext-grpc](flext-grpc/)** | gRPC services framework for high-performance microservices. | -| **[flext-meltano](flext-meltano/)** | Meltano integration for ELT pipelines and Singer taps/targets. | -| **[flext-web](flext-web/)** | Web application patterns and dashboarding components. | - -## 🏗️ Architecture - -FLEXT is built on a clean architecture foundation: - -- **Clean Architecture**: Clear separation of concerns with dependency inversion. -- **CQRS Pattern**: Command Query Responsibility Segregation for complex business logic. 
-- **Railway-Oriented Programming**: Functional error handling returning `FlextResult[T]`. -- **Dependency Injection**: `FlextContainer` for managing component lifecycles. - -``` -┌─────────────────────────────────────┐ -│ Application Layer │ -│ - Use Cases & Application Services│ -│ - Command/Query Handlers │ -└─────────────────┬───────────────────┘ - │ -┌─────────────────────────────────────┐ -│ Domain Layer │ -│ - Business Logic & Rules │ -│ - Domain Models & Value Objects │ -└─────────────────┬───────────────────┘ - │ -┌─────────────────────────────────────┐ -│ Infrastructure Layer │ -│ - External Services (DB, LDAP) │ -│ - File System, Network I/O │ -└─────────────────┬───────────────────┘ - │ -┌─────────────────────────────────────┐ -│ Core Layer │ -│ - flext-core Framework │ -│ - Common Patterns & Abstractions │ -└─────────────────────────────────────┘ -``` - -## 🚀 Quick Start - -## Installation - -Install the core framework: - -```bash -pip install flext-core -``` - -Install specific capabilities as needed: - -```bash -pip install flext-ldif flext-api flext-auth -``` - -## Usage - -### Basic Usage: LDIF Processing - -```python -from flext_ldif import FlextLdif - -# Initialize LDIF API -ldif = FlextLdif() - -# Parse LDIF content -ldif_content = """dn: cn=test,dc=example,dc=com -cn: test -sn: user -objectClass: inetOrgPerson""" - -result = ldif.parse(ldif_content) -if result.is_success: - entries = result.unwrap() - print(f"Successfully parsed {len(entries)} LDIF entries") -``` - -### Railway-Oriented Error Handling - -FLEXT uses `FlextResult` for consistent error handling across the ecosystem. - -```python -from flext_core import FlextResult - -def process_data(data: str) -> FlextResult[str]: - if not data: - return FlextResult.failure("Data cannot be empty") - - # Process data... 
- return FlextResult.success("processed data") - -# Usage -result = process_data("input") -if result.is_success: - print(result.unwrap()) -else: - print(f"Error: {result.error}") -``` - -## 🛠️ Development - -All development and maintenance run via Make from the repository root. No ad-hoc script invocations as the primary workflow. - -### Prerequisites - -- Python 3.13+ -- Poetry (for dependency management) -- Git - -### Workflow (Make) - -```bash -# Clone with submodules -git clone --recursive https://github.com/flext-sh/flext.git -cd flext - -# Setup: single workspace .venv, all projects (optional: PROJECT= or PROJECTS=) -make setup - -# Upgrade deps + dependency report (use DEPS_REPORT=0 to skip report) -make upgrade - -# Quality gates -make check -make test -make validate - -# Typings: stub supply-chain + typing report (optional: PROJECT=, DEPS_REPORT=0) -make typings -``` - -Run **make help** for all verbs and parameters (e.g. `PROJECT=flext-core`, `FAIL_FAST=1`, `FIX=1`). See [CLAUDE.md](CLAUDE.md) for the full automation contract and standard places. - -## 🤝 Contributing - -We welcome contributions! Please see our [Contributing Guide](docs/CONTRIBUTING.md) for details on the development workflow, coding standards, and submission process. - -## 📄 License - -FLEXT is released under the MIT License. See [LICENSE](LICENSE) for details. +# FLEXT + +Portfolio de 33 projetos de integracao de dados, revisados individualmente para orientar decisao tecnica e operacao. + +## O que o repositorio consolida + +- Bases arquiteturais para API, autenticacao, runtime, observabilidade e qualidade. +- Conectores Singer (taps e targets) para LDAP, LDIF, Oracle, OIC e WMS. +- Projetos dbt para publicacao de camada analitica por dominio. +- Solucoes operacionais dedicadas para cenarios de migracao e clientes.
+ +## Projetos revisados caso a caso + +| Projeto | Papel funcional no ecossistema | +| --- | --- | +| `algar-oud-mig` | Ferramenta operacional para migracao LDAP/LDIF de Oracle Internet Directory (OID) para Oracle Unified Directory (OUD) com execucao por fases. | +| `flexcore` | Runtime hibrido Go/Python para inicializacao de servicos e coordenacao operacional de componentes FLEXT. | +| `flext-api` | Camada de API HTTP para exposicao e consumo de servicos de dados no ecossistema FLEXT. | +| `flext-auth` | Servico de autenticacao e autorizacao para controle de acesso entre APIs, CLIs e componentes FLEXT. | +| `flext-cli` | Framework de linha de comando para construir interfaces operacionais padronizadas no portfolio FLEXT. | +| `flext-core` | Base arquitetural compartilhada do ecossistema, com contratos, utilitarios e padroes transversais. | +| `flext-db-oracle` | Biblioteca de acesso Oracle para leitura, escrita e suporte de persistencia em pipelines de dados. | +| `flext-dbt-ldap` | Projeto dbt para transformar dados LDAP em modelos analiticos operacionais e de auditoria. | +| `flext-dbt-ldif` | Projeto dbt para modelagem analitica de dados extraidos de arquivos LDIF. | +| `flext-dbt-oracle` | Projeto dbt para transformar dados Oracle em estruturas analiticas reutilizaveis. | +| `flext-dbt-oracle-wms` | Projeto dbt especializado na transformacao de dados Oracle WMS para analise operacional logistica. | +| `flext-grpc` | Camada gRPC para comunicacao service-to-service de baixa latencia entre componentes FLEXT. | +| `flext-ldap` | Biblioteca de operacoes LDAP para leitura, escrita e sincronizacao de identidades em diretorios corporativos. | +| `flext-ldif` | Biblioteca para parsing, validacao e transformacao de arquivos LDIF em fluxos de migracao de diretorio. | +| `flext-meltano` | Camada de orquestracao Singer/Meltano para coordenar extracao, carga e transformacao em pipelines FLEXT. 
| +| `flext-observability` | Componente de observabilidade para metricas, tracing e diagnostico operacional de servicos e pipelines. | +| `flext-oracle-oic` | Biblioteca de integracao com Oracle Integration Cloud para operacoes de conectividade e interoperabilidade. | +| `flext-oracle-wms` | Biblioteca de integracao com Oracle WMS para acesso a dados operacionais de armazem. | +| `flext-plugin` | Sistema de plugins para extensao modular de funcionalidades sem alterar o nucleo da plataforma. | +| `flext-quality` | Camada de validacao tecnica para qualidade, conformidade e seguranca no ecossistema FLEXT. | +| `flext-tap-ldap` | Singer Tap para extracao de dados de diretorios LDAP em pipelines de integracao. | +| `flext-tap-ldif` | Singer Tap para extracao de dados a partir de arquivos LDIF. | +| `flext-tap-oracle` | Singer Tap para extracao de dados de bancos Oracle para pipelines ELT. | +| `flext-tap-oracle-oic` | Singer Tap para extracao de entidades e dados de Oracle Integration Cloud. | +| `flext-tap-oracle-wms` | Singer Tap para extracao de dados operacionais de Oracle Warehouse Management System. | +| `flext-target-ldap` | Singer Target para aplicacao de dados em destinos LDAP. | +| `flext-target-ldif` | Singer Target para materializar saida de pipeline em formato LDIF. | +| `flext-target-oracle` | Singer Target para carga de dados em banco Oracle como destino final de pipeline. | +| `flext-target-oracle-oic` | Singer Target para enviar dados a recursos Oracle Integration Cloud. | +| `flext-target-oracle-wms` | Singer Target para aplicar dados em Oracle WMS como destino operacional. | +| `flext-web` | Camada web para operacao e visualizacao das capacidades do ecossistema FLEXT. | +| `gruponos-meltano-native` | Pipeline ETL Meltano dedicado ao contexto Grupo Nos, com foco operacional em cargas Oracle WMS. 
| + +## Estado atual do portfolio + +- Qualidade global: **Alpha** +- Uso recomendado: **Nao produtivo** +- Aplicacao permitida: desenvolvimento, POC e homologacao controlada. + +## Diretriz de governanca desta revisao + +Cada README foi tratado individualmente com foco no que o projeto faz, no contexto operacional de uso e no risco atual de adocao. + +## Repositorio oficial + +Codigo-fonte e governanca: [github.com/flext-sh/flext](https://github.com/flext-sh/flext). diff --git a/base.mk b/base.mk index 590288c8b..77541d100 100644 --- a/base.mk +++ b/base.mk @@ -89,8 +89,8 @@ $(LINT_CACHE_DIR): $(Q)mkdir -p $(LINT_CACHE_DIR) # === SIMPLE VERB SURFACE === -.PHONY: help setup check security format docs docs-base docs-sync-scripts test validate clean _preflight -STANDARD_VERBS := setup check security format docs test validate clean +.PHONY: help setup build check security format docs docs-base docs-sync-scripts test validate clean _preflight +STANDARD_VERBS := setup build check security format docs test validate clean $(STANDARD_VERBS): _preflight define ENFORCE_WORKSPACE_VENV @@ -163,6 +163,7 @@ help: ## Show commands $(Q)echo "" $(Q)echo "Core verbs:" $(Q)echo " setup Install dependencies and hooks (with automatic md/go adjustment)" + $(Q)echo " build Build distributable artifacts" $(Q)echo " check Run the 8 lint gates" $(Q)echo " security Run all security checks" $(Q)echo " format Run all formatting (including automatic md/go adjustment)" @@ -188,6 +189,14 @@ setup: ## Complete setup echo "INFO: skipping pre-commit install (no git repository)"; \ fi +build: ## Build distributable artifacts + $(Q)if [ "$(CORE_STACK)" = "go" ]; then \ + mkdir -p .reports/build; \ + go build -o .reports/build/$(PROJECT_NAME) ./...; \ + exit 0; \ + fi + $(Q)$(POETRY) build + check: ## Run lint gates (CHECK_GATES=lint,format,pyrefly,mypy,pyright,security,markdown,go,type to select) $(Q)if [ "$(CORE_STACK)" = "go" ]; then \ gates="$(CHECK_GATES)"; \ diff --git 
a/docs/scripts/gate-contract.md b/docs/scripts/gate-contract.md index 7dbaa2284..25fcde290 100644 --- a/docs/scripts/gate-contract.md +++ b/docs/scripts/gate-contract.md @@ -111,7 +111,7 @@ this naming convention: |----------|----------------|---------| | `FLEXT_POLICY_MODE` | `--mode` | `baseline` | | `FLEXT_VALIDATION_ROOT` | `--root` | `.` | -| `FLEXT_VALIDATION_REPORT_DIR` | `--report-file` directory | `.sisyphus/reports/validation` | +| `FLEXT_VALIDATION_REPORT_DIR` | `--report-file` directory | `.reports/validation` | CLI flags take precedence over environment variables. @@ -266,7 +266,7 @@ The contract validator (`scripts/core/check_script_gate_contract.py`) verifies: 2. **Shebang line** present (`#!/usr/bin/env bash` or `#!/usr/bin/env python3`). 3. **Exit code hygiene**: bash scripts use only `exit 0`, `exit 1`, `exit 2`, `exit 3`. 4. **No interactive prompts** in default path (unless `--interactive` gated). -5. **Artifact naming**: any `.sisyphus/` paths in the script follow the naming contract. +5. **Artifact naming**: any explicit report paths in scripts must target `.reports/` and follow the naming contract. 6. **Non-empty**: scripts classified as validators/fixers have >= 20 lines of code. 
Scripts not classified as validators or fixers (libraries, orchestrators) are diff --git a/flexcore b/flexcore index c0d15aae3..7b12d4b7a 160000 --- a/flexcore +++ b/flexcore @@ -1 +1 @@ -Subproject commit c0d15aae3a4742c84f2c3e76ad1bc4559597ff60 +Subproject commit 7b12d4b7a062c04e8e4b803d50b19061e28b4cbb diff --git a/flext-api b/flext-api index 3c37fbcab..37d68b5de 160000 --- a/flext-api +++ b/flext-api @@ -1 +1 @@ -Subproject commit 3c37fbcabb77d0388bf8702de2a94cc82fb42160 +Subproject commit 37d68b5dee4fa1ea5aaf7c72b20b438e946f676c diff --git a/flext-auth b/flext-auth index f42b88f77..566608791 160000 --- a/flext-auth +++ b/flext-auth @@ -1 +1 @@ -Subproject commit f42b88f775f6d66af9d5047aa4a313774429ec24 +Subproject commit 56660879174d07332ff06edde280864e1b7aa33e diff --git a/flext-cli b/flext-cli index 1f53985c7..5e65b95d1 160000 --- a/flext-cli +++ b/flext-cli @@ -1 +1 @@ -Subproject commit 1f53985c7443557df12dbe4e3c137f93838696b6 +Subproject commit 5e65b95d16eff8dbf19a4655263622b982b61d54 diff --git a/flext-core b/flext-core index 8c623357d..a474573ba 160000 --- a/flext-core +++ b/flext-core @@ -1 +1 @@ -Subproject commit 8c623357d342802d10d652e7b0d32530952a2b6e +Subproject commit a474573ba3e518e8e7d1590800ae07ca4e2779da diff --git a/flext-db-oracle b/flext-db-oracle index c969f98f8..0c02a456e 160000 --- a/flext-db-oracle +++ b/flext-db-oracle @@ -1 +1 @@ -Subproject commit c969f98f804848d9474a2d364150c107714ce41d +Subproject commit 0c02a456eeac940679251c934bd858410affb4b7 diff --git a/flext-dbt-ldap b/flext-dbt-ldap index 93e8c96aa..6d5b62e7c 160000 --- a/flext-dbt-ldap +++ b/flext-dbt-ldap @@ -1 +1 @@ -Subproject commit 93e8c96aa428b7fca17b9a512ce3016977c4d132 +Subproject commit 6d5b62e7c0fc01caacbc90c9ed242836c1c0485b diff --git a/flext-dbt-ldif b/flext-dbt-ldif index 4b6861c9d..5ab5a2cb4 160000 --- a/flext-dbt-ldif +++ b/flext-dbt-ldif @@ -1 +1 @@ -Subproject commit 4b6861c9de0e1a21f56a78517ac4665caa360299 +Subproject commit 
5ab5a2cb40a26d9afbf1e2713b952c9053b847e7 diff --git a/flext-dbt-oracle b/flext-dbt-oracle index acdb58191..d36174673 160000 --- a/flext-dbt-oracle +++ b/flext-dbt-oracle @@ -1 +1 @@ -Subproject commit acdb58191dabb2b8b90afe15639e316bd14c6e89 +Subproject commit d36174673293bf2748c8d681741523aa10e0aa6c diff --git a/flext-dbt-oracle-wms b/flext-dbt-oracle-wms index 50cba2741..dfa89830a 160000 --- a/flext-dbt-oracle-wms +++ b/flext-dbt-oracle-wms @@ -1 +1 @@ -Subproject commit 50cba2741916e554aaecc8ee5fd214195f12d92d +Subproject commit dfa89830ab65cb622a87b633a7d156a98fd4e964 diff --git a/flext-grpc b/flext-grpc index 96b431dc9..f9d5613e3 160000 --- a/flext-grpc +++ b/flext-grpc @@ -1 +1 @@ -Subproject commit 96b431dc9235b06acb6a11a82e228cbfaa444464 +Subproject commit f9d5613e3dc6ef5969d935063fab9adc52d5399b diff --git a/flext-ldap b/flext-ldap index a58990bfd..36ceb892b 160000 --- a/flext-ldap +++ b/flext-ldap @@ -1 +1 @@ -Subproject commit a58990bfd6c423d791284fac7ef17896874f4548 +Subproject commit 36ceb892b457785465a8d390f067daf52a2baa00 diff --git a/flext-ldif b/flext-ldif index 8d0bc5b1f..291015f3d 160000 --- a/flext-ldif +++ b/flext-ldif @@ -1 +1 @@ -Subproject commit 8d0bc5b1f1961183860c19e2b8f84949f428f78b +Subproject commit 291015f3d02ef45cd36d0cb5b20130461d697b3f diff --git a/flext-meltano b/flext-meltano index 8eef05c93..0a3a4386d 160000 --- a/flext-meltano +++ b/flext-meltano @@ -1 +1 @@ -Subproject commit 8eef05c93514fa6d9ffc132ea502f8108eec38ff +Subproject commit 0a3a4386d5e25d8a2b1f9896db76e3dc25dd2320 diff --git a/flext-observability b/flext-observability index 05625c287..af808de82 160000 --- a/flext-observability +++ b/flext-observability @@ -1 +1 @@ -Subproject commit 05625c28731c9764610e2233f84ac5216e847f78 +Subproject commit af808de828f7fbb29b3023ae122ec9b2122ec210 diff --git a/flext-oracle-oic b/flext-oracle-oic index 872171209..7e6286dd5 160000 --- a/flext-oracle-oic +++ b/flext-oracle-oic @@ -1 +1 @@ -Subproject commit 
872171209ae6bd11e6873577fc530108b896a236 +Subproject commit 7e6286dd5f9b279e6fc2b6c8953ef722fc28137b diff --git a/flext-oracle-wms b/flext-oracle-wms index fe3b94c6f..3ef7d7065 160000 --- a/flext-oracle-wms +++ b/flext-oracle-wms @@ -1 +1 @@ -Subproject commit fe3b94c6f87e59fedab28e862897de141aa30888 +Subproject commit 3ef7d7065a8a5f3120caa114c5e0272ecaf1038c diff --git a/flext-plugin b/flext-plugin index d19fed440..9c405c1e7 160000 --- a/flext-plugin +++ b/flext-plugin @@ -1 +1 @@ -Subproject commit d19fed4403e91068d9492788932a591aef67eb11 +Subproject commit 9c405c1e7ff81c88bef1887057c91e4666f268bf diff --git a/flext-quality b/flext-quality index 86e86e5f0..e942a599e 160000 --- a/flext-quality +++ b/flext-quality @@ -1 +1 @@ -Subproject commit 86e86e5f06a7101076f9fd5ba7a9240f041ac36c +Subproject commit e942a599e1da20e37272f75f89504d8d1dc8557a diff --git a/flext-tap-ldap b/flext-tap-ldap index d9b25f6e2..7526648a8 160000 --- a/flext-tap-ldap +++ b/flext-tap-ldap @@ -1 +1 @@ -Subproject commit d9b25f6e2d4094de3a912fa6e98fbe649ef2de50 +Subproject commit 7526648a86c4ec48566a455b863263db8a5bfe3e diff --git a/flext-tap-ldif b/flext-tap-ldif index 8cc3e196b..1e7855c4e 160000 --- a/flext-tap-ldif +++ b/flext-tap-ldif @@ -1 +1 @@ -Subproject commit 8cc3e196b0862e0b90b6ad6b636ba95381fbfaf6 +Subproject commit 1e7855c4ed4a2e7ecd3f1362883fc72837f35bd5 diff --git a/flext-tap-oracle b/flext-tap-oracle index 6eda99113..d6c524993 160000 --- a/flext-tap-oracle +++ b/flext-tap-oracle @@ -1 +1 @@ -Subproject commit 6eda991136cef1828031e77fb3d3418a8039a0d3 +Subproject commit d6c5249938df7419db4c11e0b4a51cbe4de6de8d diff --git a/flext-tap-oracle-oic b/flext-tap-oracle-oic index 5276a875c..c9bc050ef 160000 --- a/flext-tap-oracle-oic +++ b/flext-tap-oracle-oic @@ -1 +1 @@ -Subproject commit 5276a875cbf71147ae1d20075c975ea7ea9374b9 +Subproject commit c9bc050ef90ad765362744826368b32b347f45f2 diff --git a/flext-tap-oracle-wms b/flext-tap-oracle-wms index 7b09b98f0..288b0374c 160000 --- 
a/flext-tap-oracle-wms +++ b/flext-tap-oracle-wms @@ -1 +1 @@ -Subproject commit 7b09b98f0959a33d23ad8b1f1b319d698540421c +Subproject commit 288b0374c7468b42ca926a715217bd1db0871d6b diff --git a/flext-target-ldap b/flext-target-ldap index 7c3830b50..a65f13fb2 160000 --- a/flext-target-ldap +++ b/flext-target-ldap @@ -1 +1 @@ -Subproject commit 7c3830b501570299b321e163ca0508f9af90ea56 +Subproject commit a65f13fb2e111d4cb977136ac10c62c71d4c169b diff --git a/flext-target-ldif b/flext-target-ldif index 95baa67db..a2e176503 160000 --- a/flext-target-ldif +++ b/flext-target-ldif @@ -1 +1 @@ -Subproject commit 95baa67db5688be6d708a7fa0fcb67920326197e +Subproject commit a2e176503df1321b3f3d4437a6febf6860d93ced diff --git a/flext-target-oracle b/flext-target-oracle index 08c56b133..07228747f 160000 --- a/flext-target-oracle +++ b/flext-target-oracle @@ -1 +1 @@ -Subproject commit 08c56b133364b302b83405b434296c990e5232ab +Subproject commit 07228747ff6c41ebb9eaaea4f25dee51a8b47c72 diff --git a/flext-target-oracle-oic b/flext-target-oracle-oic index ef94e2dce..52514a169 160000 --- a/flext-target-oracle-oic +++ b/flext-target-oracle-oic @@ -1 +1 @@ -Subproject commit ef94e2dced3ef111e8039c0aeca62d8b7e5d232c +Subproject commit 52514a1699da146e03ffcecfcb0bd46b9daedb3d diff --git a/flext-target-oracle-wms b/flext-target-oracle-wms index eddb6571f..c2ec5e67b 160000 --- a/flext-target-oracle-wms +++ b/flext-target-oracle-wms @@ -1 +1 @@ -Subproject commit eddb6571f5a618cacedc89778045e273f2491788 +Subproject commit c2ec5e67bf9b841a915721b810018ec34e730802 diff --git a/flext-web b/flext-web index 04246654f..3b1787fe9 160000 --- a/flext-web +++ b/flext-web @@ -1 +1 @@ -Subproject commit 04246654f89a462dde0f3e167d4eb2a6be0e6975 +Subproject commit 3b1787fe9bc889ead2eb2b85d4736b34f96c6d0c diff --git a/poetry.lock b/poetry.lock index 818fd3db0..a1654ca4d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -355,15 +355,15 @@ cryptography = "*" [[package]] name = "autoflake" -version = "2.3.2" 
+version = "2.3.3" description = "Removes unused imports and unused variables" optional = true python-versions = ">=3.10" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "autoflake-2.3.2-py3-none-any.whl", hash = "sha256:4270b06ad5eb754d6b1b3cea51f195dab85f35a55afdb05c5d7bc96679dbf866"}, - {file = "autoflake-2.3.2.tar.gz", hash = "sha256:73d3b22bad89034879f7a4871c279c8d189b3f2c0b9d9e274b8e5b468c17f9a0"}, + {file = "autoflake-2.3.3-py3-none-any.whl", hash = "sha256:a51a3412aff16135ee5b3ec25922459fef10c1f23ce6d6c4977188df859e8b53"}, + {file = "autoflake-2.3.3.tar.gz", hash = "sha256:c24809541e23999f7a7b0d2faadf15deb0bc04cdde49728a2fd943a0c8055504"}, ] [package.dependencies] @@ -1656,14 +1656,14 @@ xml-validation = ["lxml (>=4,<7)"] [[package]] name = "cyclopts" -version = "4.5.3" +version = "4.5.4" description = "Intuitive, easy CLIs based on type hints." optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "cyclopts-4.5.3-py3-none-any.whl", hash = "sha256:50af3085bb15d4a6f2582dd383dad5e4ba6a0d4d4c64ee63326d881a752a6919"}, - {file = "cyclopts-4.5.3.tar.gz", hash = "sha256:35fa70971204c450d9668646a6ca372eb5fa3070fbe8dd51c5b4b31e65198f2d"}, + {file = "cyclopts-4.5.4-py3-none-any.whl", hash = "sha256:ad001986ec403ca1dc1ed20375c439d62ac796295ea32b451dfe25d6696bc71a"}, + {file = "cyclopts-4.5.4.tar.gz", hash = "sha256:eed4d6c76d4391aa796d8fcaabd50e5aad7793261792beb19285f62c5c456c8b"}, ] [package.dependencies] @@ -3719,41 +3719,6 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil", "setuptools"] -[[package]] -name = "griffe" -version = "2.0.0" -description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
-optional = true -python-versions = ">=3.10" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "griffe-2.0.0-py3-none-any.whl", hash = "sha256:5418081135a391c3e6e757a7f3f156f1a1a746cc7b4023868ff7d5e2f9a980aa"}, -] - -[package.dependencies] -griffecli = "2.0.0" -griffelib = "2.0.0" - -[package.extras] -pypi = ["griffelib[pypi] (==2.0.0)"] - -[[package]] -name = "griffecli" -version = "2.0.0" -description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." -optional = true -python-versions = ">=3.10" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "griffecli-2.0.0-py3-none-any.whl", hash = "sha256:9f7cd9ee9b21d55e91689358978d2385ae65c22f307a63fb3269acf3f21e643d"}, -] - -[package.dependencies] -colorama = ">=0.4" -griffelib = "2.0.0" - [[package]] name = "griffelib" version = "2.0.0" @@ -3771,154 +3736,154 @@ pypi = ["pip (>=24.0)", "platformdirs (>=4.2)", "wheel (>=0.42)"] [[package]] name = "grpcio" -version = "1.78.0" +version = "1.78.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5"}, - {file = "grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2"}, - {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d"}, - {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb"}, - {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7"}, - {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec"}, - {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a"}, - {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813"}, - {file = "grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de"}, - {file = "grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf"}, - {file = "grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6"}, - {file = "grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e"}, - {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911"}, - {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e"}, - {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303"}, - {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04"}, - {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec"}, - {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074"}, - {file = "grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856"}, - {file = "grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558"}, - {file = "grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97"}, - {file = "grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e"}, - {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996"}, - {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7"}, - {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9"}, - {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383"}, - {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6"}, - {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce"}, - {file = "grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68"}, - {file = "grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e"}, - {file = "grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b"}, - {file = 
"grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a"}, - {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84"}, - {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb"}, - {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5"}, - {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9"}, - {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702"}, - {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20"}, - {file = "grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670"}, - {file = "grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4"}, - {file = "grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e"}, - {file = "grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f"}, - {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724"}, - {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b"}, - {file = 
"grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7"}, - {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452"}, - {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127"}, - {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65"}, - {file = "grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c"}, - {file = "grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb"}, - {file = "grpcio-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:86f85dd7c947baa707078a236288a289044836d4b640962018ceb9cd1f899af5"}, - {file = "grpcio-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:de8cb00d1483a412a06394b8303feec5dcb3b55f81d83aa216dbb6a0b86a94f5"}, - {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e888474dee2f59ff68130f8a397792d8cb8e17e6b3434339657ba4ee90845a8c"}, - {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:86ce2371bfd7f212cf60d8517e5e854475c2c43ce14aa910e136ace72c6db6c1"}, - {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b0c689c02947d636bc7fab3e30cc3a3445cca99c834dfb77cd4a6cabfc1c5597"}, - {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ce7599575eeb25c0f4dc1be59cada6219f3b56176f799627f44088b21381a28a"}, - {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:684083fd383e9dc04c794adb838d4faea08b291ce81f64ecd08e4577c7398adf"}, - {file = 
"grpcio-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ab399ef5e3cd2a721b1038a0f3021001f19c5ab279f145e1146bb0b9f1b2b12c"}, - {file = "grpcio-1.78.0-cp39-cp39-win32.whl", hash = "sha256:f3d6379493e18ad4d39537a82371c5281e153e963cecb13f953ebac155756525"}, - {file = "grpcio-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:5361a0630a7fdb58a6a97638ab70e1dae2893c4d08d7aba64ded28bb9e7a29df"}, - {file = "grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5"}, + {file = "grpcio-1.78.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4393bef64cf26dc07cd6f18eaa5170ae4eebaafd4418e7e3a59ca9526a6fa30b"}, + {file = "grpcio-1.78.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:917047c19cd120b40aab9a4b8a22e9ce3562f4a1343c0d62b3cd2d5199da3d67"}, + {file = "grpcio-1.78.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff7de398bb3528d44d17e6913a7cfe639e3b15c65595a71155322df16978c5e1"}, + {file = "grpcio-1.78.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:15f6e636d1152667ddb4022b37534c161c8477274edb26a0b65b215dd0a81e97"}, + {file = "grpcio-1.78.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:27b5cb669603efb7883a882275db88b6b5d6b6c9f0267d5846ba8699b7ace338"}, + {file = "grpcio-1.78.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:86edb3966778fa05bfdb333688fde5dc9079f9e2a9aa6a5c42e9564b7656ba04"}, + {file = "grpcio-1.78.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:849cc62eb989bc3be5629d4f3acef79be0d0ff15622201ed251a86d17fef6494"}, + {file = "grpcio-1.78.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9a00992d6fafe19d648b9ccb4952200c50d8e36d0cce8cf026c56ed3fdc28465"}, + {file = "grpcio-1.78.1-cp310-cp310-win32.whl", hash = "sha256:f8759a1347f3b4f03d9a9d4ce8f9f31ad5e5d0144ba06ccfb1ffaeb0ba4c1e20"}, + {file = "grpcio-1.78.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:e840405a3f1249509892be2399f668c59b9d492068a2cf326d661a8c79e5e747"}, + {file = "grpcio-1.78.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:3a8aa79bc6e004394c0abefd4b034c14affda7b66480085d87f5fbadf43b593b"}, + {file = "grpcio-1.78.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8e1fcb419da5811deb47b7749b8049f7c62b993ba17822e3c7231e3e0ba65b79"}, + {file = "grpcio-1.78.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b071dccac245c32cd6b1dd96b722283b855881ca0bf1c685cf843185f5d5d51e"}, + {file = "grpcio-1.78.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:d6fb962947e4fe321eeef3be1ba5ba49d32dea9233c825fcbade8e858c14aaf4"}, + {file = "grpcio-1.78.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6afd191551fd72e632367dfb083e33cd185bf9ead565f2476bba8ab864ae496"}, + {file = "grpcio-1.78.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b2acd83186305c0802dbc4d81ed0ec2f3e8658d7fde97cfba2f78d7372f05b89"}, + {file = "grpcio-1.78.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5380268ab8513445740f1f77bd966d13043d07e2793487e61fd5b5d0935071eb"}, + {file = "grpcio-1.78.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:389b77484959bdaad6a2b7dda44d7d1228381dd669a03f5660392aa0e9385b22"}, + {file = "grpcio-1.78.1-cp311-cp311-win32.whl", hash = "sha256:9dee66d142f4a8cca36b5b98a38f006419138c3c89e72071747f8fca415a6d8f"}, + {file = "grpcio-1.78.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b930cf4f9c4a2262bb3e5d5bc40df426a72538b4f98e46f158b7eb112d2d70"}, + {file = "grpcio-1.78.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:41e4605c923e0e9a84a2718e4948a53a530172bfaf1a6d1ded16ef9c5849fca2"}, + {file = "grpcio-1.78.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:39da1680d260c0c619c3b5fa2dc47480ca24d5704c7a548098bca7de7f5dd17f"}, + {file = "grpcio-1.78.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:b5d5881d72a09b8336a8f874784a8eeffacde44a7bc1a148bce5a0243a265ef0"}, + {file = "grpcio-1.78.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:888ceb7821acd925b1c90f0cdceaed1386e69cfe25e496e0771f6c35a156132f"}, + {file = "grpcio-1.78.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8942bdfc143b467c264b048862090c4ba9a0223c52ae28c9ae97754361372e42"}, + {file = "grpcio-1.78.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:716a544969660ed609164aff27b2effd3ff84e54ac81aa4ce77b1607ca917d22"}, + {file = "grpcio-1.78.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d50329b081c223d444751076bb5b389d4f06c2b32d51b31a1e98172e6cecfb9"}, + {file = "grpcio-1.78.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e836778c13ff70edada16567e8da0c431e8818eaae85b80d11c1ba5782eccbb"}, + {file = "grpcio-1.78.1-cp312-cp312-win32.whl", hash = "sha256:07eb016ea7444a22bef465cce045512756956433f54450aeaa0b443b8563b9ca"}, + {file = "grpcio-1.78.1-cp312-cp312-win_amd64.whl", hash = "sha256:02b82dcd2fa580f5e82b4cf62ecde1b3c7cc9ba27b946421200706a6e5acaf85"}, + {file = "grpcio-1.78.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:2b7ad2981550ce999e25ce3f10c8863f718a352a2fd655068d29ea3fd37b4907"}, + {file = "grpcio-1.78.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:409bfe22220889b9906739910a0ee4c197a967c21b8dd14b4b06dd477f8819ce"}, + {file = "grpcio-1.78.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:34b6cb16f4b67eeb5206250dc5b4d5e8e3db939535e58efc330e4c61341554bd"}, + {file = "grpcio-1.78.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:39d21fd30d38a5afb93f0e2e71e2ec2bd894605fb75d41d5a40060c2f98f8d11"}, + {file = "grpcio-1.78.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:09fbd4bcaadb6d8604ed1504b0bdf7ac18e48467e83a9d930a70a7fefa27e862"}, + {file = "grpcio-1.78.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:db681513a1bdd879c0b24a5a6a70398da5eaaba0e077a306410dc6008426847a"}, + {file = "grpcio-1.78.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f81816faa426da461e9a597a178832a351d6f1078102590a4b32c77d251b71eb"}, + {file = "grpcio-1.78.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffbb760df1cd49e0989f9826b2fd48930700db6846ac171eaff404f3cfbe5c28"}, + {file = "grpcio-1.78.1-cp313-cp313-win32.whl", hash = "sha256:1a56bf3ee99af5cf32d469de91bf5de79bdac2e18082b495fc1063ea33f4f2d0"}, + {file = "grpcio-1.78.1-cp313-cp313-win_amd64.whl", hash = "sha256:8991c2add0d8505178ff6c3ae54bd9386279e712be82fa3733c54067aae9eda1"}, + {file = "grpcio-1.78.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:d101fe49b1e0fb4a7aa36ed0c3821a0f67a5956ef572745452d2cd790d723a3f"}, + {file = "grpcio-1.78.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:5ce1855e8cfc217cdf6bcfe0cf046d7cf81ddcc3e6894d6cfd075f87a2d8f460"}, + {file = "grpcio-1.78.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd26048d066b51f39fe9206e2bcc2cea869a5e5b2d13c8d523f4179193047ebd"}, + {file = "grpcio-1.78.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b8d7fda614cf2af0f73bbb042f3b7fee2ecd4aea69ec98dbd903590a1083529"}, + {file = "grpcio-1.78.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:656a5bd142caeb8b1efe1fe0b4434ecc7781f44c97cfc7927f6608627cf178c0"}, + {file = "grpcio-1.78.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:99550e344482e3c21950c034f74668fccf8a546d50c1ecb4f717543bbdc071ba"}, + {file = "grpcio-1.78.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8f27683ca68359bd3f0eb4925824d71e538f84338b3ae337ead2ae43977d7541"}, + {file = "grpcio-1.78.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a40515b69ac50792f9b8ead260f194ba2bb3285375b6c40c7ff938f14c3df17d"}, + {file = "grpcio-1.78.1-cp314-cp314-win32.whl", hash = 
"sha256:2c473b54ef1618f4fb85e82ff4994de18143b74efc088b91b5a935a3a45042ba"}, + {file = "grpcio-1.78.1-cp314-cp314-win_amd64.whl", hash = "sha256:e2a6b33d1050dce2c6f563c5caf7f7cbeebf7fba8cde37ffe3803d50526900d1"}, + {file = "grpcio-1.78.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:559f58b6823e1abc38f82e157800aff649146f8906f7998c356cd48ae274d512"}, + {file = "grpcio-1.78.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:36aeff5ba8aaf70ceb2cbf6cbba9ad6beef715ad744841f3e0cd977ec02e5966"}, + {file = "grpcio-1.78.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0fa9943d4c7f4a14a9a876153a4e8ee2bb20a410b65c09f31510b2a42271f41b"}, + {file = "grpcio-1.78.1-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:75fa92c47d048d696f12b81a775316fca68385ffc6e6cb1ed1d76c8562579f74"}, + {file = "grpcio-1.78.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ca6aebae928383e971d5eace4f1a217fd7aadaf18d5ddd3163d80354105e9068"}, + {file = "grpcio-1.78.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5572c5dd1e43dbb452b466be9794f77e3502bdb6aa6a1a7feca72c98c5085ca7"}, + {file = "grpcio-1.78.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e49e720cd6b092504ec7bb2f60eb459aaaf4ce0e5fe20521c201b179e93b5d5d"}, + {file = "grpcio-1.78.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ebeec1383aed86530a5f39646984e92d6596c050629982ac54eeb4e2f6ead668"}, + {file = "grpcio-1.78.1-cp39-cp39-win32.whl", hash = "sha256:263307118791bc350f4642749a9c8c2d13fec496228ab11070973e568c256bfd"}, + {file = "grpcio-1.78.1-cp39-cp39-win_amd64.whl", hash = "sha256:13937b28986f45fee342806b07c6344db785ad74a549ebcb00c659142973556f"}, + {file = "grpcio-1.78.1.tar.gz", hash = "sha256:27c625532d33ace45d57e775edf1982e183ff8641c72e4e91ef7ba667a149d72"}, ] [package.dependencies] typing-extensions = ">=4.12,<5.0" [package.extras] -protobuf = ["grpcio-tools (>=1.78.0)"] +protobuf = ["grpcio-tools (>=1.78.1)"] [[package]] name = "grpcio-tools" 
-version = "1.78.0" +version = "1.78.1" description = "Protobuf code generator for gRPC" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "grpcio_tools-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:ea64e38d1caa2b8468b08cb193f5a091d169b6dbfe1c7dac37d746651ab9d84e"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:4003fcd5cbb5d578b06176fd45883a72a8f9203152149b7c680ce28653ad9e3a"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe6b0081775394c61ec633c9ff5dbc18337100eabb2e946b5c83967fe43b2748"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7e989ad2cd93db52d7f1a643ecaa156ac55bf0484f1007b485979ce8aef62022"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b874991797e96c41a37e563236c3317ed41b915eff25b292b202d6277d30da85"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8c288b728228377aaf758925692fc6068939d9fa32f92ca13dedcbeb41f33"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:87e648759b06133199f4bc0c0053e3819f4ec3b900dc399e1097b6065db998b5"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f3d3ced52bfe39eba3d24f5a8fab4e12d071959384861b41f0c52ca5399d6920"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-win32.whl", hash = "sha256:4bb6ed690d417b821808796221bde079377dff98fdc850ac157ad2f26cda7a36"}, - {file = "grpcio_tools-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c676d8342fd53bd85a5d5f0d070cd785f93bc040510014708ede6fcb32fada1"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:6a8b8b7b49f319d29dbcf507f62984fa382d1d10437d75c3f26db5f09c4ac0af"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:d62cf3b68372b0c6d722a6165db41b976869811abeabc19c8522182978d8db10"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fa9056742efeaf89d5fe14198af71e5cbc4fbf155d547b89507e19d6025906c6"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e3191af125dcb705aa6bc3856ba81ba99b94121c1b6ebee152e66ea084672831"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:283239ddbb67ae83fac111c61b25d8527a1dbd355b377cbc8383b79f1329944d"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ac977508c0db15301ef36d6c79769ec1a6cc4e3bc75735afca7fe7e360cead3a"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4ff605e25652a0bd13aa8a73a09bc48669c68170902f5d2bf1468a57d5e78771"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0197d7b561c79be78ab93d0fe2836c8def470683df594bae3ac89dd8e5c821b2"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-win32.whl", hash = "sha256:28f71f591f7f39555863ced84fcc209cbf4454e85ef957232f43271ee99af577"}, - {file = "grpcio_tools-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a6de495dabf86a3b40b9a7492994e1232b077af9d63080811838b781abbe4e8"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:9eb122da57d4cad7d339fc75483116f0113af99e8d2c67f3ef9cae7501d806e4"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d0c501b8249940b886420e6935045c44cb818fa6f265f4c2b97d5cff9cb5e796"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:77e5aa2d2a7268d55b1b113f958264681ef1994c970f69d48db7d4683d040f57"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:8e3c0b0e6ba5275322ba29a97bf890565a55f129f99a21b121145e9e93a22525"}, - {file = 
"grpcio_tools-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:975d4cb48694e20ebd78e1643e5f1cd94cdb6a3d38e677a8e84ae43665aa4790"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:553ff18c5d52807dedecf25045ae70bad7a3dbba0b27a9a3cdd9bcf0a1b7baec"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8c7f5e4af5a84d2e96c862b1a65e958a538237e268d5f8203a3a784340975b51"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:96183e2b44afc3f9a761e9d0f985c3b44e03e8bb98e626241a6cbfb3b6f7e88f"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-win32.whl", hash = "sha256:2250e8424c565a88573f7dc10659a0b92802e68c2a1d57e41872c9b88ccea7a6"}, - {file = "grpcio_tools-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:217d1fa29de14d9c567d616ead7cb0fef33cde36010edff5a9390b00d52e5094"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2d6de1cc23bdc1baafc23e201b1e48c617b8c1418b4d8e34cebf72141676e5fb"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2afeaad88040894c76656202ff832cb151bceb05c0e6907e539d129188b1e456"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:33cc593735c93c03d63efe7a8ba25f3c66f16c52f0651910712490244facad72"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2921d7989c4d83b71f03130ab415fa4d66e6693b8b8a1fcbb7a1c67cff19b812"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6a0df438e82c804c7b95e3f311c97c2f876dcc36376488d5b736b7bcf5a9b45"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9c6070a9500798225191ef25d0055a15d2c01c9c8f2ee7b681fffa99c98c822"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:394e8b57d85370a62e5b0a4d64c96fcf7568345c345d8590c821814d227ecf1d"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3ef700293ab375e111a2909d87434ed0a0b086adf0ce67a8d9cf12ea7765e63"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-win32.whl", hash = "sha256:6993b960fec43a8d840ee5dc20247ef206c1a19587ea49fe5e6cc3d2a09c1585"}, - {file = "grpcio_tools-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:275ce3c2978842a8cf9dd88dce954e836e590cf7029649ad5d1145b779039ed5"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:8b080d0d072e6032708a3a91731b808074d7ab02ca8fb9847b6a011fdce64cd9"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8c0ad8f8f133145cd7008b49cb611a5c6a9d89ab276c28afa17050516e801f79"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2f8ea092a7de74c6359335d36f0674d939a3c7e1a550f4c2c9e80e0226de8fe4"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:da422985e0cac822b41822f43429c19ecb27c81ffe3126d0b74e77edec452608"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4fab1faa3fbcb246263e68da7a8177d73772283f9db063fb8008517480888d26"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dd9c094f73f734becae3f20f27d4944d3cd8fb68db7338ee6c58e62fc5c3d99f"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:2ed51ce6b833068f6c580b73193fc2ec16468e6bc18354bc2f83a58721195a58"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:05803a5cdafe77c8bdf36aa660ad7a6a1d9e49bc59ce45c1bade2a4698826599"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-win32.whl", hash = "sha256:f7c722e9ce6f11149ac5bddd5056e70aaccfd8168e74e9d34d8b8b588c3f5c7c"}, - {file = "grpcio_tools-1.78.0-cp314-cp314-win_amd64.whl", hash = 
"sha256:7d58ade518b546120ec8f0a8e006fc8076ae5df151250ebd7e82e9b5e152c229"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:30b1eef2afb6f2c3deb94525d60aedfea807d4937b5e23ad72600e3f8cd1c768"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:c70b07b2610db3743d831700301eb17a9e1de2818d1f36ad53cb5b8b593a5749"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f6d53392eb0f758eaa9ecfa6f9aab1e1f3c9db117a4242c802a30363fdc404d2"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:638fa11b4731dce2c662f685c3be0489246e8d2306654eb26ebd71e6a24c4b70"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21b31c87cef35af124f1cfb105614725b462656d2684f59d05a6210266b17b9e"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b81b4cf356272512172a604d4467af9b373de69cd69e1ac163fb41f7dac33099"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c8ceb32cd818e40739529b3c3143a30c899c247db22a6275c4798dece9a4ae7"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1872d01f984c85ee49ce581fcaffbcc9c792692b4b5ebf9bba4358fc895c316a"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-win32.whl", hash = "sha256:4eff49de5f8f320ed2a69bbb6bfe512175b1762d736cfce28aca0129939f7252"}, - {file = "grpcio_tools-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:6ddf7e7a7d069e7287b9cb68937102efe1686e63117a162d01578ac2839b4acd"}, - {file = "grpcio_tools-1.78.0.tar.gz", hash = "sha256:4b0dd86560274316e155d925158276f8564508193088bc43e20d3f5dff956b2b"}, -] - -[package.dependencies] -grpcio = ">=1.78.0" + {file = "grpcio_tools-1.78.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:ec86147000d713bcf5116350607b16b488432fcae89e7fbb6ac4d388c241273b"}, + {file = 
"grpcio_tools-1.78.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:ecb698ba221b279356590d65e456d2a3ba63b1668515c85c5a340bf98399acb7"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:77b8f61e5b6b774521875595d5f978dbd534086bc39205126345c7459cf18a44"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:6080c1541487071c6e2763be5ffee452139a919dc5fc9e0eaeca9737af913337"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:086cda613dc3a5b58ebd0852273fa76498d61e5296710654d66861309ea30faa"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:35668bc67bb5600d3f72e9cfbbe15a2ad2f616013b0598877a06396e7de3fa2f"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:63d578f37a6ccad7f61b1da29b219005874c097664a78967f8b60637f6f3f567"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c05507d90035e8c0b9617d2ff5c888b7e93e47c111e7880d8a2d190ca5734622"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-win32.whl", hash = "sha256:c7a33d981d33b54183e2fa872a4abea632396ef824ca60c268ce50e2fdb9d930"}, + {file = "grpcio_tools-1.78.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e8cdf8f75d24a70511b3db9e4e09cd7a4420ff1a3707e30cefc08f4be189e1f"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:ec4483749c7174c301a554191f6a9b28e2388636736a21886fe20025137cdaa5"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a81b30b0981cc64853bf28daa4d45f2ce8e4da47d831186a509c05660f23b133"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d6406f04b93e48ae3b4dca8f9f312f345265502dc54408056796813c1877f98a"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = 
"sha256:f46fa1430958fe93082d361711e261a482d5a505a9928bc28f7df3fb432d7203"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a5fbe7d04212248a94acfea86460f1e249f0e42b636de4e71ad518aaf7b24cc9"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e886a3f3284fbff5b4a5c0299427b42df1e1ad6ec9c88c41cfe94557ac191a34"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e1e19c3cb8c4bbfcc20c74b6ef50bd2fb18f82593e65c5b031a92f6794ab9a6d"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b0abde2cd28a5925da36776977064e0fe9be667a96ea454acad1eabc3eb7ec48"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-win32.whl", hash = "sha256:a62857bdd681469f7ea603078187399aa8bd8cd7bdeeb603497c993a06d0bb8d"}, + {file = "grpcio_tools-1.78.1-cp311-cp311-win_amd64.whl", hash = "sha256:e33de930d02e16d28a2e06d2a629cd5be18c0f386e8bc6c483b073f8898c283c"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:2fd5b9ba19849afb511f05f9eaf621aaf21d8582b06d23179b31fb72f2b0add1"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3e2148b0b15dea87d2fea17d1eda3ae0cdc6dd378fe75903f17515cbb6e5f4a3"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58714482282ba4f6ebe550a43284b3383761e7bf1c1cafa009740d4b20cfc5fd"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3edc65d8d547e2c3e90937896bce58f1a4187b45a5ac2d97c84d0501c917c6e7"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa5720e07b81e82107c33f1951572f4371b668933da110418146e8fe51813ec"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d665399893f79dfce1018143602b1e53cc6434cb919b141ad5ce9d09d25b6c88"}, + {file = 
"grpcio_tools-1.78.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3ab437967bd61034b278ca1043a5f2f70ab3a8b45f2531b4295ffc7da27893c9"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:63c91efb22a6977111bbde16f58e393ab75f1f4ff95850abc24fd279402a02f7"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-win32.whl", hash = "sha256:7ecc57c2a82a7f67d07c1491eea39aec9660306a8b67b7b0116ade52c3466297"}, + {file = "grpcio_tools-1.78.1-cp312-cp312-win_amd64.whl", hash = "sha256:7e465bf6e49c8d3905997b079d4cab233cd1e0ad558aa3b93ce074172ad75fa1"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:3ad2cfae254f965776e296635d0ef96bdb2e6fde54c3d8e0f1ed98161ec00a8f"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:b5d4c75fa44d560e694b65b19df3d7e73d89c2bf9e2d7b672a9e650f40ca33df"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:63e87dd399a4071c0cfdf131cf382a7c3859f2bee9cff8ec996dd8dea3e3afbb"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08704fd6df74dd95c28a2c095f59e10aec61abe64e2c44f1109d725f728688ba"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e53faada7c186ae5a46b236a4961284c45f9eb069888c651021346f9360d58e0"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99479dfa64faa8ed887df22a1489e6bd4027e38efdad7de9fdc6038e67569f0a"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e0335ba2e6b903b9156151a49d03e74d2876259d5233ac97de53b4c847a56000"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d54640c46d496ed9367caaa36a5742adca9b215ea06cf6714dcf1aa190a43b6d"}, + {file = "grpcio_tools-1.78.1-cp313-cp313-win32.whl", hash = "sha256:df604903f86adae37eb90f4168db13090f723b3602bac89519aff451aea46ea3"}, + 
{file = "grpcio_tools-1.78.1-cp313-cp313-win_amd64.whl", hash = "sha256:7f4469a91556442330aad0710ffc16a853681e1aa7c0752b2db2e8255c872897"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:11c6a338c227e5aab76954f35959682d59c432a5b6d7db053fa1a99c7124bbde"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:090aeaa053f728539d0f84658bb5d88411a913cbcc49e990b5a80acd3c46dc94"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:203347a50b00e6a1793c35af437a39449b247b9461a9f1f9b9baf954b4255cd8"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7c3cef48b10cccfc039b5ae054d7ad8d7b907ff03a283b606b3999ce3843b5a5"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:abb2aee19b91d619670a3598faaa8036b31dd96708ab82d8fb990da4b5c3fc01"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b6307ce936cd5f7714bba75e8b7c71f4e6a4da625b907960227568022ee812fa"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:40aad3da94bf261792ff998084117f6ce092b7b137dcea257628def834b91e96"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:36dbd00415376a3db03cd57a8063dfb5506c3ec69737945488f6c28a3e8b5cf1"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-win32.whl", hash = "sha256:6d284037ff456842324fa12b0a6455fce0b3ab92f218677b34c33cf4787a54c4"}, + {file = "grpcio_tools-1.78.1-cp314-cp314-win_amd64.whl", hash = "sha256:acb9849783dc7cf0e7359cbd60c6bf3154008bf9aeff12c696ec7289599eb3a8"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d10c27480bf3bf729de01979f3569aa128f5e5685c4cbed69c9bdbb200d62bcc"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:fb484640c759ab94d68f8944e00db94e590714e3d5a5a492816e19e3e8e25334"}, + 
{file = "grpcio_tools-1.78.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ce974c88b36f3354574b77df26c9530efd90476f2bb7f71a2bc21096c235d931"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7c14f58e9d4fc0fefe215862b52bcc79fefc1085e33d938b5070663985bed8e2"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:eb7ed3e69e6cbc13a10c1651bc59c8255cef56b35b2248211c341a0dd84d80fe"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f617dd5a74fcd15f333254edb55695ea439dfdf00a0a3fe157e020be13572bca"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a8b7119294179b409cd78a072d93c233533cc12067be16dca4398e1f20627289"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cd0aec543a809ab8eeeec35c66f7b86be01d669799ffac7e6c2cca5644e996c7"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-win32.whl", hash = "sha256:8736a6e33f077183593ade99064d7e7e6d77874ef0d6001f214ba909e4112c5e"}, + {file = "grpcio_tools-1.78.1-cp39-cp39-win_amd64.whl", hash = "sha256:e84f4a25e99fe5c6223142cae0ad18224759b4d11bd1ba35f47532f2c11a3cce"}, + {file = "grpcio_tools-1.78.1.tar.gz", hash = "sha256:f47b746b06a940954b9aa86b1824aa4874f068a7ec2d4b407980d202c86a691a"}, +] + +[package.dependencies] +grpcio = ">=1.78.1" protobuf = ">=6.31.1,<7.0.0" setuptools = ">=77.0.1" @@ -4220,20 +4185,19 @@ files = [ [[package]] name = "isort" -version = "7.0.0" +version = "8.0.0" description = "A Python utility / library to sort Python imports." 
optional = true python-versions = ">=3.10.0" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1"}, - {file = "isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187"}, + {file = "isort-8.0.0-py3-none-any.whl", hash = "sha256:184916a933041c7cf718787f7e52064f3c06272aff69a5cb4dc46497bd8911d9"}, + {file = "isort-8.0.0.tar.gz", hash = "sha256:fddea59202f231e170e52e71e3510b99c373b6e571b55d9c7b31b679c0fed47c"}, ] [package.extras] colors = ["colorama"] -plugins = ["setuptools"] [[package]] name = "itsdangerous" @@ -4476,21 +4440,23 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-path" -version = "0.3.4" +version = "0.4.1" description = "JSONSchema Spec with object-oriented paths" optional = false -python-versions = "<4.0.0,>=3.8.0" +python-versions = "<4.0.0,>=3.10" groups = ["main"] files = [ - {file = "jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8"}, - {file = "jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001"}, + {file = "jsonschema_path-0.4.1-py3-none-any.whl", hash = "sha256:727d8714158c41327908677e6119f9db9d5e0f486d4cc79ca4b4016eee2f33e8"}, + {file = "jsonschema_path-0.4.1.tar.gz", hash = "sha256:ffca3bd37f66364ae3afeaa2804d6078a9ab3b9359ade4dd9923aabbbd475e71"}, ] [package.dependencies] -pathable = ">=0.4.1,<0.5.0" +pathable = ">=0.5.0,<0.6.0" PyYAML = ">=5.1" -referencing = "<0.37.0" -requests = ">=2.31.0,<3.0.0" +referencing = "<0.38.0" + +[package.extras] +requests = ["requests (>=2.31.0,<3.0.0)"] [[package]] name = "jsonschema-specifications" @@ -5795,19 +5761,19 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "2.0.2" +version = 
"2.0.3" description = "A Python handler for mkdocstrings." optional = true python-versions = ">=3.10" groups = ["main"] markers = "extra == \"docs\"" files = [ - {file = "mkdocstrings_python-2.0.2-py3-none-any.whl", hash = "sha256:31241c0f43d85a69306d704d5725786015510ea3f3c4bdfdb5a5731d83cdc2b0"}, - {file = "mkdocstrings_python-2.0.2.tar.gz", hash = "sha256:4a32ccfc4b8d29639864698e81cfeb04137bce76bb9f3c251040f55d4b6e1ad8"}, + {file = "mkdocstrings_python-2.0.3-py3-none-any.whl", hash = "sha256:0b83513478bdfd803ff05aa43e9b1fca9dd22bcd9471f09ca6257f009bc5ee12"}, + {file = "mkdocstrings_python-2.0.3.tar.gz", hash = "sha256:c518632751cc869439b31c9d3177678ad2bfa5c21b79b863956ad68fc92c13b8"}, ] [package.dependencies] -griffe = ">=1.13" +griffelib = ">=2.0" mkdocs-autorefs = ">=1.4" mkdocstrings = ">=0.30" @@ -6756,14 +6722,14 @@ testing = ["docopt", "pytest"] [[package]] name = "pathable" -version = "0.4.4" +version = "0.5.0" description = "Object-oriented paths" optional = false -python-versions = "<4.0.0,>=3.7.0" +python-versions = "<4.0,>=3.10" groups = ["main"] files = [ - {file = "pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2"}, - {file = "pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2"}, + {file = "pathable-0.5.0-py3-none-any.whl", hash = "sha256:646e3d09491a6351a0c82632a09c02cdf70a252e73196b36d8a15ba0a114f0a6"}, + {file = "pathable-0.5.0.tar.gz", hash = "sha256:d81938348a1cacb525e7c75166270644782c0fb9c8cecc16be033e71427e0ef1"}, ] [[package]] @@ -7790,22 +7756,22 @@ tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"] [[package]] name = "pylint" -version = "4.0.4" +version = "4.0.5" description = "python code static checker" optional = true python-versions = ">=3.10.0" groups = ["main"] markers = "extra == \"dev\"" files = [ - {file = "pylint-4.0.4-py3-none-any.whl", hash = 
"sha256:63e06a37d5922555ee2c20963eb42559918c20bd2b21244e4ef426e7c43b92e0"}, - {file = "pylint-4.0.4.tar.gz", hash = "sha256:d9b71674e19b1c36d79265b5887bf8e55278cbe236c9e95d22dc82cf044fdbd2"}, + {file = "pylint-4.0.5-py3-none-any.whl", hash = "sha256:00f51c9b14a3b3ae08cff6b2cdd43f28165c78b165b628692e428fb1f8dc2cf2"}, + {file = "pylint-4.0.5.tar.gz", hash = "sha256:8cd6a618df75deb013bd7eb98327a95f02a6fb839205a6bbf5456ef96afb317c"}, ] [package.dependencies] astroid = ">=4.0.2,<=4.1.dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} -isort = ">=5,<5.13 || >5.13,<8" +isort = ">=5,<5.13 || >5.13,<9" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2" tomlkit = ">=0.10.1" @@ -9605,15 +9571,15 @@ full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart [[package]] name = "stevedore" -version = "5.6.0" +version = "5.7.0" description = "Manage dynamic plugins for Python applications" optional = true python-versions = ">=3.10" groups = ["main"] markers = "extra == \"security\"" files = [ - {file = "stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820"}, - {file = "stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945"}, + {file = "stevedore-5.7.0-py3-none-any.whl", hash = "sha256:fd25efbb32f1abb4c9e502f385f0018632baac11f9ee5d1b70f88cc5e22ad4ed"}, + {file = "stevedore-5.7.0.tar.gz", hash = "sha256:31dd6fe6b3cbe921e21dcefabc9a5f1cf848cf538a1f27543721b8ca09948aa3"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 5084eeab5..694225e57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ name = "flext" readme = "README.md" requires-python = ">=3.13,<3.14" - version = "0.10.0-dev" + version = "0.10.0" license = "MIT" [[project.authors]] email = "team@flext.sh" diff --git a/scripts/core/generate_scripts_inventory.py 
b/scripts/core/generate_scripts_inventory.py index 89aa968cc..fc42d16da 100644 --- a/scripts/core/generate_scripts_inventory.py +++ b/scripts/core/generate_scripts_inventory.py @@ -9,8 +9,8 @@ from pathlib import Path -def _artifact_path(directory: str, slug: str) -> Path: - return Path(".sisyphus") / directory / f"scripts-infra--json--{slug}.json" +def _artifact_path(slug: str) -> Path: + return Path(".reports") / f"scripts-infra--json--{slug}.json" def main() -> int: @@ -38,9 +38,9 @@ def main() -> int: external = {"generated_at": datetime.now(UTC).isoformat(), "candidates": []} outputs = { - _artifact_path("reports", "scripts-inventory"): inventory, - _artifact_path("reports", "scripts-wiring"): wiring, - _artifact_path("reports", "external-scripts-candidates"): external, + _artifact_path("scripts-inventory"): inventory, + _artifact_path("scripts-wiring"): wiring, + _artifact_path("external-scripts-candidates"): external, } for path, payload in outputs.items(): path.parent.mkdir(parents=True, exist_ok=True) diff --git a/scripts/dependencies/sync_internal_deps.py b/scripts/dependencies/sync_internal_deps.py index 77c115566..3f1fc85f5 100644 --- a/scripts/dependencies/sync_internal_deps.py +++ b/scripts/dependencies/sync_internal_deps.py @@ -15,6 +15,10 @@ from pathlib import Path GIT_BIN = shutil.which("git") or "git" +GIT_REF_RE = re.compile(r"^[A-Za-z0-9][A-Za-z0-9._/-]{0,127}$") +GITHUB_REPO_URL_RE = re.compile( + r"^(?:git@github\.com:[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+(?:\.git)?|https://github\.com/[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+(?:\.git)?)$" +) def _run_git(args: list[str], cwd: Path) -> subprocess.CompletedProcess[str]: @@ -23,6 +27,20 @@ def _run_git(args: list[str], cwd: Path) -> subprocess.CompletedProcess[str]: ) +def _validate_git_ref(ref_name: str) -> str: + if not GIT_REF_RE.fullmatch(ref_name): + error_msg = f"invalid git ref: {ref_name!r}" + raise RuntimeError(error_msg) + return ref_name + + +def _validate_repo_url(repo_url: str) -> str: + if not 
GITHUB_REPO_URL_RE.fullmatch(repo_url): + error_msg = f"invalid repository URL: {repo_url!r}" + raise RuntimeError(error_msg) + return repo_url + + def _ssh_to_https(url: str) -> str: if url.startswith("git@github.com:"): return f"https://github.com/{url.removeprefix('git@github.com:')}" @@ -76,9 +94,41 @@ def _resolve_ref(project_root: Path) -> str: return "main" +def _is_relative_to(path: Path, parent: Path) -> bool: + try: + path.relative_to(parent) + except ValueError: + return False + return True + + +def _workspace_root_from_env(project_root: Path) -> Path | None: + env_root = os.getenv("FLEXT_WORKSPACE_ROOT") + if not env_root: + return None + candidate = Path(env_root).expanduser().resolve() + if not candidate.exists() or not candidate.is_dir(): + return None + if _is_relative_to(project_root, candidate): + return candidate + return None + + +def _workspace_root_from_parents(project_root: Path) -> Path | None: + for candidate in (project_root, *project_root.parents): + if (candidate / ".gitmodules").exists(): + return candidate + return None + + def _is_workspace_mode(project_root: Path) -> tuple[bool, Path | None]: if os.getenv("FLEXT_STANDALONE") == "1": return False, None + + env_workspace_root = _workspace_root_from_env(project_root) + if env_workspace_root is not None: + return True, env_workspace_root + superproject = _run_git( ["rev-parse", "--show-superproject-working-tree"], project_root ) @@ -86,11 +136,46 @@ def _is_workspace_mode(project_root: Path) -> tuple[bool, Path | None]: value = superproject.stdout.strip() if value: return True, Path(value) - if (project_root / ".gitmodules").exists(): - return True, project_root + heuristic_workspace_root = _workspace_root_from_parents(project_root) + if heuristic_workspace_root is not None: + return True, heuristic_workspace_root + return False, None +def _owner_from_remote_url(remote_url: str) -> str | None: + patterns = ( + r"^git@github\.com:(?P[^/]+)/[^/]+(?:\.git)?$", + 
r"^https://github\.com/(?P[^/]+)/[^/]+(?:\.git)?$", + r"^http://github\.com/(?P[^/]+)/[^/]+(?:\.git)?$", + ) + for pattern in patterns: + match = re.match(pattern, remote_url) + if match: + return match.group("owner") + return None + + +def _infer_owner_from_origin(project_root: Path) -> str | None: + remote = _run_git(["config", "--get", "remote.origin.url"], project_root) + if remote.returncode != 0: + return None + return _owner_from_remote_url(remote.stdout.strip()) + + +def _synthesized_repo_map( + owner: str, repo_names: set[str] +) -> dict[str, dict[str, str]]: + result: dict[str, dict[str, str]] = {} + for repo_name in sorted(repo_names): + ssh_url = f"git@github.com:{owner}/{repo_name}.git" + result[repo_name] = { + "ssh_url": ssh_url, + "https_url": _ssh_to_https(ssh_url), + } + return result + + def _ensure_symlink(target: Path, source: Path) -> None: target.parent.mkdir(parents=True, exist_ok=True) if target.is_symlink() and target.resolve() == source.resolve(): @@ -104,8 +189,15 @@ def _ensure_symlink(target: Path, source: Path) -> None: def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: + safe_repo_url = _validate_repo_url(repo_url) + safe_ref_name = _validate_git_ref(ref_name) dep_path.parent.mkdir(parents=True, exist_ok=True) if not (dep_path / ".git").exists(): + if dep_path.exists() or dep_path.is_symlink(): + if dep_path.is_dir() and not dep_path.is_symlink(): + shutil.rmtree(dep_path) + else: + dep_path.unlink() cloned = subprocess.run( [ GIT_BIN, @@ -113,8 +205,8 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: "--depth", "1", "--branch", - ref_name, - repo_url, + safe_ref_name, + safe_repo_url, str(dep_path), ], text=True, @@ -131,7 +223,7 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: "1", "--branch", "main", - repo_url, + safe_repo_url, str(dep_path), ], text=True, @@ -142,7 +234,7 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> 
None: error_msg = f"clone failed for {dep_path.name}: {fallback.stderr.strip()}" raise RuntimeError(error_msg) print( - f"[sync-deps] warning: {dep_path.name} missing ref '{ref_name}', using 'main'" + f"[sync-deps] warning: {dep_path.name} missing ref '{safe_ref_name}', using 'main'" ) return @@ -151,9 +243,9 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: error_msg = f"fetch failed for {dep_path.name}: {fetch.stderr.strip()}" raise RuntimeError(error_msg) - checkout = _run_git(["checkout", ref_name], dep_path) + checkout = _run_git(["checkout", safe_ref_name], dep_path) if checkout.returncode == 0: - _run_git(["pull", "--ff-only", "origin", ref_name], dep_path) + _run_git(["pull", "--ff-only", "origin", safe_ref_name], dep_path) return fallback_checkout = _run_git(["checkout", "main"], dep_path) @@ -162,7 +254,7 @@ def _ensure_checkout(dep_path: Path, repo_url: str, ref_name: str) -> None: raise RuntimeError(error_msg) _run_git(["pull", "--ff-only", "origin", "main"], dep_path) print( - f"[sync-deps] warning: {dep_path.name} missing ref '{ref_name}', using 'main'" + f"[sync-deps] warning: {dep_path.name} missing ref '{safe_ref_name}', using 'main'" ) @@ -215,11 +307,21 @@ def _main() -> int: repo_map = {**_parse_repo_map(map_file), **repo_map} else: if not map_file.exists(): - error_msg = ( - "missing flext-repo-map.toml for standalone dependency resolution" + owner = _infer_owner_from_origin(project_root) + if owner is None: + error_msg = ( + "missing flext-repo-map.toml for standalone dependency resolution " + "and unable to infer GitHub owner from remote.origin.url" + ) + raise RuntimeError(error_msg) + repo_map = _synthesized_repo_map( + owner, {dep_path.name for dep_path in deps.values()} ) - raise RuntimeError(error_msg) - repo_map = _parse_repo_map(map_file) + print( + f"[sync-deps] warning: using synthesized standalone repo map for owner '{owner}'" + ) + else: + repo_map = _parse_repo_map(map_file) ref_name = 
_resolve_ref(project_root) force_https = ( diff --git a/scripts/github/lint_workflows.py b/scripts/github/lint_workflows.py new file mode 100644 index 000000000..98f29af31 --- /dev/null +++ b/scripts/github/lint_workflows.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import shutil +import subprocess +from pathlib import Path + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument( + "--report", + type=Path, + default=Path(".reports/workflows/actionlint.json"), + ) + _ = parser.add_argument("--strict", type=int, default=0) + return parser.parse_args() + + +def main() -> int: + args = _parse_args() + root = args.root.resolve() + report = args.report if args.report.is_absolute() else root / args.report + report.parent.mkdir(parents=True, exist_ok=True) + + actionlint = shutil.which("actionlint") + if actionlint is None: + payload = { + "status": "skipped", + "reason": "actionlint not installed", + } + report.write_text( + json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + _ = print(f"wrote: {report}") + return 0 + + result = subprocess.run( + [actionlint], + cwd=root, + capture_output=True, + text=True, + check=False, + ) + payload = { + "status": "ok" if result.returncode == 0 else "fail", + "exit_code": result.returncode, + "stdout": result.stdout, + "stderr": result.stderr, + } + report.write_text( + json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + _ = print(f"wrote: {report}") + if result.returncode != 0: + _ = print(result.stdout) + _ = print(result.stderr) + if args.strict == 1: + return result.returncode + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/github/sync_workflows.py b/scripts/github/sync_workflows.py new file mode 100644 index 000000000..b6ba76846 --- /dev/null +++ 
b/scripts/github/sync_workflows.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python3 +# Owner-Skill: .claude/skills/rules-github/SKILL.md +from __future__ import annotations + +import argparse +import json +import sys +from dataclasses import dataclass +from pathlib import Path +from subprocess import CalledProcessError, run + +GENERATED_HEADER = "# Generated by scripts/github/sync_workflows.py - DO NOT EDIT\n" +MANAGED_FILES = {"ci.yml"} + + +@dataclass(frozen=True) +class Operation: + project: str + path: str + action: str + reason: str + + +def _discover_projects(workspace_root: Path) -> list[tuple[str, Path]]: + discover_script = workspace_root / "scripts" / "maintenance" / "_discover.py" + command = [ + sys.executable, + str(discover_script), + "--workspace-root", + str(workspace_root), + "--kind", + "all", + "--format", + "json", + ] + try: + result = run(command, check=True, capture_output=True, text=True) + except CalledProcessError as exc: + message = (exc.stderr or exc.stdout or str(exc)).strip() + raise RuntimeError(f"project discovery failed: {message}") from exc + payload = json.loads(result.stdout) + projects: list[tuple[str, Path]] = [] + for item in payload.get("projects", []): + if not isinstance(item, dict): + continue + name = item.get("name") + path_value = item.get("path") + if not isinstance(name, str) or not isinstance(path_value, str): + continue + projects.append((name, Path(path_value).resolve())) + return projects + + +def _render_template(template_path: Path) -> str: + body = template_path.read_text(encoding="utf-8") + if body.startswith(GENERATED_HEADER): + return body + return GENERATED_HEADER + body + + +def _resolve_source_workflow( + workspace_root: Path, source_workflow: Path | None +) -> Path: + if source_workflow is not None: + candidate = ( + source_workflow + if source_workflow.is_absolute() + else (workspace_root / source_workflow) + ).resolve() + if candidate.exists(): + return candidate + raise RuntimeError(f"missing source 
workflow: {candidate}") + + default_source = (workspace_root / ".github" / "workflows" / "ci.yml").resolve() + if default_source.exists(): + return default_source + raise RuntimeError(f"missing source workflow: {default_source}") + + +def _sync_project( + *, + project_name: str, + project_root: Path, + rendered_template: str, + apply: bool, + prune: bool, +) -> list[Operation]: + operations: list[Operation] = [] + workflows_dir = project_root / ".github" / "workflows" + destination = workflows_dir / "ci.yml" + + if destination.exists(): + current = destination.read_text(encoding="utf-8") + if current != rendered_template: + if apply: + _ = destination.write_text(rendered_template, encoding="utf-8") + operations.append( + Operation( + project=project_name, + path=str(destination.relative_to(project_root)), + action="update", + reason="force overwrite ci.yml", + ) + ) + else: + operations.append( + Operation( + project=project_name, + path=str(destination.relative_to(project_root)), + action="noop", + reason="already synced", + ) + ) + else: + if apply: + workflows_dir.mkdir(parents=True, exist_ok=True) + _ = destination.write_text(rendered_template, encoding="utf-8") + operations.append( + Operation( + project=project_name, + path=str(destination.relative_to(project_root)), + action="create", + reason="missing ci.yml", + ) + ) + + if prune and workflows_dir.exists(): + candidates = sorted(workflows_dir.glob("*.yml")) + sorted( + workflows_dir.glob("*.yaml") + ) + for path in candidates: + if path.name in MANAGED_FILES: + continue + if apply: + path.unlink() + operations.append( + Operation( + project=project_name, + path=str(path.relative_to(project_root)), + action="prune", + reason="remove non-canonical workflow", + ) + ) + + return operations + + +def _write_report(report_path: Path, mode: str, operations: list[Operation]) -> None: + report_path.parent.mkdir(parents=True, exist_ok=True) + by_action: dict[str, int] = {} + for operation in operations: + 
by_action[operation.action] = by_action.get(operation.action, 0) + 1 + payload = { + "mode": mode, + "summary": by_action, + "operations": [ + { + "project": operation.project, + "path": operation.path, + "action": operation.action, + "reason": operation.reason, + } + for operation in operations + ], + } + _ = report_path.write_text( + json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + + +def _parse_args(argv: list[str]) -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--workspace-root", default=".", type=Path) + _ = parser.add_argument("--source-workflow", type=Path) + _ = parser.add_argument( + "--report", + default=".reports/workflows/sync.json", + type=Path, + ) + _ = parser.add_argument("--apply", action="store_true") + _ = parser.add_argument("--prune", action="store_true") + return parser.parse_args(argv) + + +def main(argv: list[str] | None = None) -> int: + args = _parse_args(sys.argv[1:] if argv is None else argv) + workspace_root = args.workspace_root.resolve() + report = ( + args.report if args.report.is_absolute() else (workspace_root / args.report) + ) + + source_workflow = _resolve_source_workflow(workspace_root, args.source_workflow) + + projects = _discover_projects(workspace_root) + rendered_template = _render_template(source_workflow) + operations: list[Operation] = [] + + for project_name, project_root in projects: + operations.extend( + _sync_project( + project_name=project_name, + project_root=project_root, + rendered_template=rendered_template, + apply=args.apply, + prune=args.prune, + ) + ) + + mode = "apply" if args.apply else "dry-run" + _write_report(report.resolve(), mode, operations) + print(f"Wrote: {report}") + for operation in operations: + print( + f"[{operation.project}] {operation.action}: {operation.path} ({operation.reason})" + ) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/maintenance/_discover.py 
b/scripts/maintenance/_discover.py index 0a49e55f6..07fa9429c 100644 --- a/scripts/maintenance/_discover.py +++ b/scripts/maintenance/_discover.py @@ -3,6 +3,7 @@ from __future__ import annotations import argparse +import json import re import sys from dataclasses import dataclass @@ -53,7 +54,9 @@ def main() -> int: _ = parser.add_argument( "--kind", choices=("submodule", "external", "all"), default="all" ) - _ = parser.add_argument("--format", choices=("human", "makefile"), default="human") + _ = parser.add_argument( + "--format", choices=("human", "makefile", "json"), default="human" + ) _ = parser.add_argument("--workspace-root", type=Path, default=Path.cwd()) args = parser.parse_args() @@ -65,6 +68,23 @@ def main() -> int: print(" ".join(project.name for project in projects)) return 0 + if args.format == "json": + payload = { + "workspace_root": str(args.workspace_root.resolve()), + "kind": args.kind, + "count": len(projects), + "projects": [ + { + "name": project.name, + "kind": project.kind, + "path": str(project.path.resolve()), + } + for project in projects + ], + } + print(json.dumps(payload, indent=2, sort_keys=True)) + return 0 + for project in projects: print(project.name) return 0 diff --git a/scripts/release/build.py b/scripts/release/build.py new file mode 100644 index 000000000..095194063 --- /dev/null +++ b/scripts/release/build.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import subprocess +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import discover_projects, workspace_root + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--version", required=True) + _ = parser.add_argument("--output-dir", type=Path, 
required=True) + return parser.parse_args() + + +def _run_make(project_path: Path, verb: str) -> tuple[int, str]: + command = ["make", "-C", str(project_path), verb] + result = subprocess.run(command, capture_output=True, text=True, check=False) + output = (result.stdout + "\n" + result.stderr).strip() + return result.returncode, output + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + output_dir = ( + args.output_dir if args.output_dir.is_absolute() else root / args.output_dir + ) + output_dir.mkdir(parents=True, exist_ok=True) + report_path = output_dir / "build-report.json" + + projects = discover_projects(root) + targets = [ + ("root", root), + ("algar-oud-mig", root / "algar-oud-mig"), + *[(project.name, project.path) for project in projects], + ("gruponos-meltano-native", root / "gruponos-meltano-native"), + ] + + seen: set[str] = set() + unique_targets: list[tuple[str, Path]] = [] + for name, path in targets: + if name in seen: + continue + seen.add(name) + if not path.exists(): + continue + unique_targets.append((name, path)) + + records: list[dict[str, str | int]] = [] + failures = 0 + for name, path in unique_targets: + code, output = _run_make(path, "build") + if code != 0: + failures += 1 + log = output_dir / f"build-{name}.log" + log.write_text(output + "\n", encoding="utf-8") + records.append({ + "project": name, + "path": str(path), + "exit_code": code, + "log": str(log), + }) + _ = print(f"[{name}] build exit={code}") + + report = { + "version": args.version, + "total": len(records), + "failures": failures, + "records": records, + } + report_path.write_text( + json.dumps(report, indent=2, sort_keys=True) + "\n", encoding="utf-8" + ) + _ = print(f"report: {report_path}") + return 1 if failures else 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/changelog.py b/scripts/release/changelog.py new file mode 100644 index 000000000..4d01ea592 --- /dev/null +++ 
b/scripts/release/changelog.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from datetime import UTC, datetime +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import workspace_root + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--version", required=True) + _ = parser.add_argument("--tag", required=True) + _ = parser.add_argument("--notes", type=Path, required=True) + _ = parser.add_argument("--apply", action="store_true") + return parser.parse_args() + + +def _update_changelog(existing: str, version: str, tag: str) -> str: + date = datetime.now(UTC).date().isoformat() + section = ( + f"## {version} - {date}\n\n" + f"- Workspace release tag: `{tag}`\n" + "- Status: Alpha, non-production\n\n" + f"Full notes: `docs/releases/{tag}.md`\n\n" + ) + if section in existing: + return existing + marker = "# Changelog\n\n" + if marker in existing: + return existing.replace(marker, marker + section, 1) + return "# Changelog\n\n" + section + existing + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + changelog_path = root / "docs" / "CHANGELOG.md" + latest_path = root / "docs" / "releases" / "latest.md" + tagged_notes_path = root / "docs" / "releases" / f"{args.tag}.md" + notes_path = args.notes if args.notes.is_absolute() else root / args.notes + + notes_text = notes_path.read_text(encoding="utf-8") + existing = ( + changelog_path.read_text(encoding="utf-8") + if changelog_path.exists() + else "# Changelog\n\n" + ) + updated = _update_changelog(existing, args.version, args.tag) + + if args.apply: + changelog_path.parent.mkdir(parents=True, exist_ok=True) + _ = changelog_path.write_text(updated, encoding="utf-8") + 
latest_path.parent.mkdir(parents=True, exist_ok=True) + _ = latest_path.write_text(notes_text, encoding="utf-8") + _ = tagged_notes_path.write_text(notes_text, encoding="utf-8") + + _ = print(f"changelog: {changelog_path}") + _ = print(f"latest: {latest_path}") + _ = print(f"release_notes: {tagged_notes_path}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/notes.py b/scripts/release/notes.py new file mode 100644 index 000000000..50741f647 --- /dev/null +++ b/scripts/release/notes.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import discover_projects, run_capture, workspace_root + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--tag", required=True) + _ = parser.add_argument("--output", type=Path, required=True) + _ = parser.add_argument("--version", default="") + return parser.parse_args() + + +def _tag_exists(root: Path, tag: str) -> bool: + try: + _ = run_capture(["git", "rev-parse", "--verify", f"refs/tags/{tag}"], cwd=root) + return True + except RuntimeError: + return False + + +def _previous_tag(root: Path, tag: str) -> str: + output = run_capture(["git", "tag", "--sort=-v:refname"], cwd=root) + tags = [line.strip() for line in output.splitlines() if line.strip()] + if tag in tags: + idx = tags.index(tag) + if idx + 1 < len(tags): + return tags[idx + 1] + for candidate in tags: + if candidate != tag: + return candidate + return "" + + +def _collect_changes(root: Path, previous: str, tag: str) -> str: + target = tag if _tag_exists(root, tag) else "HEAD" + rev = f"{previous}..{target}" if previous else target + return run_capture(["git", 
"log", "--pretty=format:- %h %s (%an)", rev], cwd=root) + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + output_path = args.output if args.output.is_absolute() else root / args.output + output_path.parent.mkdir(parents=True, exist_ok=True) + + previous = _previous_tag(root, args.tag) + changes = _collect_changes(root, previous, args.tag) + projects = discover_projects(root) + + version = args.version or args.tag.removeprefix("v") + lines: list[str] = [ + f"# Release {args.tag}", + "", + "## Status", + "", + "- Quality: Alpha", + "- Usage: Non-production", + "", + "## Scope", + "", + f"- Workspace release version: {version}", + f"- Projects packaged: {len(projects) + 2}", + "", + "## Projects impacted", + "", + ] + lines.extend( + f"- {name}" + for name in [ + "root", + "algar-oud-mig", + *[project.name for project in projects], + "gruponos-meltano-native", + ] + ) + lines.extend([ + "", + "## Changes since last tag", + "", + changes or "- Initial tagged release", + "", + "## Verification", + "", + "- make release-ci RELEASE_PHASE=all", + "- make validate VALIDATE_SCOPE=workspace", + "- make build", + ]) + + output_path.write_text("\n".join(lines).rstrip() + "\n", encoding="utf-8") + _ = print(f"wrote: {output_path}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/run.py b/scripts/release/run.py new file mode 100644 index 000000000..acaad58bf --- /dev/null +++ b/scripts/release/run.py @@ -0,0 +1,202 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import re +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import ( + bump_version, + discover_projects, + parse_semver, + run_capture, + run_checked, + workspace_root, +) + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = 
parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--phase", default="all") + _ = parser.add_argument("--version", default="") + _ = parser.add_argument("--tag", default="") + _ = parser.add_argument("--bump", default="") + _ = parser.add_argument("--interactive", type=int, default=1) + _ = parser.add_argument("--push", type=int, default=0) + _ = parser.add_argument("--dry-run", type=int, default=0) + _ = parser.add_argument("--create-branches", type=int, default=1) + return parser.parse_args() + + +def _current_version(root: Path) -> str: + pyproject = root / "pyproject.toml" + content = pyproject.read_text(encoding="utf-8") + match = re.search(r'^version\s*=\s*"(?P[^"]+)"', content, flags=re.M) + if not match: + raise RuntimeError("unable to detect version from pyproject.toml") + value = match.group("version") + return value.removesuffix("-dev") + + +def _resolve_version(args: argparse.Namespace, root: Path) -> str: + if args.version: + _ = parse_semver(args.version) + return args.version + + current = _current_version(root) + if args.bump: + return bump_version(current, args.bump) + + if args.interactive != 1: + return current + + print("Select version bump type: [major|minor|patch]") + bump = input("bump> ").strip().lower() + if bump not in {"major", "minor", "patch"}: + raise RuntimeError("invalid bump type") + return bump_version(current, bump) + + +def _resolve_tag(args: argparse.Namespace, version: str) -> str: + if args.tag: + if not args.tag.startswith("v"): + raise RuntimeError("tag must start with v") + return args.tag + return f"v{version}" + + +def _create_release_branches(root: Path, version: str) -> None: + branch = f"release/{version}" + run_checked(["git", "checkout", "-B", branch], cwd=root) + for project in discover_projects(root): + run_checked(["git", "checkout", "-B", branch], cwd=project.path) + for extra in ("algar-oud-mig", "gruponos-meltano-native"): + project_root = root / extra + if 
project_root.exists(): + run_checked(["git", "checkout", "-B", branch], cwd=project_root) + + +def _phase_version(root: Path, version: str, dry_run: bool) -> None: + command = [ + "python", + "scripts/release/version.py", + "--root", + str(root), + "--version", + version, + "--check" if dry_run else "--apply", + ] + run_checked(command, cwd=root) + + +def _phase_validate(root: Path) -> None: + run_checked(["make", "validate", "VALIDATE_SCOPE=workspace"], cwd=root) + + +def _phase_build(root: Path, version: str) -> None: + output = root / ".reports" / "release" / f"v{version}" + run_checked( + [ + "python", + "scripts/release/build.py", + "--root", + str(root), + "--version", + version, + "--output-dir", + str(output), + ], + cwd=root, + ) + + +def _phase_publish( + root: Path, version: str, tag: str, push: bool, dry_run: bool +) -> None: + notes = root / ".reports" / "release" / tag / "RELEASE_NOTES.md" + notes.parent.mkdir(parents=True, exist_ok=True) + run_checked( + [ + "python", + "scripts/release/notes.py", + "--root", + str(root), + "--tag", + tag, + "--version", + version, + "--output", + str(notes), + ], + cwd=root, + ) + if not dry_run: + run_checked( + [ + "python", + "scripts/release/changelog.py", + "--root", + str(root), + "--version", + version, + "--tag", + tag, + "--notes", + str(notes), + "--apply", + ], + cwd=root, + ) + tag_exists = run_capture(["git", "tag", "-l", tag], cwd=root) + if tag_exists.strip() != tag: + run_checked(["git", "tag", "-a", tag, "-m", f"release: {tag}"], cwd=root) + if push: + run_checked(["git", "push", "origin", "HEAD"], cwd=root) + run_checked(["git", "push", "origin", tag], cwd=root) + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + version = _resolve_version(args, root) + tag = _resolve_tag(args, version) + phases = ( + ["validate", "version", "build", "publish"] + if args.phase == "all" + else [part.strip() for part in args.phase.split(",") if part.strip()] + ) + + _ = 
print(f"release_version={version}") + _ = print(f"release_tag={tag}") + _ = print(f"phases={','.join(phases)}") + + if args.create_branches == 1 and args.dry_run == 0: + _create_release_branches(root, version) + + for phase in phases: + if phase == "validate": + _phase_validate(root) + continue + if phase == "version": + _phase_version(root, version, args.dry_run == 1) + continue + if phase == "build": + _phase_build(root, version) + continue + if phase == "publish": + _phase_publish(root, version, tag, args.push == 1, args.dry_run == 1) + continue + raise RuntimeError(f"invalid phase: {phase}") + + _ = print("release_run=ok") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/release/shared.py b/scripts/release/shared.py new file mode 100644 index 000000000..0598b719c --- /dev/null +++ b/scripts/release/shared.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +# Owner-Skill: .claude/skills/scripts-maintenance/SKILL.md +from __future__ import annotations + +import json +import re +import subprocess +import sys +from dataclasses import dataclass +from pathlib import Path + + +SEMVER_RE = re.compile( + r"^(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)$" +) + + +@dataclass(frozen=True) +class Project: + name: str + path: Path + + +def workspace_root(path: str | Path = ".") -> Path: + return Path(path).resolve() + + +def discover_projects(root: Path) -> list[Project]: + discover = root / "scripts" / "maintenance" / "_discover.py" + command = [ + sys.executable, + str(discover), + "--workspace-root", + str(root), + "--kind", + "all", + "--format", + "json", + ] + result = subprocess.run(command, capture_output=True, text=True, check=False) + if result.returncode != 0: + msg = (result.stderr or result.stdout).strip() + raise RuntimeError(f"project discovery failed: {msg}") + payload = json.loads(result.stdout) + projects: list[Project] = [] + for item in payload.get("projects", []): + if not isinstance(item, dict): + continue + name = 
item.get("name") + path_value = item.get("path") + if not isinstance(name, str) or not isinstance(path_value, str): + continue + projects.append(Project(name=name, path=Path(path_value).resolve())) + return sorted(projects, key=lambda project: project.name) + + +def parse_semver(version: str) -> tuple[int, int, int]: + match = SEMVER_RE.match(version) + if not match: + raise ValueError(f"invalid semver version: {version}") + return ( + int(match.group("major")), + int(match.group("minor")), + int(match.group("patch")), + ) + + +def bump_version(current_version: str, bump: str) -> str: + major, minor, patch = parse_semver(current_version) + if bump == "major": + return f"{major + 1}.0.0" + if bump == "minor": + return f"{major}.{minor + 1}.0" + if bump == "patch": + return f"{major}.{minor}.{patch + 1}" + raise ValueError(f"unsupported bump: {bump}") + + +def run_checked(command: list[str], cwd: Path | None = None) -> None: + result = subprocess.run(command, cwd=cwd, check=False) + if result.returncode != 0: + cmd = " ".join(command) + raise RuntimeError(f"command failed ({result.returncode}): {cmd}") + + +def run_capture(command: list[str], cwd: Path | None = None) -> str: + result = subprocess.run( + command, cwd=cwd, capture_output=True, text=True, check=False + ) + if result.returncode != 0: + cmd = " ".join(command) + detail = (result.stderr or result.stdout).strip() + raise RuntimeError(f"command failed ({result.returncode}): {cmd}: {detail}") + return result.stdout.strip() diff --git a/scripts/release/version.py b/scripts/release/version.py new file mode 100644 index 000000000..48f497756 --- /dev/null +++ b/scripts/release/version.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from pathlib import Path +import sys + +SCRIPTS_ROOT = Path(__file__).resolve().parents[1] +if str(SCRIPTS_ROOT) not in sys.path: + sys.path.insert(0, str(SCRIPTS_ROOT)) + +from release.shared import discover_projects, parse_semver, 
workspace_root + + +def _replace_version(content: str, version: str) -> tuple[str, bool]: + old = 'version = "0.10.0-dev"' + new = f'version = "{version}"' + if old in content: + return content.replace(old, new), True + + marker = 'version = "' + start = content.find(marker) + if start < 0: + return content, False + value_start = start + len(marker) + value_end = content.find('"', value_start) + if value_end < 0: + return content, False + + current = content[value_start:value_end] + current_clean = current.removesuffix("-dev") + _ = parse_semver(current_clean) + if current == version: + return content, False + updated = content[:value_start] + version + content[value_end:] + return updated, True + + +def _version_files(root: Path) -> list[Path]: + files: list[Path] = [root / "pyproject.toml"] + for project in discover_projects(root): + pyproject = project.path / "pyproject.toml" + if pyproject.exists(): + files.append(pyproject) + for extra in ("algar-oud-mig", "gruponos-meltano-native"): + pyproject = root / extra / "pyproject.toml" + if pyproject.exists(): + files.append(pyproject) + dedup = sorted({path.resolve() for path in files}) + return dedup + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser() + _ = parser.add_argument("--root", type=Path, default=Path(".")) + _ = parser.add_argument("--version", required=True) + _ = parser.add_argument("--apply", action="store_true") + _ = parser.add_argument("--check", action="store_true") + return parser.parse_args() + + +def main() -> int: + args = _parse_args() + root = workspace_root(args.root) + _ = parse_semver(args.version) + + changed = 0 + for file_path in _version_files(root): + content = file_path.read_text(encoding="utf-8") + updated, did_change = _replace_version(content, args.version) + if did_change: + changed += 1 + if args.apply: + _ = file_path.write_text(updated, encoding="utf-8") + _ = print(f"update: {file_path}") + + if args.check: + _ = 
print(f"checked_version={args.version}") + _ = print(f"files_changed={changed}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tests/scripts/dependencies/test_sync_internal_deps.py b/tests/scripts/dependencies/test_sync_internal_deps.py new file mode 100644 index 000000000..7b94f98a9 --- /dev/null +++ b/tests/scripts/dependencies/test_sync_internal_deps.py @@ -0,0 +1,157 @@ +from __future__ import annotations + +import importlib.util +import subprocess +import sys +from pathlib import Path +from typing import Any + +from _pytest.monkeypatch import MonkeyPatch + + +def load_module() -> Any: + module_path = ( + Path(__file__).resolve().parents[3] + / "scripts" + / "dependencies" + / "sync_internal_deps.py" + ) + spec = importlib.util.spec_from_file_location("sync_internal_deps", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def _git_fail_result() -> subprocess.CompletedProcess[str]: + return subprocess.CompletedProcess(args=["git"], returncode=1, stdout="", stderr="") + + +def _git_fail(*_args: object, **_kwargs: object) -> subprocess.CompletedProcess[str]: + return _git_fail_result() + + +def _owner_flext_sh(_root: Path) -> str: + return "flext-sh" + + +def _ref_main(_root: Path) -> str: + return "main" + + +def _capture_checkout( + captured: list[tuple[Path, str, str]], +) -> Any: + def _inner(dep_path: Path, repo_url: str, ref_name: str) -> None: + captured.append((dep_path, repo_url, ref_name)) + + return _inner + + +def test_workspace_mode_uses_explicit_workspace_env( + tmp_path: Path, monkeypatch: MonkeyPatch +) -> None: + mod = load_module() + workspace_root = tmp_path / "workspace" + project_root = workspace_root / "nested" / "project" + _ = project_root.mkdir(parents=True) + + monkeypatch.setenv("FLEXT_WORKSPACE_ROOT", str(workspace_root)) + 
monkeypatch.delenv("FLEXT_STANDALONE", raising=False) + monkeypatch.setattr(mod, "_run_git", _git_fail) + + workspace_mode, resolved_root = mod._is_workspace_mode(project_root) + + assert workspace_mode is True + assert resolved_root == workspace_root + + +def test_workspace_mode_finds_parent_gitmodules( + tmp_path: Path, monkeypatch: MonkeyPatch +) -> None: + mod = load_module() + workspace_root = tmp_path / "workspace" + project_root = workspace_root / "nested" / "project" + _ = project_root.mkdir(parents=True) + _ = (workspace_root / ".gitmodules").write_text("", encoding="utf-8") + + monkeypatch.delenv("FLEXT_WORKSPACE_ROOT", raising=False) + monkeypatch.delenv("FLEXT_STANDALONE", raising=False) + monkeypatch.setattr(mod, "_run_git", _git_fail) + + workspace_mode, resolved_root = mod._is_workspace_mode(project_root) + + assert workspace_mode is True + assert resolved_root == workspace_root + + +def test_standalone_fallback_synthesizes_repo_urls( + tmp_path: Path, monkeypatch: MonkeyPatch +) -> None: + mod = load_module() + project_root = tmp_path / "flext-cli" + _ = project_root.mkdir(parents=True) + _ = (project_root / "pyproject.toml").write_text( + """ +[tool.poetry] +name = "flext-cli" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.13" +flext-core = { path = ".flext-deps/flext-core" } +""".strip() + + "\n", + encoding="utf-8", + ) + + captured: list[tuple[Path, str, str]] = [] + + monkeypatch.setenv("FLEXT_STANDALONE", "1") + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + monkeypatch.setattr(mod, "_infer_owner_from_origin", _owner_flext_sh) + monkeypatch.setattr(mod, "_resolve_ref", _ref_main) + monkeypatch.setattr(mod, "_ensure_checkout", _capture_checkout(captured)) + monkeypatch.setattr( + mod.sys, + "argv", + ["sync_internal_deps.py", "--project-root", str(project_root)], + ) + + assert mod._main() == 0 + assert len(captured) == 1 + dep_path, repo_url, ref_name = captured[0] + assert dep_path == project_root / ".flext-deps" / 
"flext-core" + assert repo_url == "git@github.com:flext-sh/flext-core.git" + assert ref_name == "main" + + +def test_ensure_checkout_removes_preexisting_non_git_path( + tmp_path: Path, monkeypatch: MonkeyPatch +) -> None: + mod = load_module() + dep_path = tmp_path / ".flext-deps" / "flext-core" + dep_path.parent.mkdir(parents=True) + _ = dep_path.write_text("placeholder", encoding="utf-8") + + calls: list[list[str]] = [] + + def _fake_run( + args: list[str], *, text: bool, capture_output: bool, check: bool + ) -> subprocess.CompletedProcess[str]: + _ = text, capture_output, check + calls.append(args) + return subprocess.CompletedProcess( + args=args, returncode=0, stdout="", stderr="" + ) + + monkeypatch.setattr(mod.subprocess, "run", _fake_run) + + mod._ensure_checkout(dep_path, "git@github.com:flext-sh/flext-core.git", "main") + + assert dep_path.exists() is False + assert calls + assert calls[0][0] == mod.GIT_BIN + assert calls[0][1] == "clone" diff --git a/tests/scripts/github/test_sync_workflows.py b/tests/scripts/github/test_sync_workflows.py new file mode 100644 index 000000000..283d05f2b --- /dev/null +++ b/tests/scripts/github/test_sync_workflows.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +import importlib.util +import json +import sys +from pathlib import Path +from typing import Any + + +def load_module() -> Any: + module_path = ( + Path(__file__).resolve().parents[3] / "scripts" / "github" / "sync_workflows.py" + ) + spec = importlib.util.spec_from_file_location("sync_workflows", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def test_sync_project_creates_ci_from_template(tmp_path: Path) -> None: + mod = load_module() + project_root = tmp_path / "flext-core" + _ = project_root.mkdir(parents=True) + + rendered_template = ( + "# Generated by scripts/github/sync_workflows.py - DO 
NOT EDIT\nname: CI\n" + ) + operations = mod._sync_project( + project_name="flext-core", + project_root=project_root, + rendered_template=rendered_template, + apply=True, + prune=True, + ) + + ci_file = project_root / ".github" / "workflows" / "ci.yml" + assert ci_file.exists() + assert ci_file.read_text(encoding="utf-8") == rendered_template + assert any(op.action == "create" for op in operations) + + +def test_sync_project_overwrites_existing_ci(tmp_path: Path) -> None: + mod = load_module() + project_root = tmp_path / "flext-api" + workflows_dir = project_root / ".github" / "workflows" + _ = workflows_dir.mkdir(parents=True) + _ = (workflows_dir / "ci.yml").write_text("name: Custom\n", encoding="utf-8") + + operations = mod._sync_project( + project_name="flext-api", + project_root=project_root, + rendered_template="name: Canonical\n", + apply=True, + prune=True, + ) + + ci_file = workflows_dir / "ci.yml" + assert ci_file.read_text(encoding="utf-8") == "name: Canonical\n" + assert any( + op.action == "update" and op.reason == "force overwrite ci.yml" + for op in operations + ) + + +def test_sync_project_prunes_other_workflows(tmp_path: Path) -> None: + mod = load_module() + project_root = tmp_path / "flext-plugin" + workflows_dir = project_root / ".github" / "workflows" + _ = workflows_dir.mkdir(parents=True) + _ = (workflows_dir / "legacy.yml").write_text("name: Legacy\n", encoding="utf-8") + _ = (workflows_dir / "extra.yaml").write_text("name: Extra\n", encoding="utf-8") + + operations = mod._sync_project( + project_name="flext-plugin", + project_root=project_root, + rendered_template="name: Canonical\n", + apply=True, + prune=True, + ) + + assert not (workflows_dir / "legacy.yml").exists() + assert not (workflows_dir / "extra.yaml").exists() + assert any( + op.action == "prune" and op.reason == "remove non-canonical workflow" + for op in operations + ) + + +def test_write_report_persists_summary(tmp_path: Path) -> None: + mod = load_module() + report_path = 
tmp_path / "report.json" + operations = [ + mod.Operation( + project="flext-core", + path=".github/workflows/ci.yml", + action="create", + reason="missing ci.yml", + ), + mod.Operation( + project="flext-api", + path=".github/workflows/ci.yml", + action="update", + reason="force overwrite ci.yml", + ), + ] + + mod._write_report(report_path=report_path, mode="apply", operations=operations) + payload = json.loads(report_path.read_text(encoding="utf-8")) + + assert payload["mode"] == "apply" + assert payload["summary"] == {"create": 1, "update": 1} + assert len(payload["operations"]) == 2 + + +def test_resolve_source_workflow_uses_workspace_ci(tmp_path: Path) -> None: + mod = load_module() + source = tmp_path / ".github" / "workflows" + _ = source.mkdir(parents=True) + ci_path = source / "ci.yml" + _ = ci_path.write_text("name: CI\n", encoding="utf-8") + + resolved = mod._resolve_source_workflow(tmp_path, None) + + assert resolved == ci_path.resolve() diff --git a/tests/scripts/maintenance/test_discover.py b/tests/scripts/maintenance/test_discover.py new file mode 100644 index 000000000..127426901 --- /dev/null +++ b/tests/scripts/maintenance/test_discover.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +import importlib.util +import json +import sys +from pathlib import Path +from typing import Any + +from _pytest.capture import CaptureFixture +from _pytest.monkeypatch import MonkeyPatch + + +def load_module() -> Any: + module_path = ( + Path(__file__).resolve().parents[3] / "scripts" / "maintenance" / "_discover.py" + ) + spec = importlib.util.spec_from_file_location("_discover", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +def _create_project(root: Path, name: str) -> None: + project = root / name + _ = project.mkdir(parents=True) + _ = (project / ".git").mkdir() + _ = (project / 
"Makefile").write_text("all:\n\t@true\n", encoding="utf-8") + _ = (project / "pyproject.toml").write_text( + "[project]\nname='demo'\nversion='0.1.0'\n", encoding="utf-8" + ) + + +def test_discover_supports_json_output( + tmp_path: Path, monkeypatch: MonkeyPatch, capsys: CaptureFixture[str] +) -> None: + mod = load_module() + _create_project(tmp_path, "subproj") + _create_project(tmp_path, "external-proj") + _ = (tmp_path / ".gitmodules").write_text( + '[submodule "subproj"]\n\tpath = subproj\n\turl = git@github.com:flext-sh/subproj.git\n', + encoding="utf-8", + ) + + monkeypatch.setattr( + mod.sys, + "argv", + [ + "_discover.py", + "--workspace-root", + str(tmp_path), + "--kind", + "all", + "--format", + "json", + ], + ) + + assert mod.main() == 0 + payload = json.loads(capsys.readouterr().out) + + assert payload["count"] == 2 + assert payload["kind"] == "all" + discovered = {item["name"]: item["kind"] for item in payload["projects"]} + assert discovered == {"subproj": "submodule", "external-proj": "external"}