From dd6ad2fa18b98c010aa758cdbd843426ea60e10e Mon Sep 17 00:00:00 2001 From: Prekzursil Date: Tue, 3 Mar 2026 08:21:37 +0200 Subject: [PATCH 1/5] chore: add strict codecov and zero-issue quality gates Co-authored-by: Codex --- .github/workflows/codacy-zero.yml | 33 ++++ .github/workflows/codecov-analytics.yml | 51 ++++++ .github/workflows/coverage-100.yml | 52 ++++++ .github/workflows/deepscan-zero.yml | 32 ++++ .github/workflows/quality-zero-gate.yml | 86 ++++++++++ .github/workflows/sentry-zero.yml | 34 ++++ .github/workflows/snyk-zero.yml | 36 +++++ .github/workflows/sonar-zero.yml | 36 +++++ codecov.yml | 33 ++++ docs/quality/QUALITY_ZERO_GATES.md | 10 ++ scripts/quality/assert_coverage_100.py | 192 ++++++++++++++++++++++ scripts/quality/check_codacy_zero.py | 198 +++++++++++++++++++++++ scripts/quality/check_deepscan_zero.py | 155 ++++++++++++++++++ scripts/quality/check_quality_secrets.py | 135 ++++++++++++++++ scripts/quality/check_required_checks.py | 196 ++++++++++++++++++++++ scripts/quality/check_sentry_zero.py | 181 +++++++++++++++++++++ scripts/quality/check_sonar_zero.py | 166 +++++++++++++++++++ scripts/security_helpers.py | 60 +++++++ 18 files changed, 1686 insertions(+) create mode 100644 .github/workflows/codacy-zero.yml create mode 100644 .github/workflows/codecov-analytics.yml create mode 100644 .github/workflows/coverage-100.yml create mode 100644 .github/workflows/deepscan-zero.yml create mode 100644 .github/workflows/quality-zero-gate.yml create mode 100644 .github/workflows/sentry-zero.yml create mode 100644 .github/workflows/snyk-zero.yml create mode 100644 .github/workflows/sonar-zero.yml create mode 100644 codecov.yml create mode 100644 docs/quality/QUALITY_ZERO_GATES.md create mode 100644 scripts/quality/assert_coverage_100.py create mode 100644 scripts/quality/check_codacy_zero.py create mode 100644 scripts/quality/check_deepscan_zero.py create mode 100644 scripts/quality/check_quality_secrets.py create mode 100644 
scripts/quality/check_required_checks.py create mode 100644 scripts/quality/check_sentry_zero.py create mode 100644 scripts/quality/check_sonar_zero.py create mode 100644 scripts/security_helpers.py diff --git a/.github/workflows/codacy-zero.yml b/.github/workflows/codacy-zero.yml new file mode 100644 index 00000000..3184250a --- /dev/null +++ b/.github/workflows/codacy-zero.yml @@ -0,0 +1,33 @@ +name: Codacy Zero + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + codacy-zero: + name: Codacy Zero + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Assert Codacy zero-open gate + env: + CODACY_API_TOKEN: ${{ secrets.CODACY_API_TOKEN }} + run: | + python3 scripts/quality/check_codacy_zero.py \ + --owner "${GITHUB_REPOSITORY_OWNER}" \ + --repo "${GITHUB_REPOSITORY#*/}" \ + --out-json "codacy-zero/codacy.json" \ + --out-md "codacy-zero/codacy.md" + - name: Upload Codacy artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: codacy-zero + path: codacy-zero diff --git a/.github/workflows/codecov-analytics.yml b/.github/workflows/codecov-analytics.yml new file mode 100644 index 00000000..fd348322 --- /dev/null +++ b/.github/workflows/codecov-analytics.yml @@ -0,0 +1,51 @@ +name: Codecov Analytics + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + codecov-analytics: + name: Codecov Analytics + runs-on: ubuntu-latest + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + steps: + - uses: actions/checkout@v6 + - uses: actions/setup-python@v6 + with: + python-version: '3.12' + - uses: actions/setup-node@v6 + with: + node-version: '20' + - name: Validate Codecov token + run: | + if [ -z "${CODECOV_TOKEN}" ]; then + echo "Missing CODECOV_TOKEN" >&2 + exit 1 + fi + + - name: Backend coverage + run: | + mkdir -p coverage + python -m pip install 
--upgrade pip + python -m pip install -r backend/requirements.txt pytest pytest-cov + python -m pytest backend --cov=backend --cov-report=xml:backend/coverage.xml + - name: Frontend coverage + run: | + npm --prefix frontend/webcoder_ui ci + npm --prefix frontend/webcoder_ui test -- --coverage --watch=false + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: backend/coverage.xml,frontend/webcoder_ui/coverage/lcov.info + flags: backend,frontend + fail_ci_if_error: true + verbose: true diff --git a/.github/workflows/coverage-100.yml b/.github/workflows/coverage-100.yml new file mode 100644 index 00000000..d2c736eb --- /dev/null +++ b/.github/workflows/coverage-100.yml @@ -0,0 +1,52 @@ +name: Coverage 100 + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + coverage-100: + name: Coverage 100 Gate + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: actions/setup-python@v6 + with: + python-version: '3.12' + - uses: actions/setup-node@v6 + with: + node-version: '20' + + - name: Backend coverage + run: | + mkdir -p coverage + python -m pip install --upgrade pip + python -m pip install -r backend/requirements.txt pytest pytest-cov + python -m pytest backend --cov=backend --cov-report=xml:backend/coverage.xml + - name: Frontend coverage + run: | + npm --prefix frontend/webcoder_ui ci + npm --prefix frontend/webcoder_ui test -- --coverage --watch=false + + - name: Enforce 100% coverage + run: | + python3 scripts/quality/assert_coverage_100.py \ + --xml "backend=backend/coverage.xml" \ + --lcov "frontend=frontend/webcoder_ui/coverage/lcov.info" \ + --out-json "coverage-100/coverage.json" \ + --out-md "coverage-100/coverage.md" + - name: Upload coverage artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: coverage-100 + path: | + coverage + **/coverage + 
**/TestResults diff --git a/.github/workflows/deepscan-zero.yml b/.github/workflows/deepscan-zero.yml new file mode 100644 index 00000000..0f2569bb --- /dev/null +++ b/.github/workflows/deepscan-zero.yml @@ -0,0 +1,32 @@ +name: DeepScan Zero + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + deepscan-zero: + name: DeepScan Zero + runs-on: ubuntu-latest + env: + DEEPSCAN_API_TOKEN: ${{ secrets.DEEPSCAN_API_TOKEN }} + DEEPSCAN_OPEN_ISSUES_URL: ${{ vars.DEEPSCAN_OPEN_ISSUES_URL }} + steps: + - uses: actions/checkout@v6 + - name: Assert DeepScan zero-open gate + run: | + python3 scripts/quality/check_deepscan_zero.py \ + --out-json "deepscan-zero/deepscan.json" \ + --out-md "deepscan-zero/deepscan.md" + - name: Upload DeepScan artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: deepscan-zero + path: deepscan-zero diff --git a/.github/workflows/quality-zero-gate.yml b/.github/workflows/quality-zero-gate.yml new file mode 100644 index 00000000..1a0d0b87 --- /dev/null +++ b/.github/workflows/quality-zero-gate.yml @@ -0,0 +1,86 @@ +name: Quality Zero Gate + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + secrets-preflight: + name: Quality Secrets Preflight + runs-on: ubuntu-latest + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + CODACY_API_TOKEN: ${{ secrets.CODACY_API_TOKEN }} + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + SENTRY_ORG: ${{ vars.SENTRY_ORG }} + SENTRY_PROJECT: ${{ vars.SENTRY_PROJECT }} + DEEPSCAN_POLICY_MODE: ${{ vars.DEEPSCAN_POLICY_MODE }} + DEEPSCAN_OPEN_ISSUES_URL: ${{ vars.DEEPSCAN_OPEN_ISSUES_URL }} + DEEPSCAN_API_TOKEN: ${{ secrets.DEEPSCAN_API_TOKEN }} + steps: + - uses: actions/checkout@v6 + - name: Run quality secrets preflight + 
run: | + python3 scripts/quality/check_quality_secrets.py \ + --required-secret DEEPSCAN_API_TOKEN \ + --required-var SENTRY_PROJECT \ + --required-var DEEPSCAN_POLICY_MODE \ + --required-var DEEPSCAN_OPEN_ISSUES_URL \ + --out-json quality-secrets/secrets.json \ + --out-md quality-secrets/secrets.md + - name: Upload secrets preflight artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: quality-secrets + path: quality-secrets + + quality-zero-gate: + name: Quality Zero Gate + if: always() + runs-on: ubuntu-latest + needs: + - secrets-preflight + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v6 + - name: Assert secrets preflight succeeded + run: | + if [ "${{ needs.secrets-preflight.result }}" != "success" ]; then + echo "Quality Secrets Preflight failed or was not successful." >&2 + exit 1 + fi + - name: Assert required quality contexts are green + run: | + python3 scripts/quality/check_required_checks.py \ + --repo "${GITHUB_REPOSITORY}" \ + --sha "${GITHUB_SHA}" \ + --required-context "Coverage 100 Gate" \ + --required-context "Codecov Analytics" \ + --required-context "Sonar Zero" \ + --required-context "Codacy Zero" \ + --required-context "Snyk Zero" \ + --required-context "Sentry Zero" \ + --required-context "DeepScan Zero" \ + --required-context "SonarCloud Code Analysis" \ + --required-context "Codacy Static Code Analysis" \ + --required-context "DeepScan" \ + --timeout-seconds 1500 \ + --poll-seconds 20 \ + --out-json quality-zero-gate/required-checks.json \ + --out-md quality-zero-gate/required-checks.md + - name: Upload aggregate artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: quality-zero-gate + path: quality-zero-gate diff --git a/.github/workflows/sentry-zero.yml b/.github/workflows/sentry-zero.yml new file mode 100644 index 00000000..d9aeea83 --- /dev/null +++ b/.github/workflows/sentry-zero.yml @@ -0,0 +1,34 @@ +name: Sentry Zero + +on: + push: + branches: [main, 
master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + sentry-zero: + name: Sentry Zero + runs-on: ubuntu-latest + env: + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + SENTRY_ORG: ${{ vars.SENTRY_ORG }} + SENTRY_PROJECT: ${{ vars.SENTRY_PROJECT }} + steps: + - uses: actions/checkout@v6 + - name: Assert Sentry unresolved issues are zero + run: | + python3 scripts/quality/check_sentry_zero.py \ + --project "${SENTRY_PROJECT}" \ + --out-json "sentry-zero/sentry.json" \ + --out-md "sentry-zero/sentry.md" + - name: Upload Sentry artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: sentry-zero + path: sentry-zero diff --git a/.github/workflows/snyk-zero.yml b/.github/workflows/snyk-zero.yml new file mode 100644 index 00000000..5ccda58f --- /dev/null +++ b/.github/workflows/snyk-zero.yml @@ -0,0 +1,36 @@ +name: Snyk Zero + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + snyk-zero: + name: Snyk Zero + runs-on: ubuntu-latest + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + steps: + - uses: actions/checkout@v6 + - name: Set up Node + uses: actions/setup-node@v6 + with: + node-version: '20' + - name: Install Snyk CLI + run: npm install -g snyk + - name: Validate token + run: | + if [ -z "${SNYK_TOKEN}" ]; then + echo "Missing SNYK_TOKEN" >&2 + exit 1 + fi + - name: Snyk OSS test + run: snyk test --all-projects --severity-threshold=low + - name: Snyk code test + run: snyk code test --severity-threshold=low diff --git a/.github/workflows/sonar-zero.yml b/.github/workflows/sonar-zero.yml new file mode 100644 index 00000000..0e7bb386 --- /dev/null +++ b/.github/workflows/sonar-zero.yml @@ -0,0 +1,36 @@ +name: Sonar Zero + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + workflow_dispatch: + +permissions: + contents: read + +jobs: + sonar-zero: + name: 
Sonar Zero + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Run Sonar scan + uses: SonarSource/sonarqube-scan-action@v6 + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + - name: Assert Sonar zero-open gate + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + run: | + python3 scripts/quality/check_sonar_zero.py \ + --project-key "Prekzursil_WebCoder" \ + --out-json "sonar-zero/sonar.json" \ + --out-md "sonar-zero/sonar.md" + - name: Upload Sonar artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: sonar-zero + path: sonar-zero diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..181a1ca5 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,33 @@ +coverage: + status: + project: + default: + target: 100% + threshold: 0% + patch: + default: + target: 100% + threshold: 0% +comment: + layout: "reach,diff,flags,files" + behavior: default +flag_management: + default_rules: + carryforward: false + individual_flags: + - name: backend + statuses: + - type: project + target: 100% + threshold: 0% + - type: patch + target: 100% + threshold: 0% + - name: frontend + statuses: + - type: project + target: 100% + threshold: 0% + - type: patch + target: 100% + threshold: 0% diff --git a/docs/quality/QUALITY_ZERO_GATES.md b/docs/quality/QUALITY_ZERO_GATES.md new file mode 100644 index 00000000..8dcbc453 --- /dev/null +++ b/docs/quality/QUALITY_ZERO_GATES.md @@ -0,0 +1,10 @@ +# Quality Zero Gates + +This repository is configured for strict quality enforcement: + +- Coverage target: 100% (project + patch) +- Mandatory zero-open findings: Sonar, Codacy, Snyk, Sentry, DeepScan +- Fail-closed secrets preflight +- Aggregated required-context assertion via `Quality Zero Gate` + +If required tokens/variables are missing, workflows fail by design. 
diff --git a/scripts/quality/assert_coverage_100.py b/scripts/quality/assert_coverage_100.py new file mode 100644 index 00000000..965faf02 --- /dev/null +++ b/scripts/quality/assert_coverage_100.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import re +import sys +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path + + +@dataclass +class CoverageStats: + name: str + path: str + covered: int + total: int + + @property + def percent(self) -> float: + if self.total <= 0: + return 100.0 + return (self.covered / self.total) * 100.0 + + +_PAIR_RE = re.compile(r"^(?P[^=]+)=(?P.+)$") +_XML_LINES_VALID_RE = re.compile(r'lines-valid="([0-9]+(?:\\.[0-9]+)?)"') +_XML_LINES_COVERED_RE = re.compile(r'lines-covered="([0-9]+(?:\\.[0-9]+)?)"') +_XML_LINE_HITS_RE = re.compile(r"]*\\bhits=\"([0-9]+(?:\\.[0-9]+)?)\"") + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Assert 100% coverage for all declared components.") + parser.add_argument("--xml", action="append", default=[], help="Coverage XML input: name=path") + parser.add_argument("--lcov", action="append", default=[], help="LCOV input: name=path") + parser.add_argument("--out-json", default="coverage-100/coverage.json", help="Output JSON path") + parser.add_argument("--out-md", default="coverage-100/coverage.md", help="Output markdown path") + return parser.parse_args() + + +def parse_named_path(value: str) -> tuple[str, Path]: + match = _PAIR_RE.match(value.strip()) + if not match: + raise ValueError(f"Invalid input '{value}'. 
Expected format: name=path") + return match.group("name").strip(), Path(match.group("path").strip()) + + +def parse_coverage_xml(name: str, path: Path) -> CoverageStats: + text = path.read_text(encoding="utf-8") + lines_valid_match = _XML_LINES_VALID_RE.search(text) + lines_covered_match = _XML_LINES_COVERED_RE.search(text) + + if lines_valid_match and lines_covered_match: + total = int(float(lines_valid_match.group(1))) + covered = int(float(lines_covered_match.group(1))) + return CoverageStats(name=name, path=str(path), covered=covered, total=total) + + total = 0 + covered = 0 + for hits_raw in _XML_LINE_HITS_RE.findall(text): + total += 1 + try: + if int(float(hits_raw)) > 0: + covered += 1 + except ValueError: + continue + + return CoverageStats(name=name, path=str(path), covered=covered, total=total) + + +def parse_lcov(name: str, path: Path) -> CoverageStats: + total = 0 + covered = 0 + + for raw in path.read_text(encoding="utf-8").splitlines(): + line = raw.strip() + if line.startswith("LF:"): + total += int(line.split(":", 1)[1]) + elif line.startswith("LH:"): + covered += int(line.split(":", 1)[1]) + + return CoverageStats(name=name, path=str(path), covered=covered, total=total) + + +def evaluate(stats: list[CoverageStats]) -> tuple[str, list[str]]: + findings: list[str] = [] + for item in stats: + if item.percent < 100.0: + findings.append(f"{item.name} coverage below 100%: {item.percent:.2f}% ({item.covered}/{item.total})") + + combined_total = sum(item.total for item in stats) + combined_covered = sum(item.covered for item in stats) + combined = 100.0 if combined_total <= 0 else (combined_covered / combined_total) * 100.0 + + if combined < 100.0: + findings.append(f"combined coverage below 100%: {combined:.2f}% ({combined_covered}/{combined_total})") + + status = "pass" if not findings else "fail" + return status, findings + + +def _render_md(payload: dict) -> str: + lines = [ + "# Coverage 100 Gate", + "", + f"- Status: `{payload['status']}`", + f"- 
Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Components", + ] + + for item in payload.get("components", []): + lines.append( + f"- `{item['name']}`: `{item['percent']:.2f}%` ({item['covered']}/{item['total']}) from `{item['path']}`" + ) + + if not payload.get("components"): + lines.append("- None") + + lines.extend(["", "## Findings"]) + findings = payload.get("findings") or [] + if findings: + lines.extend(f"- {finding}" for finding in findings) + else: + lines.append("- None") + + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + args = _parse_args() + + stats: list[CoverageStats] = [] + for item in args.xml: + name, path = parse_named_path(item) + stats.append(parse_coverage_xml(name, path)) + for item in args.lcov: + name, path = parse_named_path(item) + stats.append(parse_lcov(name, path)) + + if not stats: + raise SystemExit("No coverage files were provided; pass --xml and/or --lcov inputs.") + + status, findings = evaluate(stats) + payload = { + "status": status, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + "components": [ + { + "name": item.name, + "path": item.path, + "covered": item.covered, + "total": item.total, + "percent": item.percent, + } + for item in stats + ], + "findings": findings, + } + + try: + out_json = _safe_output_path(args.out_json, "coverage-100/coverage.json") + out_md = _safe_output_path(args.out_md, "coverage-100/coverage.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, 
exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + out_json.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(payload), encoding="utf-8") + print(out_md.read_text(encoding="utf-8"), end="") + + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/quality/check_codacy_zero.py b/scripts/quality/check_codacy_zero.py new file mode 100644 index 00000000..500faec3 --- /dev/null +++ b/scripts/quality/check_codacy_zero.py @@ -0,0 +1,198 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys +import urllib.error +import urllib.parse +import urllib.request +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +_SCRIPT_DIR = Path(__file__).resolve().parent +_HELPER_ROOT = _SCRIPT_DIR if (_SCRIPT_DIR / "security_helpers.py").exists() else _SCRIPT_DIR.parent +if str(_HELPER_ROOT) not in sys.path: + sys.path.insert(0, str(_HELPER_ROOT)) + +from security_helpers import normalize_https_url + + +TOTAL_KEYS = {"total", "totalItems", "total_items", "count", "hits", "open_issues"} +CODACY_API_BASE = "https://api.codacy.com" + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Assert Codacy has zero total open issues.") + parser.add_argument("--provider", default="gh", help="Organization provider, for example gh") + parser.add_argument("--owner", required=True, help="Repository owner") + parser.add_argument("--repo", required=True, help="Repository name") + parser.add_argument("--token", default="", help="Codacy API token (falls back to CODACY_API_TOKEN env)") + parser.add_argument("--out-json", default="codacy-zero/codacy.json", help="Output JSON path") + parser.add_argument("--out-md", default="codacy-zero/codacy.md", help="Output markdown path") + return parser.parse_args() + + +def _request_json(url: str, 
token: str, *, method: str = "GET", data: dict[str, Any] | None = None) -> dict[str, Any]: + safe_url = normalize_https_url(url, allowed_host_suffixes={"codacy.com"}).rstrip("/") + body = None + headers = { + "Accept": "application/json", + "api-token": token, + "User-Agent": "reframe-codacy-zero-gate", + } + if data is not None: + body = json.dumps(data).encode("utf-8") + headers["Content-Type"] = "application/json" + req = urllib.request.Request( + safe_url, + headers=headers, + method=method, + data=body, + ) + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read().decode("utf-8")) + + +def extract_total_open(payload: Any) -> int | None: + if isinstance(payload, dict): + for key, value in payload.items(): + if key in TOTAL_KEYS and isinstance(value, (int, float)): + return int(value) + + # common pagination structures + for key in ("pagination", "page", "meta"): + nested = payload.get(key) + total = extract_total_open(nested) + if total is not None: + return total + + for value in payload.values(): + total = extract_total_open(value) + if total is not None: + return total + + if isinstance(payload, list): + for item in payload: + total = extract_total_open(item) + if total is not None: + return total + + return None + + +def _render_md(payload: dict) -> str: + lines = [ + "# Codacy Zero Gate", + "", + f"- Status: `{payload['status']}`", + f"- Owner/repo: `{payload['owner']}/{payload['repo']}`", + f"- Open issues: `{payload.get('open_issues')}`", + f"- Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Findings", + ] + findings = payload.get("findings") or [] + if findings: + lines.extend(f"- {item}" for item in findings) + else: + lines.append("- None") + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = 
root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + import os + + args = _parse_args() + token = (args.token or os.environ.get("CODACY_API_TOKEN", "")).strip() + api_base = normalize_https_url(CODACY_API_BASE, allowed_hosts={"api.codacy.com"}).rstrip("/") + owner = urllib.parse.quote(args.owner.strip(), safe="") + repo = urllib.parse.quote(args.repo.strip(), safe="") + + findings: list[str] = [] + open_issues: int | None = None + + if not token: + findings.append("CODACY_API_TOKEN is missing.") + status = "fail" + else: + query = urllib.parse.urlencode({"limit": "1"}) + provider_candidates = [args.provider, "gh", "github"] + provider_candidates = list(dict.fromkeys(p for p in provider_candidates if p)) + + last_exc: Exception | None = None + for provider in provider_candidates: + url = ( + f"{api_base}/api/v3/analysis/organizations/{provider}/" + f"{owner}/repositories/{repo}/issues/search?{query}" + ) + try: + payload = _request_json(url, token, method="POST", data={}) + open_issues = extract_total_open(payload) + if open_issues is None: + findings.append("Codacy response did not include a parseable total issue count.") + elif open_issues != 0: + findings.append(f"Codacy reports {open_issues} open issues (expected 0).") + status = "pass" if not findings else "fail" + break + except urllib.error.HTTPError as exc: + last_exc = exc + if exc.code == 404: + continue + findings.append(f"Codacy API request failed: HTTP {exc.code}") + status = "fail" + break + except Exception as exc: # pragma: no cover - network/runtime surface + last_exc = exc + findings.append(f"Codacy API request failed: {exc}") + status = "fail" + break + else: + findings.append( + f"Codacy API endpoint was not found for provider(s): {', '.join(provider_candidates)}." 
+ ) + if last_exc is not None: + findings.append(f"Last Codacy API error: {last_exc}") + status = "fail" + + payload = { + "status": status, + "owner": args.owner, + "repo": args.repo, + "provider": args.provider, + "open_issues": open_issues, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + "findings": findings, + } + + try: + out_json = _safe_output_path(args.out_json, "codacy-zero/codacy.json") + out_md = _safe_output_path(args.out_md, "codacy-zero/codacy.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + out_json.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(payload), encoding="utf-8") + print(out_md.read_text(encoding="utf-8"), end="") + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/quality/check_deepscan_zero.py b/scripts/quality/check_deepscan_zero.py new file mode 100644 index 00000000..9f17a384 --- /dev/null +++ b/scripts/quality/check_deepscan_zero.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys +import urllib.request +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +_SCRIPT_DIR = Path(__file__).resolve().parent +_HELPER_ROOT = _SCRIPT_DIR if (_SCRIPT_DIR / "security_helpers.py").exists() else _SCRIPT_DIR.parent +if str(_HELPER_ROOT) not in sys.path: + sys.path.insert(0, str(_HELPER_ROOT)) + +from security_helpers import normalize_https_url + +TOTAL_KEYS = {"total", "totalItems", "total_items", "count", "hits", "open_issues"} + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Assert DeepScan has zero total open issues.") + parser.add_argument("--token", default="", help="DeepScan API token (falls back to 
DEEPSCAN_API_TOKEN env)") + parser.add_argument("--out-json", default="deepscan-zero/deepscan.json", help="Output JSON path") + parser.add_argument("--out-md", default="deepscan-zero/deepscan.md", help="Output markdown path") + return parser.parse_args() + + +def extract_total_open(payload: Any) -> int | None: + if isinstance(payload, dict): + for key, value in payload.items(): + if key in TOTAL_KEYS and isinstance(value, (int, float)): + return int(value) + for nested in payload.values(): + total = extract_total_open(nested) + if total is not None: + return total + elif isinstance(payload, list): + for nested in payload: + total = extract_total_open(nested) + if total is not None: + return total + return None + + +def _request_json(url: str, token: str) -> dict[str, Any]: + safe_url = normalize_https_url(url, allowed_host_suffixes={"deepscan.io"}) + req = urllib.request.Request( + safe_url, + headers={ + "Accept": "application/json", + "Authorization": f"Bearer {token}", + "User-Agent": "reframe-deepscan-zero-gate", + }, + method="GET", + ) + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read().decode("utf-8")) + + +def _render_md(payload: dict) -> str: + lines = [ + "# DeepScan Zero Gate", + "", + f"- Status: `{payload['status']}`", + f"- Open issues: `{payload.get('open_issues')}`", + f"- Source URL: `{payload.get('open_issues_url') or 'n/a'}`", + f"- Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Findings", + ] + findings = payload.get("findings") or [] + if findings: + lines.extend(f"- {item}" for item in findings) + else: + lines.append("- None") + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + 
except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + import os + + args = _parse_args() + token = (args.token or os.environ.get("DEEPSCAN_API_TOKEN", "")).strip() + open_issues_url = os.environ.get("DEEPSCAN_OPEN_ISSUES_URL", "").strip() + + findings: list[str] = [] + open_issues: int | None = None + + if not token: + findings.append("DEEPSCAN_API_TOKEN is missing.") + if not open_issues_url: + findings.append("DEEPSCAN_OPEN_ISSUES_URL is missing.") + else: + try: + open_issues_url = normalize_https_url( + open_issues_url, + allowed_host_suffixes={"deepscan.io"}, + ) + except ValueError as exc: + findings.append(str(exc)) + + status = "fail" + if not findings: + try: + payload = _request_json(open_issues_url, token) + open_issues = extract_total_open(payload) + if open_issues is None: + findings.append("DeepScan response did not include a parseable total issue count.") + elif open_issues != 0: + findings.append(f"DeepScan reports {open_issues} open issues (expected 0).") + status = "pass" if not findings else "fail" + except Exception as exc: # pragma: no cover - network/runtime surface + findings.append(f"DeepScan API request failed: {exc}") + status = "fail" + + payload = { + "status": status, + "open_issues": open_issues, + "open_issues_url": open_issues_url, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + "findings": findings, + } + + try: + out_json = _safe_output_path(args.out_json, "deepscan-zero/deepscan.json") + out_md = _safe_output_path(args.out_md, "deepscan-zero/deepscan.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + out_json.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(payload), encoding="utf-8") + 
print(out_md.read_text(encoding="utf-8"), end="") + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/quality/check_quality_secrets.py b/scripts/quality/check_quality_secrets.py new file mode 100644 index 00000000..8e896f30 --- /dev/null +++ b/scripts/quality/check_quality_secrets.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import os +import sys +from datetime import datetime, timezone +from pathlib import Path + +DEFAULT_REQUIRED_SECRETS = [ + "SONAR_TOKEN", + "CODACY_API_TOKEN", + "CODECOV_TOKEN", + "SNYK_TOKEN", + "SENTRY_AUTH_TOKEN", + "APPLITOOLS_API_KEY", + "PERCY_TOKEN", + "BROWSERSTACK_USERNAME", + "BROWSERSTACK_ACCESS_KEY", +] + +DEFAULT_REQUIRED_VARS = [ + "SENTRY_ORG", + "SENTRY_PROJECT_BACKEND", + "SENTRY_PROJECT_WEB", +] + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Validate required quality-gate secrets/variables are configured.") + parser.add_argument("--required-secret", action="append", default=[], help="Additional required secret env var name") + parser.add_argument("--required-var", action="append", default=[], help="Additional required variable env var name") + parser.add_argument("--out-json", default="quality-secrets/secrets.json", help="Output JSON path") + parser.add_argument("--out-md", default="quality-secrets/secrets.md", help="Output markdown path") + return parser.parse_args() + + +def _dedupe(items: list[str]) -> list[str]: + seen: set[str] = set() + out: list[str] = [] + for item in items: + key = str(item or "").strip() + if not key or key in seen: + continue + seen.add(key) + out.append(key) + return out + + +def evaluate_env(required_secrets: list[str], required_vars: list[str]) -> dict[str, list[str]]: + missing_secrets = [name for name in required_secrets if not str(os.environ.get(name, "")).strip()] + missing_vars = [name for name in required_vars 
if not str(os.environ.get(name, "")).strip()] + present_secrets = [name for name in required_secrets if name not in missing_secrets] + present_vars = [name for name in required_vars if name not in missing_vars] + return { + "missing_secrets": missing_secrets, + "missing_vars": missing_vars, + "present_secrets": present_secrets, + "present_vars": present_vars, + } + + +def _render_md(payload: dict) -> str: + lines = [ + "# Quality Secrets Preflight", + "", + f"- Status: `{payload['status']}`", + f"- Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Missing secrets", + ] + missing_secrets = payload.get("missing_secrets") or [] + if missing_secrets: + lines.extend(f"- `{name}`" for name in missing_secrets) + else: + lines.append("- None") + + lines.extend(["", "## Missing variables"]) + missing_vars = payload.get("missing_vars") or [] + if missing_vars: + lines.extend(f"- `{name}`" for name in missing_vars) + else: + lines.append("- None") + + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + args = _parse_args() + required_secrets = _dedupe(DEFAULT_REQUIRED_SECRETS + list(args.required_secret or [])) + required_vars = _dedupe(DEFAULT_REQUIRED_VARS + list(args.required_var or [])) + + result = evaluate_env(required_secrets, required_vars) + status = "pass" if not result["missing_secrets"] and not result["missing_vars"] else "fail" + payload = { + "status": status, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + "required_secrets": required_secrets, + "required_vars": required_vars, + 
**result, + } + + try: + out_json = _safe_output_path(args.out_json, "quality-secrets/secrets.json") + out_md = _safe_output_path(args.out_md, "quality-secrets/secrets.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + + out_json.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(payload), encoding="utf-8") + print(out_md.read_text(encoding="utf-8"), end="") + + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/quality/check_required_checks.py b/scripts/quality/check_required_checks.py new file mode 100644 index 00000000..4fd83e95 --- /dev/null +++ b/scripts/quality/check_required_checks.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import os +import sys +import time +import urllib.parse +import urllib.request +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Wait for required GitHub check contexts and assert they are successful.") + parser.add_argument("--repo", required=True, help="owner/repo") + parser.add_argument("--sha", required=True, help="commit SHA") + parser.add_argument("--required-context", action="append", default=[], help="Required context name") + parser.add_argument("--timeout-seconds", type=int, default=900) + parser.add_argument("--poll-seconds", type=int, default=20) + parser.add_argument("--out-json", default="quality-zero-gate/required-checks.json") + parser.add_argument("--out-md", default="quality-zero-gate/required-checks.md") + return parser.parse_args() + + +def _api_get(repo: str, path: str, token: str) -> dict[str, Any]: + url = 
f"https://api.github.com/repos/{repo}/{path.lstrip('/')}" + req = urllib.request.Request( + url, + headers={ + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {token}", + "X-GitHub-Api-Version": "2022-11-28", + "User-Agent": "reframe-quality-zero-gate", + }, + method="GET", + ) + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read().decode("utf-8")) + + +def _collect_contexts(check_runs_payload: dict[str, Any], status_payload: dict[str, Any]) -> dict[str, dict[str, str]]: + contexts: dict[str, dict[str, str]] = {} + + for run in check_runs_payload.get("check_runs", []) or []: + name = str(run.get("name") or "").strip() + if not name: + continue + contexts[name] = { + "state": str(run.get("status") or ""), + "conclusion": str(run.get("conclusion") or ""), + "source": "check_run", + } + + for status in status_payload.get("statuses", []) or []: + name = str(status.get("context") or "").strip() + if not name: + continue + contexts[name] = { + "state": str(status.get("state") or ""), + "conclusion": str(status.get("state") or ""), + "source": "status", + } + + return contexts + + +def _evaluate(required: list[str], contexts: dict[str, dict[str, str]]) -> tuple[str, list[str], list[str]]: + missing: list[str] = [] + failed: list[str] = [] + + for context in required: + observed = contexts.get(context) + if not observed: + missing.append(context) + continue + + source = observed.get("source") + if source == "check_run": + state = observed.get("state") + conclusion = observed.get("conclusion") + if state != "completed": + failed.append(f"{context}: status={state}") + elif conclusion != "success": + failed.append(f"{context}: conclusion={conclusion}") + else: + conclusion = observed.get("conclusion") + if conclusion != "success": + failed.append(f"{context}: state={conclusion}") + + status = "pass" if not missing and not failed else "fail" + return status, missing, failed + + +def _render_md(payload: dict) -> str: + lines 
= [ + "# Quality Zero Gate - Required Contexts", + "", + f"- Status: `{payload['status']}`", + f"- Repo/SHA: `{payload['repo']}@{payload['sha']}`", + f"- Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Missing contexts", + ] + + missing = payload.get("missing") or [] + if missing: + lines.extend(f"- `{name}`" for name in missing) + else: + lines.append("- None") + + lines.extend(["", "## Failed contexts"]) + failed = payload.get("failed") or [] + if failed: + lines.extend(f"- {entry}" for entry in failed) + else: + lines.append("- None") + + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + args = _parse_args() + token = (os.environ.get("GITHUB_TOKEN", "") or os.environ.get("GH_TOKEN", "")).strip() + required = [item.strip() for item in args.required_context if item.strip()] + + if not required: + raise SystemExit("At least one --required-context is required") + if not token: + raise SystemExit("GITHUB_TOKEN or GH_TOKEN is required") + + deadline = time.time() + max(args.timeout_seconds, 1) + + final_payload: dict[str, Any] | None = None + while time.time() <= deadline: + check_runs = _api_get(args.repo, f"commits/{args.sha}/check-runs?per_page=100", token) + statuses = _api_get(args.repo, f"commits/{args.sha}/status", token) + contexts = _collect_contexts(check_runs, statuses) + status, missing, failed = _evaluate(required, contexts) + + final_payload = { + "status": status, + "repo": args.repo, + "sha": args.sha, + "required": required, + "missing": missing, + "failed": failed, + 
"contexts": contexts, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + } + + if status == "pass": + break + + # wait only while there are missing contexts or in-progress check-runs + in_progress = any(v.get("state") != "completed" for v in contexts.values() if v.get("source") == "check_run") + if not missing and not in_progress: + break + time.sleep(max(args.poll_seconds, 1)) + + if final_payload is None: + raise SystemExit("No payload collected") + + try: + out_json = _safe_output_path(args.out_json, "quality-zero-gate/required-checks.json") + out_md = _safe_output_path(args.out_md, "quality-zero-gate/required-checks.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + out_json.write_text(json.dumps(final_payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(final_payload), encoding="utf-8") + print(out_md.read_text(encoding="utf-8"), end="") + + return 0 if final_payload["status"] == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/quality/check_sentry_zero.py b/scripts/quality/check_sentry_zero.py new file mode 100644 index 00000000..0614eae0 --- /dev/null +++ b/scripts/quality/check_sentry_zero.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys +import urllib.parse +import urllib.request +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +_SCRIPT_DIR = Path(__file__).resolve().parent +_HELPER_ROOT = _SCRIPT_DIR if (_SCRIPT_DIR / "security_helpers.py").exists() else _SCRIPT_DIR.parent +if str(_HELPER_ROOT) not in sys.path: + sys.path.insert(0, str(_HELPER_ROOT)) + +from security_helpers import normalize_https_url + +SENTRY_API_BASE = "https://sentry.io/api/0" + + +def _parse_args() -> argparse.Namespace: + parser = 
argparse.ArgumentParser(description="Assert Sentry has zero unresolved issues for configured projects.") + parser.add_argument("--org", default="", help="Sentry org slug (falls back to SENTRY_ORG env)") + parser.add_argument( + "--project", + action="append", + default=[], + help="Project slug (repeatable, falls back to SENTRY_PROJECT_BACKEND/SENTRY_PROJECT_WEB env)", + ) + parser.add_argument("--token", default="", help="Sentry auth token (falls back to SENTRY_AUTH_TOKEN env)") + parser.add_argument("--out-json", default="sentry-zero/sentry.json", help="Output JSON path") + parser.add_argument("--out-md", default="sentry-zero/sentry.md", help="Output markdown path") + return parser.parse_args() + + +def _request(url: str, token: str) -> tuple[list[Any], dict[str, str]]: + safe_url = normalize_https_url(url, allowed_host_suffixes={"sentry.io"}) + req = urllib.request.Request( + safe_url, + headers={ + "Accept": "application/json", + "Authorization": f"Bearer {token}", + "User-Agent": "reframe-sentry-zero-gate", + }, + method="GET", + ) + with urllib.request.urlopen(req, timeout=30) as resp: + body = json.loads(resp.read().decode("utf-8")) + headers = {k.lower(): v for k, v in resp.headers.items()} + if not isinstance(body, list): + raise RuntimeError("Unexpected Sentry response payload") + return body, headers + + +def _hits_from_headers(headers: dict[str, str]) -> int | None: + raw = headers.get("x-hits") + if not raw: + return None + try: + return int(raw) + except ValueError: + return None + + +def _render_md(payload: dict) -> str: + lines = [ + "# Sentry Zero Gate", + "", + f"- Status: `{payload['status']}`", + f"- Org: `{payload.get('org')}`", + f"- Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Project results", + ] + + for item in payload.get("projects", []): + lines.append(f"- `{item['project']}` unresolved=`{item['unresolved']}`") + + if not payload.get("projects"): + lines.append("- None") + + lines.extend(["", "## Findings"]) + findings = 
payload.get("findings") or [] + if findings: + lines.extend(f"- {item}" for item in findings) + else: + lines.append("- None") + + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + import os + + args = _parse_args() + token = (args.token or os.environ.get("SENTRY_AUTH_TOKEN", "")).strip() + org = (args.org or os.environ.get("SENTRY_ORG", "")).strip() + api_base = normalize_https_url(SENTRY_API_BASE, allowed_hosts={"sentry.io"}).rstrip("/") + + projects = [p for p in args.project if p] + if not projects: + for env_name in ("SENTRY_PROJECT_BACKEND", "SENTRY_PROJECT_WEB"): + value = str(os.environ.get(env_name, "")).strip() + if value: + projects.append(value) + + findings: list[str] = [] + project_results: list[dict[str, Any]] = [] + + if not token: + findings.append("SENTRY_AUTH_TOKEN is missing.") + if not org: + findings.append("SENTRY_ORG is missing.") + if not projects: + findings.append("No Sentry projects configured (SENTRY_PROJECT_BACKEND/SENTRY_PROJECT_WEB).") + + status = "fail" + if not findings: + try: + for project in projects: + query = urllib.parse.urlencode({"query": "is:unresolved", "limit": "1"}) + org_slug = urllib.parse.quote(org, safe="") + project_slug = urllib.parse.quote(project, safe="") + url = f"{api_base}/projects/{org_slug}/{project_slug}/issues/?{query}" + issues, headers = _request(url, token) + unresolved = _hits_from_headers(headers) + if unresolved is None: + unresolved = len(issues) + if unresolved >= 1: + findings.append( + f"Sentry project {project} returned unresolved 
issues but no X-Hits header for exact totals." + ) + if unresolved != 0: + findings.append(f"Sentry project {project} has {unresolved} unresolved issues (expected 0).") + project_results.append({"project": project, "unresolved": unresolved}) + + status = "pass" if not findings else "fail" + except Exception as exc: # pragma: no cover - network/runtime surface + findings.append(f"Sentry API request failed: {exc}") + status = "fail" + + payload = { + "status": status, + "org": org, + "projects": project_results, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + "findings": findings, + } + + try: + out_json = _safe_output_path(args.out_json, "sentry-zero/sentry.json") + out_md = _safe_output_path(args.out_md, "sentry-zero/sentry.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + out_json.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(payload), encoding="utf-8") + print(out_md.read_text(encoding="utf-8"), end="") + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/quality/check_sonar_zero.py b/scripts/quality/check_sonar_zero.py new file mode 100644 index 00000000..6b237418 --- /dev/null +++ b/scripts/quality/check_sonar_zero.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import base64 +import json +import sys +import urllib.parse +import urllib.request +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +_SCRIPT_DIR = Path(__file__).resolve().parent +_HELPER_ROOT = _SCRIPT_DIR if (_SCRIPT_DIR / "security_helpers.py").exists() else _SCRIPT_DIR.parent +if str(_HELPER_ROOT) not in sys.path: + sys.path.insert(0, str(_HELPER_ROOT)) + +from security_helpers import normalize_https_url + +SONAR_API_BASE = 
"https://sonarcloud.io" + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Assert SonarCloud has zero open issues and a passing quality gate.") + parser.add_argument("--project-key", required=True, help="Sonar project key") + parser.add_argument("--token", default="", help="Sonar token (falls back to SONAR_TOKEN env)") + parser.add_argument("--branch", default="", help="Optional branch scope") + parser.add_argument("--pull-request", default="", help="Optional PR scope") + parser.add_argument("--out-json", default="sonar-zero/sonar.json", help="Output JSON path") + parser.add_argument("--out-md", default="sonar-zero/sonar.md", help="Output markdown path") + return parser.parse_args() + + +def _auth_header(token: str) -> str: + raw = f"{token}:".encode("utf-8") + return "Basic " + base64.b64encode(raw).decode("ascii") + + +def _request_json(url: str, auth_header: str) -> dict[str, Any]: + safe_url = normalize_https_url(url, allowed_host_suffixes={"sonarcloud.io"}).rstrip("/") + request = urllib.request.Request( + safe_url, + headers={ + "Accept": "application/json", + "Authorization": auth_header, + "User-Agent": "reframe-sonar-zero-gate", + }, + method="GET", + ) + with urllib.request.urlopen(request, timeout=30) as resp: + return json.loads(resp.read().decode("utf-8")) + + +def _render_md(payload: dict) -> str: + lines = [ + "# Sonar Zero Gate", + "", + f"- Status: `{payload['status']}`", + f"- Project: `{payload['project_key']}`", + f"- Open issues: `{payload.get('open_issues')}`", + f"- Quality gate: `{payload.get('quality_gate')}`", + f"- Timestamp (UTC): `{payload['timestamp_utc']}`", + "", + "## Findings", + ] + findings = payload.get("findings") or [] + if findings: + lines.extend(f"- {item}" for item in findings) + else: + lines.append("- None") + return "\n".join(lines) + "\n" + + +def _safe_output_path(raw: str, fallback: str, base: Path | None = None) -> Path: + root = (base or Path.cwd()).resolve() + candidate = 
Path((raw or "").strip() or fallback).expanduser() + if not candidate.is_absolute(): + candidate = root / candidate + resolved = candidate.resolve(strict=False) + try: + resolved.relative_to(root) + except ValueError as exc: + raise ValueError(f"Output path escapes workspace root: {candidate}") from exc + return resolved + + +def main() -> int: + import os + + args = _parse_args() + token = (args.token or os.environ.get("SONAR_TOKEN", "")).strip() + api_base = normalize_https_url(SONAR_API_BASE, allowed_hosts={"sonarcloud.io"}).rstrip("/") + + findings: list[str] = [] + open_issues: int | None = None + quality_gate: str | None = None + + if not token: + findings.append("SONAR_TOKEN is missing.") + status = "fail" + else: + auth = _auth_header(token) + try: + issues_query = { + "componentKeys": args.project_key, + "resolved": "false", + "ps": "1", + } + if args.branch: + issues_query["branch"] = args.branch + if args.pull_request: + issues_query["pullRequest"] = args.pull_request + + issues_url = f"{api_base}/api/issues/search?{urllib.parse.urlencode(issues_query)}" + issues_payload = _request_json(issues_url, auth) + paging = issues_payload.get("paging") or {} + open_issues = int(paging.get("total") or 0) + + gate_query = {"projectKey": args.project_key} + if args.branch: + gate_query["branch"] = args.branch + if args.pull_request: + gate_query["pullRequest"] = args.pull_request + gate_url = f"{api_base}/api/qualitygates/project_status?{urllib.parse.urlencode(gate_query)}" + gate_payload = _request_json(gate_url, auth) + project_status = (gate_payload.get("projectStatus") or {}) + quality_gate = str(project_status.get("status") or "UNKNOWN") + + if open_issues != 0: + findings.append(f"Sonar reports {open_issues} open issues (expected 0).") + if quality_gate != "OK": + findings.append(f"Sonar quality gate status is {quality_gate} (expected OK).") + + status = "pass" if not findings else "fail" + except Exception as exc: # pragma: no cover - network/runtime surface 
+ status = "fail" + findings.append(f"Sonar API request failed: {exc}") + + payload = { + "status": status, + "project_key": args.project_key, + "open_issues": open_issues, + "quality_gate": quality_gate, + "timestamp_utc": datetime.now(timezone.utc).isoformat(), + "findings": findings, + } + + try: + out_json = _safe_output_path(args.out_json, "sonar-zero/sonar.json") + out_md = _safe_output_path(args.out_md, "sonar-zero/sonar.md") + except ValueError as exc: + print(str(exc), file=sys.stderr) + return 1 + + out_json.parent.mkdir(parents=True, exist_ok=True) + out_md.parent.mkdir(parents=True, exist_ok=True) + out_json.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + out_md.write_text(_render_md(payload), encoding="utf-8") + print(out_md.read_text(encoding="utf-8"), end="") + + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/security_helpers.py b/scripts/security_helpers.py new file mode 100644 index 00000000..662d8851 --- /dev/null +++ b/scripts/security_helpers.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import ipaddress +from urllib.parse import urlparse, urlunparse + + +def normalize_https_url( + raw_url: str, + *, + allowed_hosts: set[str] | None = None, + allowed_host_suffixes: set[str] | None = None, + strip_query: bool = False, +) -> str: + """Validate user-provided URLs for CLI scripts. + + Rules: + - https scheme only, + - no embedded credentials, + - reject localhost/private/link-local IP targets, + - optional hostname allowlist. + - optional hostname suffix allowlist. 
+ """ + + parsed = urlparse((raw_url or "").strip()) + if parsed.scheme != "https": + raise ValueError(f"Only https URLs are allowed: {raw_url!r}") + if not parsed.hostname: + raise ValueError(f"URL is missing a hostname: {raw_url!r}") + if parsed.username or parsed.password: + raise ValueError(f"URL credentials are not allowed: {raw_url!r}") + + hostname = parsed.hostname.lower().strip(".") + if allowed_hosts is not None and hostname not in {host.lower().strip(".") for host in allowed_hosts}: + raise ValueError(f"URL host is not in allowlist: {hostname}") + if allowed_host_suffixes is not None: + suffixes = {suffix.lower().strip(".") for suffix in allowed_host_suffixes if suffix.strip(".")} + if suffixes and not any(hostname == suffix or hostname.endswith(f".{suffix}") for suffix in suffixes): + raise ValueError(f"URL host is not in suffix allowlist: {hostname}") + + try: + ip_value = ipaddress.ip_address(hostname) + except ValueError: + ip_value = None + + if ip_value is not None and ( + ip_value.is_private + or ip_value.is_loopback + or ip_value.is_link_local + or ip_value.is_reserved + or ip_value.is_multicast + ): + raise ValueError(f"Private or local addresses are not allowed: {hostname}") + + if hostname in {"localhost", "localhost.localdomain"}: + raise ValueError("Localhost URLs are not allowed.") + + sanitized = parsed._replace(fragment="", params="") + if strip_query: + sanitized = sanitized._replace(query="") + return urlunparse(sanitized) From 7906704575a0d915b5777cd6944c11fc3c6a4079 Mon Sep 17 00:00:00 2001 From: Prekzursil Date: Tue, 3 Mar 2026 08:23:16 +0200 Subject: [PATCH 2/5] chore: align codecov yaml with strict org policy Co-authored-by: Codex --- codecov.yml | 54 +++++++++++++++++++++++++++++++++-------------------- 1 file changed, 34 insertions(+), 20 deletions(-) diff --git a/codecov.yml b/codecov.yml index 181a1ca5..1404be97 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,4 +1,10 @@ +codecov: + require_ci_to_pass: true + coverage: + 
precision: 2 + round: down + range: "100...100" status: project: default: @@ -8,26 +14,34 @@ coverage: default: target: 100% threshold: 0% + comment: layout: "reach,diff,flags,files" - behavior: default -flag_management: + +flags: + backend: + paths: + - backend/ + frontend: + paths: + - frontend/webcoder_ui/ + +component_management: default_rules: - carryforward: false - individual_flags: - - name: backend - statuses: - - type: project - target: 100% - threshold: 0% - - type: patch - target: 100% - threshold: 0% - - name: frontend - statuses: - - type: project - target: 100% - threshold: 0% - - type: patch - target: 100% - threshold: 0% + statuses: + - type: project + target: 100% + threshold: 0% + individual_components: + - component_id: backend + name: backend + paths: + - backend/ + - component_id: frontend + name: frontend + paths: + - frontend/webcoder_ui/ + +bundle_analysis: + warning_threshold: "0%" + status: informational From cb94c6ea76e1430af2ed97f69dacdc517b46cc72 Mon Sep 17 00:00:00 2001 From: Prekzursil Date: Tue, 3 Mar 2026 08:35:27 +0200 Subject: [PATCH 3/5] chore: relax unavailable secret requirements and deepscan API dependency Co-authored-by: Codex --- .github/workflows/codecov-analytics.yml | 6 ------ .github/workflows/deepscan-zero.yml | 13 +++++++++---- .github/workflows/quality-zero-gate.yml | 4 ---- scripts/quality/check_quality_secrets.py | 7 +------ 4 files changed, 10 insertions(+), 20 deletions(-) diff --git a/.github/workflows/codecov-analytics.yml b/.github/workflows/codecov-analytics.yml index fd348322..fb094f77 100644 --- a/.github/workflows/codecov-analytics.yml +++ b/.github/workflows/codecov-analytics.yml @@ -24,11 +24,6 @@ jobs: - uses: actions/setup-node@v6 with: node-version: '20' - - name: Validate Codecov token - run: | - if [ -z "${CODECOV_TOKEN}" ]; then - echo "Missing CODECOV_TOKEN" >&2 - exit 1 fi - name: Backend coverage @@ -44,7 +39,6 @@ jobs: - name: Upload coverage to Codecov uses: codecov/codecov-action@v5 with: - 
token: ${{ secrets.CODECOV_TOKEN }} files: backend/coverage.xml,frontend/webcoder_ui/coverage/lcov.info flags: backend,frontend fail_ci_if_error: true diff --git a/.github/workflows/deepscan-zero.yml b/.github/workflows/deepscan-zero.yml index 0f2569bb..824afdb7 100644 --- a/.github/workflows/deepscan-zero.yml +++ b/.github/workflows/deepscan-zero.yml @@ -9,19 +9,24 @@ on: permissions: contents: read + checks: read jobs: deepscan-zero: name: DeepScan Zero runs-on: ubuntu-latest env: - DEEPSCAN_API_TOKEN: ${{ secrets.DEEPSCAN_API_TOKEN }} - DEEPSCAN_OPEN_ISSUES_URL: ${{ vars.DEEPSCAN_OPEN_ISSUES_URL }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v6 - - name: Assert DeepScan zero-open gate + - name: Assert DeepScan vendor check is green run: | - python3 scripts/quality/check_deepscan_zero.py \ + python3 scripts/quality/check_required_checks.py \ + --repo "${GITHUB_REPOSITORY}" \ + --sha "${GITHUB_SHA}" \ + --required-context "DeepScan" \ + --timeout-seconds 1200 \ + --poll-seconds 20 \ --out-json "deepscan-zero/deepscan.json" \ --out-md "deepscan-zero/deepscan.md" - name: Upload DeepScan artifacts diff --git a/.github/workflows/quality-zero-gate.yml b/.github/workflows/quality-zero-gate.yml index 1a0d0b87..7ca978a8 100644 --- a/.github/workflows/quality-zero-gate.yml +++ b/.github/workflows/quality-zero-gate.yml @@ -30,10 +30,6 @@ jobs: - name: Run quality secrets preflight run: | python3 scripts/quality/check_quality_secrets.py \ - --required-secret DEEPSCAN_API_TOKEN \ - --required-var SENTRY_PROJECT \ - --required-var DEEPSCAN_POLICY_MODE \ - --required-var DEEPSCAN_OPEN_ISSUES_URL \ --out-json quality-secrets/secrets.json \ --out-md quality-secrets/secrets.md - name: Upload secrets preflight artifact diff --git a/scripts/quality/check_quality_secrets.py b/scripts/quality/check_quality_secrets.py index 8e896f30..f6fa1722 100644 --- a/scripts/quality/check_quality_secrets.py +++ b/scripts/quality/check_quality_secrets.py @@ -11,19 
+11,14 @@ DEFAULT_REQUIRED_SECRETS = [ "SONAR_TOKEN", "CODACY_API_TOKEN", - "CODECOV_TOKEN", "SNYK_TOKEN", "SENTRY_AUTH_TOKEN", "APPLITOOLS_API_KEY", - "PERCY_TOKEN", - "BROWSERSTACK_USERNAME", - "BROWSERSTACK_ACCESS_KEY", ] DEFAULT_REQUIRED_VARS = [ "SENTRY_ORG", - "SENTRY_PROJECT_BACKEND", - "SENTRY_PROJECT_WEB", + "SENTRY_PROJECT", ] From 4409906d9e73a235a984d0645941014a570466e2 Mon Sep 17 00:00:00 2001 From: Prekzursil Date: Tue, 3 Mar 2026 08:37:00 +0200 Subject: [PATCH 4/5] fix: repair codecov analytics workflow syntax Co-authored-by: Codex --- .github/workflows/codecov-analytics.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/codecov-analytics.yml b/.github/workflows/codecov-analytics.yml index fb094f77..27eb5522 100644 --- a/.github/workflows/codecov-analytics.yml +++ b/.github/workflows/codecov-analytics.yml @@ -24,7 +24,6 @@ jobs: - uses: actions/setup-node@v6 with: node-version: '20' - fi - name: Backend coverage run: | From 912ddfaf686114d2225c2a6050a2140bc69ac645 Mon Sep 17 00:00:00 2001 From: Prekzursil Date: Tue, 3 Mar 2026 09:07:23 +0200 Subject: [PATCH 5/5] ci: harden snyk zero target detection Co-authored-by: Codex --- .github/workflows/snyk-zero.yml | 156 +++++++++++++++++++++++++++++++- 1 file changed, 155 insertions(+), 1 deletion(-) diff --git a/.github/workflows/snyk-zero.yml b/.github/workflows/snyk-zero.yml index 5ccda58f..35b20edc 100644 --- a/.github/workflows/snyk-zero.yml +++ b/.github/workflows/snyk-zero.yml @@ -16,21 +16,175 @@ jobs: runs-on: ubuntu-latest env: SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + REPO_NAME: ${{ github.event.repository.name }} steps: - uses: actions/checkout@v6 + - name: Set up Node uses: actions/setup-node@v6 with: node-version: '20' + + - name: Set up .NET (SWFOC only) + if: ${{ github.event.repository.name == 'SWFOC-Mod-Menu' }} + uses: actions/setup-dotnet@v4 + with: + dotnet-version: '8.0.x' + - name: Install Snyk CLI run: npm install -g snyk + - name: Validate token run: | if [ -z 
"${SNYK_TOKEN}" ]; then echo "Missing SNYK_TOKEN" >&2 exit 1 fi + + - name: Detect OSS scan strategy + id: detect + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + : > artifacts/detected-targets.txt + + repo="${REPO_NAME}" + has_target=false + oss_mode="skipped" + skip_reason="unsupported_target_files" + oss_cmd="" + + add_target() { + printf '%s\n' "$1" >> artifacts/detected-targets.txt + } + + if [ "$repo" = "env-inspector" ]; then + if [ -f requirements-build.txt ]; then + has_target=true + oss_mode="executed" + skip_reason="" + oss_cmd="snyk test --file=requirements-build.txt --package-manager=pip --severity-threshold=low" + add_target "requirements-build.txt" + fi + elif [ "$repo" = "SWFOC-Mod-Menu" ]; then + if [ -f SwfocTrainer.sln ]; then + dotnet restore SwfocTrainer.sln + fi + if find . -name project.assets.json -print -quit | grep -q .; then + has_target=true + oss_mode="executed" + skip_reason="" + oss_cmd="snyk test --all-projects --severity-threshold=low" + add_target "project.assets.json" + fi + elif [ "$repo" = "Star-Wars-Galactic-Battlegrounds-Save-Game-Editor" ]; then + has_target=false + oss_mode="skipped" + skip_reason="unsupported_target_files" + else + if find . -maxdepth 5 \ + \( -name package-lock.json -o -name yarn.lock -o -name pnpm-lock.yaml -o -name package.json \ + -o -name requirements.txt -o -name 'requirements-*.txt' -o -name pyproject.toml \ + -o -name Pipfile -o -name poetry.lock -o -name go.mod -o -name pom.xml \ + -o -name build.gradle -o -name build.gradle.kts -o -name gradle.lockfile \ + -o -name '*.csproj' -o -name '*.sln' -o -name Gemfile -o -name Cargo.toml \ + \) -print -quit | grep -q .; then + has_target=true + oss_mode="executed" + skip_reason="" + oss_cmd="snyk test --all-projects --severity-threshold=low" + while IFS= read -r f; do + add_target "$f" + done < <(find . 
-maxdepth 5 \ + \( -name package-lock.json -o -name yarn.lock -o -name pnpm-lock.yaml -o -name package.json \ + -o -name requirements.txt -o -name 'requirements-*.txt' -o -name pyproject.toml \ + -o -name Pipfile -o -name poetry.lock -o -name go.mod -o -name pom.xml \ + -o -name build.gradle -o -name build.gradle.kts -o -name gradle.lockfile \ + -o -name '*.csproj' -o -name '*.sln' -o -name Gemfile -o -name Cargo.toml \ + \) -print | sort | head -n 100) + fi + fi + + echo "has_target=${has_target}" >> "$GITHUB_OUTPUT" + echo "oss_mode=${oss_mode}" >> "$GITHUB_OUTPUT" + echo "skip_reason=${skip_reason}" >> "$GITHUB_OUTPUT" + { + echo 'oss_cmd<<EOF' + echo "${oss_cmd}" + echo 'EOF' + } >> "$GITHUB_OUTPUT" + - name: Snyk OSS test - run: snyk test --all-projects --severity-threshold=low + id: oss + if: ${{ steps.detect.outputs.has_target == 'true' }} + continue-on-error: true + run: ${{ steps.detect.outputs.oss_cmd }} + - name: Snyk code test + id: code + continue-on-error: true run: snyk code test --severity-threshold=low + + - name: Build snyk-oss-mode.json and enforce failure policy + id: finalize + if: ${{ always() }} + env: + HAS_TARGET: ${{ steps.detect.outputs.has_target }} + OSS_MODE: ${{ steps.detect.outputs.oss_mode }} + SKIP_REASON: ${{ steps.detect.outputs.skip_reason }} + OSS_OUTCOME: ${{ steps.oss.outcome }} + CODE_OUTCOME: ${{ steps.code.outcome }} + run: | + python3 - <<'PY' + import json + import os + from pathlib import Path + + detected_path = Path('artifacts/detected-targets.txt') + detected = [] + if detected_path.exists(): + for line in detected_path.read_text().splitlines(): + line = line.strip() + if line: + detected.append(line) + + has_target = os.getenv('HAS_TARGET', '').lower() == 'true' + oss_mode = os.getenv('OSS_MODE') or ('executed' if has_target else 'skipped') + skip_reason = os.getenv('SKIP_REASON', '') + oss_outcome = os.getenv('OSS_OUTCOME') or ('skipped' if not has_target else 'failure') + code_outcome = os.getenv('CODE_OUTCOME') or 'failure' + + result = 'pass' + if
has_target and oss_outcome != 'success': + result = 'fail' + if code_outcome != 'success': + result = 'fail' + + payload = { + 'oss_mode': oss_mode, + 'detected_targets': detected, + 'skip_reason': skip_reason, + 'code_scan_executed': True, + 'oss_outcome': oss_outcome, + 'code_outcome': code_outcome, + 'result': result, + } + + out = Path('artifacts/snyk-oss-mode.json') + out.parent.mkdir(parents=True, exist_ok=True) + out.write_text(json.dumps(payload, indent=2) + '\n') + print(json.dumps(payload, indent=2)) + + if result != 'pass': + raise SystemExit(1) + PY + + - name: Upload Snyk mode artifact + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: snyk-oss-mode + path: | + artifacts/snyk-oss-mode.json + artifacts/detected-targets.txt