diff --git a/.bazelrc b/.bazelrc index 0823a01..3a6ddac 100644 --- a/.bazelrc +++ b/.bazelrc @@ -1,2 +1,7 @@ common --registry=https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/ common --registry=https://bcr.bazel.build + +build --java_language_version=17 +build --tool_java_language_version=17 +build --java_runtime_version=remotejdk_17 +build --tool_java_runtime_version=remotejdk_17 diff --git a/.github/workflows/sbom_dependency_submission.yml b/.github/workflows/sbom_dependency_submission.yml new file mode 100644 index 0000000..0330998 --- /dev/null +++ b/.github/workflows/sbom_dependency_submission.yml @@ -0,0 +1,111 @@ +name: SBOM Dependency Submission +# Submit SBOM-derived dependency snapshot to GitHub Dependency Graph, +# enabling Dependabot vulnerability alerts for SBOM-declared packages. +# +# Requirements (configured by org/repo admin): +# - Dependency Graph must be enabled in repo Settings → Code security +# - Write permission on contents (for dependency-graph/snapshots) +# +# GitHub Dependency Submission API: +# https://docs.github.com/en/rest/dependency-graph/dependency-submission + +on: + workflow_call: + inputs: + sbom_target: + description: 'Bazel SBOM target to build (e.g. 
//:sbom_all)' + required: false + type: string + default: '//:sbom_all' + release_tag: + description: 'Version tag for the SBOM component_version' + required: false + type: string + default: 'dev' + +jobs: + sbom-dependency-submission: + name: Build SBOM and submit to Dependency Graph + runs-on: ubuntu-24.04 + permissions: + contents: write # Required for dependency-graph/snapshots API + + steps: + - name: Checkout + uses: actions/checkout@v4.2.2 + + - name: Setup Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + disk-cache: true + repository-cache: true + bazelisk-cache: true + + - name: Build SBOM + run: | + bazel build ${{ inputs.sbom_target }} \ + --define=component_version=${{ inputs.release_tag }} + + - name: Collect SPDX outputs + run: | + mkdir -p sbom_output + find bazel-bin -name "*.spdx.json" -exec cp {} sbom_output/ \; + echo "SBOM files collected:" + ls -lh sbom_output/ || echo "(none)" + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Convert SPDX → GitHub Dependency Snapshot + run: | + mkdir -p snapshots + for spdx_file in sbom_output/*.spdx.json; do + [ -f "$spdx_file" ] || continue + base=$(basename "$spdx_file" .spdx.json) + correlator="${{ github.workflow }}_${base}" + echo "Converting $spdx_file (correlator: $correlator)" + python3 sbom/scripts/spdx_to_github_snapshot.py \ + --input "$spdx_file" \ + --output "snapshots/${base}_snapshot.json" \ + --sha "${{ github.sha }}" \ + --ref "${{ github.ref }}" \ + --job-correlator "$correlator" \ + --job-id "${{ github.run_id }}" + done + + - name: Submit snapshots to GitHub Dependency Graph + env: + GH_TOKEN: ${{ github.token }} + run: | + repo="${{ github.repository }}" + submitted=0 + failed=0 + for snapshot_file in snapshots/*_snapshot.json; do + [ -f "$snapshot_file" ] || continue + echo "Submitting $snapshot_file to $repo ..." 
+ http_code=$(gh api \ + "repos/${repo}/dependency-graph/snapshots" \ + --method POST \ + --input "$snapshot_file" \ + --jq '.message // "submitted"' \ + 2>&1) && { + echo " OK: $http_code" + submitted=$((submitted + 1)) + } || { + echo " FAILED: $http_code" + failed=$((failed + 1)) + } + done + echo "---" + echo "Submitted: $submitted, Failed: $failed" + [ "$failed" -eq 0 ] || exit 1 + + - name: Upload snapshot artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: dependency-snapshots-${{ inputs.release_tag }} + path: snapshots/ + retention-days: 30 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7354275..b43c3ec 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -23,3 +23,6 @@ jobs: - name: Run coverage module tests run: | bazel test //coverage/tests:all + - name: Run rules_score tests + run: | + bazel test //bazel/rules/rules_score/... diff --git a/MODULE.bazel b/MODULE.bazel index 2f5d939..e6c032d 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -13,7 +13,7 @@ module( name = "score_tooling", - version = "1.1.0", + version = "0.0.0", compatibility_level = 1, ) @@ -28,9 +28,7 @@ bazel_dep(name = "rules_java", version = "8.15.1") bazel_dep(name = "rules_rust", version = "0.61.0") bazel_dep(name = "rules_multitool", version = "1.9.0") bazel_dep(name = "score_rust_policies", version = "0.0.2") - -bazel_dep(name = "bazel_skylib", version = "1.7.1", dev_dependency = True) - +bazel_dep(name = "bazel_skylib", version = "1.7.1") bazel_dep(name = "buildifier_prebuilt", version = "8.2.0.2") ############################################################################### @@ -95,3 +93,8 @@ multitool.hub( lockfile = "tools/yamlfmt.lock.json", ) use_repo(multitool, "yamlfmt_hub") + +bazel_dep(name = "score_docs_as_code", version = "3.0.1", dev_dependency = True) + +# bazel_dep(name = "score_platform", version = "0.5.0") +bazel_dep(name = "score_process", version = "1.3.2") diff --git a/README.md 
b/README.md index bded050..571b805 100644 --- a/README.md +++ b/README.md @@ -58,14 +58,15 @@ bazel run //:rust_coverage -- --min-line-coverage 80 ## Upgrading from separate MODULES -If you are still using separate module imports and want to upgrade to the new version. +If you are still using separate module imports and want to upgrade to the new version. Here are two examples to showcase how to do this. ``` load("@score_python_basics//:defs.bzl", "score_py_pytest") => load("@score_tooling//:defs.bzl", "score_py_pytest") load("@score_cr_checker//:cr_checker.bzl", "copyright_checker") => load("@score_tooling//:defs.bzl", "copyright_checker") ``` -All things inside of 'tooling' can now be imported from `@score_tooling//:defs.bzl`. + +All things inside of 'tooling' can now be imported from `@score_tooling//:defs.bzl`. The available import targets are: - score_virtualenv @@ -78,6 +79,7 @@ The available import targets are: - rust_coverage_report ## Format the tooling repository -```bash + +```bash bazel run //:format.fix ``` diff --git a/bazel/rules/rules_score/BUILD b/bazel/rules/rules_score/BUILD new file mode 100644 index 0000000..6b53f2f --- /dev/null +++ b/bazel/rules/rules_score/BUILD @@ -0,0 +1,53 @@ +load( + "//bazel/rules/rules_score:rules_score.bzl", + "sphinx_module", +) + +exports_files([ + "templates/conf.template.py", + "templates/seooc_index.template.rst", + "templates/unit.template.rst", + "templates/component.template.rst", +]) + +# HTML merge tool +py_binary( + name = "sphinx_html_merge", + srcs = ["src/sphinx_html_merge.py"], + main = "src/sphinx_html_merge.py", + visibility = ["//visibility:public"], +) + +# Sphinx build binary with all required dependencies +py_binary( + name = "score_build", + srcs = ["src/sphinx_wrapper.py"], + data = [], + env = { + "SOURCE_DIRECTORY": "", + "DATA": "", + "ACTION": "check", + }, + main = "src/sphinx_wrapper.py", + visibility = ["//visibility:public"], + deps = [ + "@score_docs_as_code//src:plantuml_for_python", + 
"@score_docs_as_code//src/extensions/score_sphinx_bundle", + ], +) + +sphinx_module( + name = "rules_score_doc", + srcs = glob( + [ + "docs/**/*.rst", + "docs/**/*.puml", + ], + allow_empty = True, + ), + index = "docs/index.rst", + visibility = ["//visibility:public"], + deps = [ + "@score_process//:score_process_module", + ], +) diff --git a/bazel/rules/rules_score/docs/index.rst b/bazel/rules/rules_score/docs/index.rst new file mode 100644 index 0000000..79769a4 --- /dev/null +++ b/bazel/rules/rules_score/docs/index.rst @@ -0,0 +1,444 @@ +SCORE Rules for Bazel +===================== + +This package provides Bazel build rules for defining and building SCORE documentation modules with integrated Sphinx-based HTML generation. + +.. contents:: Table of Contents + :depth: 2 + :local: + + +Overview +-------- + +The ``rules_score`` package provides Bazel rules for structuring and documenting safety-critical software following S-CORE process guidelines: + +**Documentation Rule:** + +- ``sphinx_module``: Generic rule for building Sphinx HTML documentation with dependency support + +**Artifact Rules:** + +- ``feature_requirements``: High-level feature specifications +- ``component_requirements``: Component-level requirements +- ``assumptions_of_use``: Safety-relevant operating conditions +- ``architectural_design``: Software architecture documentation +- ``safety_analysis``: Detailed safety analysis (FMEA, FTA) +- ``dependability_analysis``: Comprehensive safety analysis results + +**Structural Rules:** + +- ``unit``: Smallest testable software element (design + implementation + tests) +- ``component``: Collection of units providing specific functionality +- ``dependable_element``: Complete Safety Element out of Context (SEooC) with full documentation + +All rules support cross-module dependencies for automatic sphinx-needs integration and HTML merging. 
+ + +sphinx_module +------------- + +Builds Sphinx-based HTML documentation from RST source files with support for dependencies and cross-referencing. + +.. code-block:: python + + sphinx_module( + name = "my_docs", + srcs = glob(["docs/**/*.rst"]), + index = "docs/index.rst", + deps = ["@external_module//:docs"], + ) + +**Key Parameters:** + +- ``srcs``: RST/MD source files +- ``index``: Main index.rst file +- ``deps``: Other sphinx_module or dependable_element targets for cross-referencing +- ``sphinx``: Sphinx build binary (default: ``//bazel/rules/rules_score:score_build``) + +**Output:** ``/html/`` with merged dependency documentation + + +Artifact Rules +-------------- + +Artifact rules define S-CORE process work products. All provide ``SphinxSourcesInfo`` for documentation generation. + +**feature_requirements** + +.. code-block:: python + + feature_requirements( + name = "features", + srcs = ["docs/features.rst"], + ) + +**component_requirements** + +.. code-block:: python + + component_requirements( + name = "requirements", + srcs = ["docs/requirements.rst"], + ) + +**assumptions_of_use** + +.. code-block:: python + + assumptions_of_use( + name = "aous", + srcs = ["docs/assumptions.rst"], + ) + +**architectural_design** + +.. code-block:: python + + architectural_design( + name = "architecture", + static = ["docs/static_arch.rst"], + dynamic = ["docs/dynamic_arch.rst"], + ) + +**safety_analysis** + +.. code-block:: python + + safety_analysis( + name = "safety", + controlmeasures = ["docs/controls.rst"], + failuremodes = ["docs/failures.rst"], + fta = ["docs/fta.rst"], + arch_design = ":architecture", + ) + +**dependability_analysis** + +.. code-block:: python + + dependability_analysis( + name = "analysis", + arch_design = ":architecture", + dfa = ["docs/dfa.rst"], + safety_analysis = [":safety"], + ) + + +Structural Rules +---------------- + +**unit** + +Define the smallest testable software element. + +.. 
code-block:: python + + unit( + name = "my_unit", + unit_design = [":architecture"], + implementation = ["//src:lib"], + tests = ["//tests:unit_test"], + ) + +**component** + +Define a collection of units. + +.. code-block:: python + + component( + name = "my_component", + component_requirements = [":requirements"], + units = [":my_unit"], + implementation = ["//src:binary"], + tests = ["//tests:integration_test"], + ) + +**dependable_element** + +Define a complete SEooC with automatic documentation generation. + +.. code-block:: python + + dependable_element( + name = "my_seooc", + description = "My safety-critical component", + assumptions_of_use = [":aous"], + requirements = [":requirements"], + architectural_design = [":architecture"], + dependability_analysis = [":analysis"], + components = [":my_component"], + tests = ["//tests:system_test"], + deps = ["@platform//:platform_module"], + ) + +**Generated Targets:** + +- ````: Sphinx module with HTML documentation +- ``_needs``: Sphinx-needs JSON for cross-referencing +- ``_index``: Generated index.rst with artifact structure + + srcs = glob(["docs/**/*.rst"]), + index = "docs/index.rst", + deps = ["@external_module//:docs"], + ) + +**Key Parameters:** + +- ``srcs``: RST/MD source files +- ``index``: Main index.rst file +- ``deps``: Other sphinx_module or dependable_element targets for cross-referencing +- ``sphinx``: Sphinx build binary (default: ``//bazel/rules/rules_score:score_build``) + +**Output:** ``/html/`` with merged dependency documentation + + +Artifact Rules +-------------- + +Artifact rules define S-CORE process work products. All provide ``SphinxSourcesInfo`` for documentation generation. + +**feature_requirements** + +.. code-block:: python + + feature_requirements( + name = "features", + srcs = ["docs/features.rst"], + ) + +**component_requirements** + +.. 
code-block:: python + + component_requirements( + name = "requirements", + srcs = ["docs/requirements.rst"], + feature_requirement = [":features"], + ) + +**assumptions_of_use** + +.. code-block:: python + + assumptions_of_use( + name = "aous", + srcs = ["docs/assumptions.rst"], + ) + +**architectural_design** + +.. code-block:: python + + architectural_design( + name = "architecture", + static = ["docs/static_arch.rst"], + dynamic = ["docs/dynamic_arch.rst"], + ) + +**safety_analysis** + +.. code-block:: python + + safety_analysis( + name = "safety", + controlmeasures = ["docs/controls.rst"], + failuremodes = ["docs/failures.rst"], + fta = ["docs/fta.rst"], + arch_design = ":architecture", + ) + +**dependability_analysis** + +.. code-block:: python + + dependability_analysis( + name = "analysis", + arch_design = ":architecture", + dfa = ["docs/dfa.rst"], + safety_analysis = [":safety"], + ) + + +Structural Rules +---------------- + +**unit** + +Define the smallest testable software element. + +.. code-block:: python + + unit( + name = "my_unit", + unit_design = [":architecture"], + implementation = ["//src:lib"], + tests = ["//tests:unit_test"], + ) + +**component** + +Define a collection of units. + +.. code-block:: python + + component( + name = "my_component", + component_requirements = [":requirements"], + units = [":my_unit"], + implementation = ["//src:binary"], + tests = ["//tests:integration_test"], + ) + +**dependable_element** + +Define a complete SEooC with automatic documentation generation. + +.. 
code-block:: python + + dependable_element( + name = "my_seooc", + description = "My safety-critical component", + assumptions_of_use = [":aous"], + requirements = [":requirements"], + architectural_design = [":architecture"], + dependability_analysis = [":analysis"], + components = [":my_component"], + tests = ["//tests:system_test"], + deps = ["@platform//:platform_module"], + ) + +**Generated Targets:** + +- ````: Sphinx module with HTML documentation +- ``_needs``: Sphinx-needs JSON for cross-referencing +- ``_index``: Generated index.rst with artifact structure + +**Implementation Details:** + +The macro automatically: + +- Generates an index.rst file with a toctree referencing all provided artifacts +- Creates symlinks to artifact files (assumptions of use, requirements, architecture, safety analysis) for co-location with the generated index +- Delegates to ``sphinx_module`` for actual Sphinx build and HTML generation +- Integrates dependencies for cross-module referencing and HTML merging + +Dependency Management +--------------------- + +Use ``deps`` for cross-module references. HTML is automatically merged: + +.. code-block:: text + + /html/ + ├── index.html # Main documentation + ├── _static/ + ├── dependency1/ # Merged dependency + └── dependency2/ + + +Complete Example +---------------- + +.. 
code-block:: python + + load("//bazel/rules/rules_score:rules_score.bzl", + "architectural_design", "assumptions_of_use", + "component", "component_requirements", + "dependability_analysis", "dependable_element", + "feature_requirements", "safety_analysis", "unit") + + # Artifacts + feature_requirements(name = "features", srcs = ["docs/features.rst"]) + component_requirements(name = "reqs", srcs = ["docs/reqs.rst"], + feature_requirement = [":features"]) + assumptions_of_use(name = "aous", srcs = ["docs/aous.rst"]) + architectural_design(name = "arch", static = ["docs/arch.rst"], + dynamic = ["docs/dynamic.rst"]) + safety_analysis(name = "safety", arch_design = ":arch") + dependability_analysis(name = "analysis", arch_design = ":arch", + dfa = ["docs/dfa.rst"], + safety_analysis = [":safety"]) + + # Implementation + cc_library(name = "kvs_lib", srcs = ["kvs.cpp"], hdrs = ["kvs.h"]) + cc_test(name = "kvs_test", srcs = ["kvs_test.cpp"], deps = [":kvs_lib"]) + + # Structure + unit(name = "kvs_unit", unit_design = [":arch"], + implementation = [":kvs_lib"], tests = [":kvs_test"]) + component(name = "kvs_component", requirements = [":reqs"], + units = [":kvs_unit"], implementation = [":kvs_lib"], tests = []) + + # SEooC + dependable_element( + name = "persistency_kvs", + description = "Key-Value Store for persistent data storage", + assumptions_of_use = [":aous"], + requirements = [":reqs"], + architectural_design = [":arch"], + dependability_analysis = [":analysis"], + components = [":kvs_component"], + tests = [], + deps = ["@score_process//:score_process_module"], + ) + +Build: + +.. 
code-block:: bash + + bazel build //:persistency_kvs + # Output: bazel-bin/persistency_kvs/html/ + + # Implementation + cc_library(name = "kvs_lib", srcs = ["kvs.cpp"], hdrs = ["kvs.h"]) + cc_test(name = "kvs_test", srcs = ["kvs_test.cpp"], deps = [":kvs_lib"]) + + # Structure + unit(name = "kvs_unit", unit_design = [":arch"], + implementation = [":kvs_lib"], tests = [":kvs_test"]) + component(name = "kvs_component", component_requirements = [":reqs"], + units = [":kvs_unit"], implementation = [":kvs_lib"], tests = []) + + # SEooC + dependable_element( + name = "persistency_kvs", + description = "Key-Value Store for persistent data storage", + assumptions_of_use = [":aous"], + requirements = [":reqs"], + architectural_design = [":arch"], + dependability_analysis = [":analysis"], + components = [":kvs_component"], + tests = [], + deps = ["@score_process//:score_process_module"], + ) + +Build: + +.. code-block:: bash + + bazel build //:kvs_seooc + # Output: bazel-bin/kvs_seooc/html/ + # Includes merged HTML from score_platform and score_process modules + +Design Rationale +---------------- + +These rules provide a structured approach to documentation by: + +1. **Two-Tier Architecture**: Generic ``sphinx_module`` for flexibility, specialized ``score_component`` for safety-critical work +2. **Dependency Management**: Automatic cross-referencing and HTML merging across modules +3. **Standardization**: SEooC enforces consistent structure for safety documentation +4. **Traceability**: Sphinx-needs integration enables bidirectional traceability +5. **Automation**: Index generation, symlinking, and configuration management are automatic +6. **Build System Integration**: Bazel ensures reproducible, cacheable documentation builds + +Reference Implementation +------------------------ + +See complete examples in the test BUILD file: + +.. 
literalinclude:: ../test/BUILD + :language: python + :caption: test/BUILD diff --git a/bazel/rules/rules_score/private/BUILD b/bazel/rules/rules_score/private/BUILD new file mode 100644 index 0000000..e69de29 diff --git a/bazel/rules/rules_score/private/architectural_design.bzl b/bazel/rules/rules_score/private/architectural_design.bzl new file mode 100644 index 0000000..ddd8fdd --- /dev/null +++ b/bazel/rules/rules_score/private/architectural_design.bzl @@ -0,0 +1,147 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Architectural Design build rules for S-CORE projects. + +This module provides macros and rules for defining architectural design +documentation following S-CORE process guidelines. Architectural design +documents describe the software architecture including static and dynamic views. 
+""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +ArchitecturalDesignInfo = provider( + doc = "Provider for architectural design artifacts", + fields = { + "static": "Depset of static architecture diagram files (e.g., class diagrams, component diagrams)", + "dynamic": "Depset of dynamic architecture diagram files (e.g., sequence diagrams, activity diagrams)", + "name": "Name of the architectural design target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _architectural_design_impl(ctx): + """Implementation for architectural_design rule. + + Collects architectural design artifacts including static and dynamic + diagrams and provides them through the ArchitecturalDesignInfo provider. 
+ + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and ArchitecturalDesignInfo + """ + static_files = depset(ctx.files.static) + dynamic_files = depset(ctx.files.dynamic) + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [static_files, dynamic_files], + ) + + return [ + DefaultInfo(files = all_files), + ArchitecturalDesignInfo( + static = static_files, + dynamic = dynamic_files, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = all_files, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_architectural_design = rule( + implementation = _architectural_design_impl, + doc = "Collects architectural design documents and diagrams for S-CORE process compliance", + attrs = { + "static": attr.label_list( + allow_files = [".puml", ".plantuml", ".png", ".svg", ".rst", ".md"], + mandatory = False, + doc = "Static architecture diagrams (class diagrams, component diagrams, etc.)", + ), + "dynamic": attr.label_list( + allow_files = [".puml", ".plantuml", ".png", ".svg", ".rst", ".md"], + mandatory = False, + doc = "Dynamic architecture diagrams (sequence diagrams, activity diagrams, etc.)", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def architectural_design( + name, + static = [], + dynamic = [], + visibility = None): + """Define architectural design following S-CORE process guidelines. + + Architectural design documents describe the software architecture of a + component, including both static and dynamic views. Static views show + the structural organization (classes, components, modules), while dynamic + views show the behavioral aspects (sequences, activities, states). 
+ + Args: + name: The name of the architectural design target. Used as the base + name for all generated targets. + static: Optional list of labels to diagram files (.puml, .plantuml, + .png, .svg) or documentation files (.rst, .md) containing static + architecture views such as class diagrams, component diagrams, + or package diagrams as defined in the S-CORE process. + dynamic: Optional list of labels to diagram files (.puml, .plantuml, + .png, .svg) or documentation files (.rst, .md) containing dynamic + architecture views such as sequence diagrams, activity diagrams, + or state diagrams as defined in the S-CORE process. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main architectural design target providing ArchitecturalDesignInfo + + Example: + ```starlark + architectural_design( + name = "my_architectural_design", + static = [ + "class_diagram.puml", + "component_diagram.puml", + ], + dynamic = [ + "sequence_diagram.puml", + "activity_diagram.puml", + ], + ) + ``` + """ + _architectural_design( + name = name, + static = static, + dynamic = dynamic, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/assumptions_of_use.bzl b/bazel/rules/rules_score/private/assumptions_of_use.bzl new file mode 100644 index 0000000..36c584f --- /dev/null +++ b/bazel/rules/rules_score/private/assumptions_of_use.bzl @@ -0,0 +1,154 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Assumptions of Use build rules for S-CORE projects. + +This module provides macros and rules for defining Assumptions of Use (AoU) +following S-CORE process guidelines. Assumptions of Use define the safety-relevant +operating conditions and constraints for a Safety Element out of Context (SEooC). +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:component_requirements.bzl", "ComponentRequirementsInfo") +load("//bazel/rules/rules_score/private:feature_requirements.bzl", "FeatureRequirementsInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +AssumptionsOfUseInfo = provider( + doc = "Provider for assumptions of use artifacts", + fields = { + "srcs": "Depset of source files containing assumptions of use", + "feature_requirements": "List of FeatureRequirementsInfo providers this AoU traces to", + "name": "Name of the assumptions of use target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _assumptions_of_use_impl(ctx): + """Implementation for assumptions_of_use rule. + + Collects assumptions of use source files and links them to their + parent feature requirements through providers. 
+
+    Args:
+        ctx: Rule context
+
+    Returns:
+        List of providers including DefaultInfo and AssumptionsOfUseInfo
+    """
+    srcs = depset(ctx.files.srcs)
+
+    # Collect feature requirements providers
+    feature_reqs = []
+    for feat_req in ctx.attr.feature_requirements:
+        if FeatureRequirementsInfo in feat_req:
+            feature_reqs.append(feat_req[FeatureRequirementsInfo])
+
+    # Collect transitive sphinx sources from feature requirements
+    transitive = [srcs]
+    for feat_req in ctx.attr.feature_requirements:
+        if SphinxSourcesInfo in feat_req:
+            transitive.append(feat_req[SphinxSourcesInfo].transitive_srcs)
+
+    return [
+        DefaultInfo(files = srcs),
+        AssumptionsOfUseInfo(
+            srcs = srcs,
+            feature_requirements = feature_reqs,
+            name = ctx.label.name,
+        ),
+        SphinxSourcesInfo(
+            srcs = srcs,
+            transitive_srcs = depset(transitive = transitive),
+        ),
+    ]
+
+# ============================================================================
+# Rule Definition
+# ============================================================================
+
+_assumptions_of_use = rule(
+    implementation = _assumptions_of_use_impl,
+    doc = "Collects Assumptions of Use documents with traceability to feature requirements",
+    attrs = {
+        "srcs": attr.label_list(
+            allow_files = [".rst", ".md", ".trlc"],
+            mandatory = True,
+            doc = "Source files containing Assumptions of Use specifications",
+        ),
+        "feature_requirements": attr.label_list(
+            providers = [FeatureRequirementsInfo],
+            mandatory = False,
+            doc = "List of feature_requirements targets that these Assumptions of Use trace to",
+        ),
+        "component_requirements": attr.label_list(
+            providers = [ComponentRequirementsInfo],
+            mandatory = False,
+            doc = "List of component_requirements targets that these Assumptions of Use trace to",
+        ),
+    },
+)
+
+# ============================================================================
+# Public Macro
+# ============================================================================
+
+def assumptions_of_use(
+    name,
+    srcs,
+    
feature_requirement = [], + component_requirements = [], + visibility = None): + """Define Assumptions of Use following S-CORE process guidelines. + + Assumptions of Use (AoU) define the safety-relevant operating conditions + and constraints for a Safety Element out of Context (SEooC). They specify + the conditions under which the component is expected to operate safely + and the responsibilities of the integrator. + + Args: + name: The name of the assumptions of use target. Used as the base + name for all generated targets. + srcs: List of labels to .rst, .md, or .trlc files containing the + Assumptions of Use specifications as defined in the S-CORE + process. + feature_requirement: Optional list of labels to feature_requirements + targets that these Assumptions of Use relate to. Establishes + traceability as defined in the S-CORE process. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main assumptions of use target providing AssumptionsOfUseInfo + + Example: + ```starlark + assumptions_of_use( + name = "my_assumptions_of_use", + srcs = ["assumptions_of_use.rst"], + feature_requirement = [":my_feature_requirements"], + ) + ``` + """ + _assumptions_of_use( + name = name, + srcs = srcs, + feature_requirements = feature_requirement, + component_requirements = component_requirements, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/component.bzl b/bazel/rules/rules_score/private/component.bzl new file mode 100644 index 0000000..4e41b9e --- /dev/null +++ b/bazel/rules/rules_score/private/component.bzl @@ -0,0 +1,150 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Component build rules for S-CORE projects. + +This module provides macros and rules for defining software components +following S-CORE process guidelines. A component consists of multiple units +with associated requirements and tests. +""" + +load("//bazel/rules/rules_score:providers.bzl", "ComponentInfo", "SphinxSourcesInfo") + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _component_impl(ctx): + """Implementation for component rule. + + Collects component requirements, units, and tests and provides them + through the ComponentInfo provider. 
+ + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and ComponentInfo + """ + + # Collect requirements files from component_requirements targets + requirements_files = [] + for req_target in ctx.attr.requirements: + if SphinxSourcesInfo in req_target: + requirements_files.append(req_target[SphinxSourcesInfo].srcs) + + requirements_depset = depset(transitive = requirements_files) + + # Collect components and tests + components_depset = depset(ctx.attr.components) + tests_depset = depset(ctx.attr.tests) + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [requirements_depset], + ) + + return [ + DefaultInfo(files = all_files), + ComponentInfo( + name = ctx.label.name, + requirements = requirements_depset, + components = components_depset, + tests = tests_depset, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = all_files, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_component = rule( + implementation = _component_impl, + doc = "Defines a software component composed of multiple units for S-CORE process compliance", + attrs = { + "requirements": attr.label_list( + mandatory = True, + doc = "Component requirements artifacts (typically component_requirements targets)", + ), + "components": attr.label_list( + mandatory = True, + doc = "Unit targets that comprise this component", + ), + "tests": attr.label_list( + mandatory = True, + doc = "Component-level integration test targets", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def component( + name, + units = None, + tests = [], + requirements = None, + components = None, + testonly = True, + visibility = None): + """Define a software component following 
S-CORE process guidelines.
+
+    A component is a collection of related units that together provide
+    a specific functionality. It consists of:
+    - Component requirements: Requirements specification for the component
+    - Units: Individual software units that implement the requirements
+    - Tests: Integration tests that verify the component as a whole
+
+    Args:
+        name: The name of the component. Used as the target name.
+        units: List of labels to unit targets that comprise this component.
+            NOTE(review): the underlying rule only receives `components`;
+            prefer `components` until `units` forwarding is confirmed.
+        tests: List of labels to Bazel test targets that verify the component
+            integration.
+        requirements: List of labels to component_requirements targets
+            that define the requirements for this component.
+        components: List of labels to unit (and/or nested component) targets
+            that comprise this component. Alias for `units`; use one or the
+            other, not both.
+        testonly: If true, only testonly targets can depend on this component.
+        visibility: Bazel visibility specification for the component target.
+
+    Example:
+    ```python
+    component(
+        name = "kvs_component",
+        requirements = [":kvs_component_requirements"],
+        units = [":kvs_unit1", ":kvs_unit2"],
+        tests = ["//persistency/kvs/tests:score_kvs_component_integration_tests"],
+        visibility = ["//visibility:public"],
+    )
+    ```
+    """
+
+    _component(
+        name = name,
+        requirements = requirements,
+        # `units` and `components` are aliases in the public API; forward
+        # whichever one the caller supplied so that `units` is not silently
+        # ignored (the underlying rule only knows about `components`, and
+        # that attribute is mandatory).
+        components = components if components != None else units,
+        tests = tests,
+        testonly = testonly,
+        visibility = visibility,
+    )
diff --git a/bazel/rules/rules_score/private/component_requirements.bzl b/bazel/rules/rules_score/private/component_requirements.bzl
new file mode 100644
index 0000000..8735354
--- /dev/null
+++ b/bazel/rules/rules_score/private/component_requirements.bzl
@@ -0,0 +1,128 @@
+# *******************************************************************************
+# Copyright (c) 2025 Contributors to the Eclipse Foundation
+#
+# See the NOTICE file(s) distributed with this work for additional
+# information regarding copyright ownership.
+#
+# This program and the accompanying materials are made available under the
+# terms of the Apache License Version 2.0 which is available at
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# SPDX-License-Identifier: Apache-2.0
+# *******************************************************************************
+
+"""
+Component Requirements build rules for S-CORE projects.
+
+This module provides macros and rules for defining component requirements
+following S-CORE process guidelines. Component requirements are derived from
+feature requirements and define the specific requirements for a software component.
+""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:feature_requirements.bzl", "FeatureRequirementsInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +ComponentRequirementsInfo = provider( + doc = "Provider for component requirements artifacts", + fields = { + "srcs": "Depset of source files containing component requirements", + "requirements": "List of FeatureRequirementsInfo providers this component traces to", + "name": "Name of the component requirements target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _component_requirements_impl(ctx): + """Implementation for component_requirements rule. + + Collects component requirements source files and links them to their + parent feature requirements through providers. 
+
+    Args:
+        ctx: Rule context
+
+    Returns:
+        List of providers including DefaultInfo and ComponentRequirementsInfo
+    """
+    srcs = depset(ctx.files.srcs)
+
+    # Feature-requirement providers this component traces to. No attribute
+    # currently supplies them, so this stays empty until the traceability
+    # wiring (see the FeatureRequirementsInfo load above) is added.
+    feature_reqs = []
+
+    # Transitive sphinx sources: only our own srcs for now; feature
+    # requirements will be folded in once they are collected.
+    transitive = [srcs]
+
+    return [
+        DefaultInfo(files = srcs),
+        ComponentRequirementsInfo(
+            srcs = srcs,
+            # Populate the declared `requirements` field explicitly (it was
+            # previously omitted, leaving the field None instead of a list).
+            requirements = feature_reqs,
+            name = ctx.label.name,
+        ),
+        SphinxSourcesInfo(
+            srcs = srcs,
+            transitive_srcs = depset(transitive = transitive),
+        ),
+    ]
+
+# ============================================================================
+# Rule Definition
+# ============================================================================
+
+_component_requirements = rule(
+    implementation = _component_requirements_impl,
+    doc = "Collects component requirements documents with traceability to feature requirements",
+    attrs = {
+        "srcs": attr.label_list(
+            allow_files = [".rst", ".md", ".trlc"],
+            mandatory = True,
+            doc = "Source files containing component requirements specifications",
+        ),
+    },
+)
+
+# ============================================================================
+# Public Macro
+# ============================================================================
+
+def component_requirements(
+        name,
+        srcs,
+        visibility = None):
+    """Define component requirements following S-CORE process guidelines.
+
+    Component requirements are derived from feature requirements and define
+    the specific functional and safety requirements for a software component.
+    They establish traceability from high-level features to component-level
+    specifications.
+
+    Args:
+        name: The name of the component requirements target. Used as the base
+            name for all generated targets.
+        srcs: List of labels to .rst, .md, or .trlc files containing the
+            component requirements specifications as defined in the S-CORE
+            process.
+        visibility: Bazel visibility specification for the generated targets.
+ + Generated Targets: + : Main component requirements target providing ComponentRequirementsInfo + + Example: + ```starlark + component_requirements( + name = "my_component_requirements", + srcs = ["component_requirements.rst"], + ) + ``` + """ + _component_requirements( + name = name, + srcs = srcs, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/dependability_analysis.bzl b/bazel/rules/rules_score/private/dependability_analysis.bzl new file mode 100644 index 0000000..0d31738 --- /dev/null +++ b/bazel/rules/rules_score/private/dependability_analysis.bzl @@ -0,0 +1,183 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Dependability Analysis build rules for S-CORE projects. + +This module provides macros and rules for defining dependability analysis +documentation following S-CORE process guidelines. Dependability analysis +combines safety analysis with dependent failure analysis (DFA) to provide +a comprehensive view of component reliability and safety. 
+""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:architectural_design.bzl", "ArchitecturalDesignInfo") +load("//bazel/rules/rules_score/private:safety_analysis.bzl", "AnalysisInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +DependabilityAnalysisInfo = provider( + doc = "Provider for dependability analysis artifacts", + fields = { + "safety_analysis": "List of AnalysisInfo providers", + "security_analysis": "List of AnalysisInfo providers", + "arch_design": "ArchitecturalDesignInfo provider for linked architectural design", + "name": "Name of the dependability analysis target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _dependability_analysis_impl(ctx): + """Implementation for dependability_analysis rule. + + Collects dependability analysis artifacts including safety analysis results + and dependent failure analysis, linking them to architectural design. 
+
+    Args:
+        ctx: Rule context
+
+    Returns:
+        List of providers including DefaultInfo and DependabilityAnalysisInfo
+    """
+
+    # DFA and FMEA inputs. The previous variable naming swapped the two
+    # attributes, which caused the FMEA files to be dropped from the
+    # provider entirely (both provider fields received the DFA depset).
+    dfa_files = depset(ctx.files.dfa)
+    fmea_files = depset(ctx.files.fmea)
+
+    # Get architectural design provider if available
+    arch_design_info = None
+    if ctx.attr.arch_design and ArchitecturalDesignInfo in ctx.attr.arch_design:
+        arch_design_info = ctx.attr.arch_design[ArchitecturalDesignInfo]
+
+    # Combine all files for DefaultInfo
+    all_files = depset(transitive = [dfa_files, fmea_files])
+
+    # Collect transitive sphinx sources from safety analysis and architectural design
+    transitive = [all_files]
+    for sa in ctx.attr.security_analysis:
+        if SphinxSourcesInfo in sa:
+            transitive.append(sa[SphinxSourcesInfo].transitive_srcs)
+    if ctx.attr.arch_design and SphinxSourcesInfo in ctx.attr.arch_design:
+        transitive.append(ctx.attr.arch_design[SphinxSourcesInfo].transitive_srcs)
+
+    return [
+        DefaultInfo(files = all_files),
+        DependabilityAnalysisInfo(
+            # NOTE(review): FMEA feeds the safety view and DFA the security
+            # view here; this mirrors the compatibility shim in the public
+            # macro (see its TODO) — confirm the intended split once the
+            # dependability-analysis attributes are aligned.
+            safety_analysis = fmea_files,
+            security_analysis = dfa_files,
+            arch_design = arch_design_info,
+            name = ctx.label.name,
+        ),
+        SphinxSourcesInfo(
+            srcs = all_files,
+            transitive_srcs = depset(transitive = transitive),
+        ),
+    ]
+
+# ============================================================================
+# Rule Definition
+# ============================================================================
+
+_dependability_analysis = rule(
+    implementation = _dependability_analysis_impl,
+    doc = "Collects dependability analysis documents for S-CORE process compliance",
+    attrs = {
+        "security_analysis": attr.label_list(
+            # TODO: change provider name
+            providers = [AnalysisInfo],
+            mandatory = False,
+            doc = "List of safety_analysis targets containing FMEA, FMEDA, FTA results",
+        ),
+        "dfa": attr.label_list(
+            allow_files = [".rst", ".md"],
+            mandatory = False,
+            doc = "Dependent Failure Analysis (DFA) documentation",
+ ), + "fmea": attr.label_list( + allow_files = [".rst", ".md"], + mandatory = False, + doc = "Failure Mode and Effects Analysis (FMEA) documentation", + ), + "arch_design": attr.label( + providers = [ArchitecturalDesignInfo], + mandatory = False, + doc = "Reference to architectural_design target for traceability", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def dependability_analysis( + name, + safety_analysis = [], + dfa = [], + fmea = [], + arch_design = None, + visibility = None): + """Define dependability analysis following S-CORE process guidelines. + + Dependability analysis provides a comprehensive view of component + reliability and safety by combining safety analysis results with + dependent failure analysis (DFA). It establishes traceability to + the architectural design for complete safety argumentation. + + Args: + name: The name of the dependability analysis target. Used as the base + name for all generated targets. + safety_analysis: Optional list of labels to safety_analysis targets + containing the results of FMEA, FMEDA, FTA, or other safety + analysis methods as defined in the S-CORE process. + dfa: Optional list of labels to .rst or .md files containing + Dependent Failure Analysis (DFA) documentation. DFA identifies + failures that could affect multiple components or functions + as defined in the S-CORE process. + fmea: Optional list of labels to .rst or .md files containing + Failure Mode and Effects Analysis (FMEA) documentation. FMEA + identifies potential failure modes and their effects on the + system as defined in the S-CORE process. + arch_design: Optional label to an architectural_design target for + establishing traceability between dependability analysis and + the software architecture. + visibility: Bazel visibility specification for the generated targets. 
+ + Generated Targets: + : Main dependability analysis target providing DependabilityAnalysisInfo + + Example: + ```starlark + dependability_analysis( + name = "my_dependability_analysis", + safety_analysis = [":my_safety_analysis"], + dfa = ["dependent_failure_analysis.rst"], + fmea = ["failure_mode_effects_analysis.rst"], + arch_design = ":my_architectural_design", + ) + ``` + """ + _dependability_analysis( + name = name, + # TODO: this needs to be fixed. A security is not a safety_analysis. + # we leave it for now for compatibility reasons until there is alignment on the a + # attributes of a dependability analysis + security_analysis = safety_analysis, + dfa = dfa, + fmea = fmea, + arch_design = arch_design, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/dependable_element.bzl b/bazel/rules/rules_score/private/dependable_element.bzl new file mode 100644 index 0000000..da368f9 --- /dev/null +++ b/bazel/rules/rules_score/private/dependable_element.bzl @@ -0,0 +1,789 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Dependable Element build rules for S-CORE projects. + +This module provides macros and rules for defining dependable elements (Safety +Elements out of Context - SEooC) following S-CORE process guidelines. A dependable +element is a safety-critical component with comprehensive documentation including +assumptions of use, requirements, design, and safety analysis. 
+""" + +load( + "//bazel/rules/rules_score:providers.bzl", + "ComponentInfo", + "SphinxSourcesInfo", + "UnitInfo", +) +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "sphinx_module") + +# ============================================================================ +# Template Constants +# ============================================================================ + +_UNIT_DESIGN_SECTION_TEMPLATE = """Unit Design +----------- + +.. toctree:: + :maxdepth: 2 + +{design_refs}""" + +_IMPLEMENTATION_SECTION_TEMPLATE = """Implementation +-------------- + +This {entity_type} is implemented by the following targets: + +{implementation_list}""" + +_TESTS_SECTION_TEMPLATE = """Tests +----- + +This {entity_type} is verified by the following test targets: + +{test_list}""" + +_COMPONENT_REQUIREMENTS_SECTION_TEMPLATE = """Component Requirements +---------------------- + +.. toctree:: + :maxdepth: 2 + +{requirements_refs}""" + +_COMPONENT_UNITS_SECTION_TEMPLATE = """Units +----- + +This component is composed of the following units: + +{unit_links}""" + +_UNIT_TEMPLATE = """ + +Unit: {unit_name} +{underline} + +{design_section}{implementation_section}{tests_section}""" + +_COMPONENT_TEMPLATE = """ + +Component: {component_name} +{underline} + +{requirements_section}{units_section}{implementation_section}{tests_section}""" + +# ============================================================================ +# Helper Functions for Documentation Generation +# ============================================================================ + +def _get_sphinx_files(target): + return target[SphinxSourcesInfo].srcs.to_list() + +def _filter_doc_files(files): + """Filter files to only include documentation files. 
+ + Args: + files: List of files to filter + + Returns: + List of documentation files + """ + return [f for f in files if f.extension in ["rst", "md", "puml", "plantuml", "png", "svg"]] + +def _find_common_directory(files): + """Find the longest common directory path for a list of files. + + Args: + files: List of File objects + + Returns: + String representing the common directory path, or empty string if none + """ + if not files: + return "" + + # Get all directory paths + dirs = [f.dirname for f in files] + + if not dirs: + return "" + + # Start with first directory + common = dirs[0] + + # Iterate through all directories to find common prefix + for d in dirs[1:]: + # Find common prefix between common and d + # Split into path components + common_parts = common.split("/") + d_parts = d.split("/") + + # Find matching prefix + new_common_parts = [] + for i in range(min(len(common_parts), len(d_parts))): + if common_parts[i] == d_parts[i]: + new_common_parts.append(common_parts[i]) + else: + break + + common = "/".join(new_common_parts) + + if not common: + break + + return common + +def _compute_relative_path(file, common_dir): + """Compute relative path from common directory to file. + + Args: + file: File object + common_dir: Common directory path string + + Returns: + String containing the relative path + """ + file_dir = file.dirname + + if not common_dir: + return file.basename + + if not file_dir.startswith(common_dir): + return file.basename + + if file_dir == common_dir: + return file.basename + + relative_subdir = file_dir[len(common_dir):].lstrip("/") + return relative_subdir + "/" + file.basename + +def _is_document_file(file): + """Check if file should be included in toctree. 
+ + Args: + file: File object + + Returns: + Boolean indicating if file is a document (.rst or .md) + """ + return file.extension in ["rst", "md"] + +def _create_artifact_symlink(ctx, artifact_name, artifact_file, relative_path): + """Create symlink for artifact file in output directory. + + Args: + ctx: Rule context + artifact_name: Name of artifact type (e.g., "architectural_design") + artifact_file: Source file + relative_path: Relative path within artifact directory + + Returns: + Declared output file + """ + output_file = ctx.actions.declare_file( + ctx.label.name + "/" + artifact_name + "/" + relative_path, + ) + + ctx.actions.symlink( + output = output_file, + target_file = artifact_file, + ) + + return output_file + +def _process_artifact_files(ctx, artifact_name, label): + """Process all files from a single label for a given artifact type. + + Args: + ctx: Rule context + artifact_name: Name of artifact type + label: Label to process + + Returns: + Tuple of (output_files, index_references) + """ + output_files = [] + index_refs = [] + + # Get and filter files + all_files = _get_sphinx_files(label) + doc_files = _filter_doc_files(all_files) + + if not doc_files: + return (output_files, index_refs) + + # Find common directory to preserve hierarchy + common_dir = _find_common_directory(doc_files) + + # Process each file + for artifact_file in doc_files: + # Compute paths + relative_path = _compute_relative_path(artifact_file, common_dir) + + # Create symlink + output_file = _create_artifact_symlink( + ctx, + artifact_name, + artifact_file, + relative_path, + ) + output_files.append(output_file) + + # Add to index if it's a document file + if _is_document_file(artifact_file): + doc_ref = (artifact_name + "/" + relative_path) \ + .replace(".rst", "") \ + .replace(".md", "") + index_refs.append(doc_ref) + + return (output_files, index_refs) + +def _process_artifact_type(ctx, artifact_name): + """Process all labels for a given artifact type. 
+ + Args: + ctx: Rule context + artifact_name: Name of artifact type (e.g., "architectural_design") + + Returns: + Tuple of (output_files, index_references) + """ + output_files = [] + index_refs = [] + + attr_list = getattr(ctx.attr, artifact_name) + if not attr_list: + return (output_files, index_refs) + + # Process each label + for label in attr_list: + label_outputs, label_refs = _process_artifact_files( + ctx, + artifact_name, + label, + ) + output_files.extend(label_outputs) + index_refs.extend(label_refs) + + return (output_files, index_refs) + +def _process_deps(ctx): + """Process deps to generate references to submodule documentation. + + The HTML merger in sphinx_module will copy the HTML directories from deps. + We generate RST bullet list with links to those HTML directories. + + Args: + ctx: Rule context + + Returns: + String containing RST-formatted bullet list of links + """ + if not ctx.attr.deps: + return "" + + # Generate RST bullet list with links to submodule HTML + links = [] + for dep in ctx.attr.deps: + dep_name = dep.label.name + + # Create a link to the index.html that will be merged + # Format: * `Module Name `_ + # Use underscores in name for readability, convert to spaces for display + display_name = dep_name.replace("_", " ").title() + links.append("* `{} <{}/index.html>`_".format(display_name, dep_name)) + + return "\n".join(links) + +def _get_component_names(components): + return [c.label.name for c in components] + +def _collect_units_recursive(components, visited_units = None): + """Iteratively collect all units from components, handling nested components. + + Uses a stack-based approach to avoid Starlark recursion limitations. 
+ + Args: + components: List of component targets + visited_units: Dict of unit names already visited (for deduplication) + + Returns: + Dict mapping unit names to unit targets + """ + if visited_units == None: + visited_units = {} + + # Process components iteratively using a work queue approach + # Since Starlark doesn't support while loops, we use a for loop with a large enough range + # and track our own index + to_process = [] + components + + for _ in range(1000): # Max depth to prevent infinite loops + if not to_process: + break + comp_target = to_process.pop(0) + + # Check if this is a component with ComponentInfo + if ComponentInfo in comp_target: + comp_info = comp_target[ComponentInfo] + + # Process nested components + nested_components = comp_info.components.to_list() + for nested in nested_components: + # Check if nested item is a unit or component + if UnitInfo in nested: + unit_name = nested.label.name + if unit_name not in visited_units: + visited_units[unit_name] = nested + elif ComponentInfo in nested: + # Add nested component to queue for processing + to_process.append(nested) + + # Check if this is directly a unit + elif UnitInfo in comp_target: + unit_name = comp_target.label.name + if unit_name not in visited_units: + visited_units[unit_name] = comp_target + + return visited_units + +def _generate_unit_doc(ctx, unit_target, unit_name): + """Generate RST documentation for a single unit. 
+ + Args: + ctx: Rule context + unit_target: The unit target + unit_name: Name of the unit + + Returns: + Tuple of (rst_file, list_of_output_files) + """ + unit_info = unit_target[UnitInfo] + + # Create RST file for this unit + unit_rst = ctx.actions.declare_file(ctx.label.name + "/units/" + unit_name + ".rst") + + # Collect design files - unit_design depset contains File objects + design_files = [] + design_refs = [] + if unit_info.unit_design: + doc_files = _filter_doc_files(unit_info.unit_design.to_list()) + + if doc_files: + # Find common directory + common_dir = _find_common_directory(doc_files) + + for f in doc_files: + relative_path = _compute_relative_path(f, common_dir) + output_file = _create_artifact_symlink( + ctx, + "units/" + unit_name + "_design", + f, + relative_path, + ) + design_files.append(output_file) + + if _is_document_file(f): + doc_ref = ("units/" + unit_name + "_design/" + relative_path) \ + .replace(".rst", "") \ + .replace(".md", "") + design_refs.append(" " + doc_ref) + + # Collect implementation target names + impl_names = [] + if unit_info.implementation: + for impl in unit_info.implementation.to_list(): + impl_names.append(impl.label) + + # Collect test target names + test_names = [] + if unit_info.tests: + for test in unit_info.tests.to_list(): + test_names.append(test.label) + + # Generate RST content using template + underline = "=" * (len("Unit: " + unit_name)) + + # Generate sections from template constants + design_section = "" + if design_refs: + design_section = "\n" + _UNIT_DESIGN_SECTION_TEMPLATE.format( + design_refs = "\n".join(design_refs), + ) + "\n" + + implementation_section = "" + if impl_names: + impl_list = "\n".join(["- ``" + str(impl) + "``" for impl in impl_names]) + implementation_section = "\n" + _IMPLEMENTATION_SECTION_TEMPLATE.format( + entity_type = "unit", + implementation_list = impl_list, + ) + "\n" + + tests_section = "" + if test_names: + test_list = "\n".join(["- ``" + str(test) + "``" for test in 
test_names]) + tests_section = "\n" + _TESTS_SECTION_TEMPLATE.format( + entity_type = "unit", + test_list = test_list, + ) + "\n" + + # Generate unit RST content from template constant + unit_content = _UNIT_TEMPLATE.format( + unit_name = unit_name, + underline = underline, + design_section = design_section, + implementation_section = implementation_section, + tests_section = tests_section, + ) + + ctx.actions.write( + output = unit_rst, + content = unit_content, + ) + + return (unit_rst, design_files) + +def _generate_component_doc(ctx, comp_target, comp_name, unit_names): + """Generate RST documentation for a single component. + + Args: + ctx: Rule context + comp_target: The component target + comp_name: Name of the component + unit_names: List of unit names that belong to this component + + Returns: + Tuple of (rst_file, list_of_output_files) + """ + comp_info = comp_target[ComponentInfo] + + # Create RST file for this component + comp_rst = ctx.actions.declare_file(ctx.label.name + "/components/" + comp_name + ".rst") + + # Collect requirements files - requirements depset contains File objects + req_files = [] + req_refs = [] + if comp_info.requirements: + doc_files = _filter_doc_files(comp_info.requirements.to_list()) + + if doc_files: + # Find common directory + common_dir = _find_common_directory(doc_files) + + for f in doc_files: + relative_path = _compute_relative_path(f, common_dir) + output_file = _create_artifact_symlink( + ctx, + "components/" + comp_name + "_requirements", + f, + relative_path, + ) + req_files.append(output_file) + + if _is_document_file(f): + doc_ref = ("components/" + comp_name + "_requirements/" + relative_path) \ + .replace(".rst", "") \ + .replace(".md", "") + req_refs.append(" " + doc_ref) + + # Collect test target names + test_names = [] + if comp_info.tests: + for test in comp_info.tests.to_list(): + test_names.append(test.label) + + # Generate RST content using template + underline = "=" * (len("Component: " + comp_name)) + + 
# Generate sections from template constants + requirements_section = "" + if req_refs: + requirements_section = "\n" + _COMPONENT_REQUIREMENTS_SECTION_TEMPLATE.format( + requirements_refs = "\n".join(req_refs), + ) + "\n" + + units_section = "" + if unit_names: + unit_links = "\n".join(["- :doc:`../units/" + unit_name + "`" for unit_name in unit_names]) + units_section = "\n" + _COMPONENT_UNITS_SECTION_TEMPLATE.format( + unit_links = unit_links, + ) + "\n" + + tests_section = "" + if test_names: + test_list = "\n".join(["- ``" + str(test) + "``" for test in test_names]) + tests_section = "\n" + _TESTS_SECTION_TEMPLATE.format( + entity_type = "component", + test_list = test_list, + ) + "\n" + + # Generate component RST content from template constant + component_content = _COMPONENT_TEMPLATE.format( + component_name = comp_name, + underline = underline, + requirements_section = requirements_section, + units_section = units_section, + implementation_section = "", + tests_section = tests_section, + ) + + ctx.actions.write( + output = comp_rst, + content = component_content, + ) + + return (comp_rst, req_files) + +# ============================================================================ +# Index Generation Rule Implementation +# ============================================================================ + +def _dependable_element_index_impl(ctx): + """Generate index.rst file with references to all dependable element artifacts. + + This rule creates a Sphinx index.rst file that includes references to all + the documentation artifacts for the dependable element. 
+ + Args: + ctx: Rule context + + Returns: + DefaultInfo provider with generated index.rst file + """ + + # Declare output index file + index_rst = ctx.actions.declare_file(ctx.label.name + "/index.rst") + output_files = [index_rst] + + # Define artifacts + # Note: "requirements" can contain both component_requirements and feature_requirements + artifact_types = [ + "components", + "assumptions_of_use", + "requirements", + "architectural_design", + "dependability_analysis", + "checklists", + ] + + # Process each artifact type + artifacts_by_type = {} + for artifact_name in artifact_types: + files, refs = _process_artifact_type(ctx, artifact_name) + output_files.extend(files) + artifacts_by_type[artifact_name] = refs + + # Collect all units recursively from components + all_units = _collect_units_recursive(ctx.attr.components) + + # Generate documentation for each unit + unit_refs = [] + for unit_name, unit_target in all_units.items(): + unit_rst, unit_files = _generate_unit_doc(ctx, unit_target, unit_name) + output_files.append(unit_rst) + output_files.extend(unit_files) + unit_refs.append(" units/" + unit_name) + + # Generate documentation for each component + component_refs = [] + for comp_target in ctx.attr.components: + if ComponentInfo in comp_target: + comp_info = comp_target[ComponentInfo] + comp_name = comp_info.name + + # Collect units that belong to this component + comp_unit_names = [] + for nested in comp_info.components.to_list(): + if UnitInfo in nested: + comp_unit_names.append(nested.label.name) + elif ComponentInfo in nested: + # For nested components, collect their units recursively + nested_units = _collect_units_recursive([nested]) + comp_unit_names.extend(nested_units.keys()) + + comp_rst, comp_files = _generate_component_doc(ctx, comp_target, comp_name, comp_unit_names) + output_files.append(comp_rst) + output_files.extend(comp_files) + component_refs.append(" components/" + comp_name) + + # Process dependencies (submodules) + deps_links = 
_process_deps(ctx) + + # Generate index file from template + title = ctx.attr.module_name + underline = "=" * len(title) + + ctx.actions.expand_template( + template = ctx.file.template, + output = index_rst, + substitutions = { + "{title}": title, + "{underline}": underline, + "{description}": ctx.attr.description, + "{units}": "\n".join(unit_refs) if unit_refs else " (none)", + "{components}": "\n".join(component_refs) if component_refs else " (none)", + "{assumptions_of_use}": "\n ".join(artifacts_by_type["assumptions_of_use"]), + "{component_requirements}": "\n ".join(artifacts_by_type["requirements"]), + "{architectural_design}": "\n ".join(artifacts_by_type["architectural_design"]), + "{dependability_analysis}": "\n ".join(artifacts_by_type["dependability_analysis"]), + "{checklists}": "\n ".join(artifacts_by_type["checklists"]), + "{submodules}": deps_links, + }, + ) + + return [ + DefaultInfo(files = depset(output_files)), + ] + +_dependable_element_index = rule( + implementation = _dependable_element_index_impl, + doc = "Generates index.rst file with references to dependable element artifacts", + attrs = { + "module_name": attr.string( + mandatory = True, + doc = "Name of the dependable element module (used as document title)", + ), + "description": attr.string( + mandatory = True, + doc = "Description of the dependable element. 
Supports RST formatting.",
+        ),
+        "assumptions_of_use": attr.label_list(
+            mandatory = True,
+            doc = "Assumptions of Use targets or files.",
+        ),
+        "requirements": attr.label_list(
+            mandatory = True,
+            doc = "Requirements targets (component_requirements, feature_requirements, etc.).",
+        ),
+        "architectural_design": attr.label_list(
+            mandatory = True,
+            doc = "Architectural design targets or files.",
+        ),
+        "dependability_analysis": attr.label_list(
+            mandatory = True,
+            doc = "Dependability analysis targets or files.",
+        ),
+        "components": attr.label_list(
+            default = [],
+            doc = "Component and/or unit targets that implement this dependable element.",
+        ),
+        "tests": attr.label_list(
+            default = [],
+            doc = "Integration tests for the dependable element.",
+        ),
+        "checklists": attr.label_list(
+            default = [],
+            doc = "Safety checklists targets or files.",
+        ),
+        "template": attr.label(
+            allow_single_file = [".rst"],
+            mandatory = True,
+            doc = "Template file for generating index.rst",
+        ),
+        "deps": attr.label_list(
+            default = [],
+            doc = "Dependencies on other dependable element modules (submodules).",
+        ),
+    },
+)
+
+# ============================================================================
+# Public Macro
+# ============================================================================
+
+def dependable_element(
+        name,
+        description,
+        assumptions_of_use,
+        requirements,
+        architectural_design,
+        dependability_analysis,
+        components,
+        tests,
+        checklists = [],
+        deps = [],
+        sphinx = Label("@score_tooling//bazel/rules/rules_score:score_build"),
+        testonly = True,
+        visibility = None):
+    """Define a dependable element (Safety Element out of Context - SEooC) following S-CORE process guidelines.
+
+    This macro creates a complete dependable element with integrated documentation
+    generation. It generates an index.rst file referencing all artifacts and builds
+    HTML documentation using the sphinx_module infrastructure. 
+
+    A dependable element is a safety-critical component that can be developed
+    independently and integrated into different systems. It includes comprehensive
+    documentation covering all aspects required for safety certification.
+
+    Args:
+        name: The name of the dependable element. Used as the base name for
+            all generated targets.
+        description: String containing a high-level description of the element.
+            This text provides context about what the element does and its purpose.
+            Supports RST formatting.
+        assumptions_of_use: List of labels to assumptions_of_use targets that
+            define the safety-relevant operating conditions and constraints.
+        requirements: List of labels to requirements targets (component_requirements,
+            feature_requirements, etc.) that define functional and safety requirements.
+        architectural_design: List of labels to architectural_design targets that
+            describe the software architecture and design decisions.
+        dependability_analysis: List of labels to dependability_analysis targets
+            containing safety analysis results (FMEA, FMEDA, FTA, DFA, etc.).
+        components: List of labels to component and/or unit targets that implement
+            this dependable element.
+        tests: List of labels to Bazel test targets that verify the dependable
+            element at the system level (integration tests, system tests).
+        checklists: Optional list of labels to .rst or .md files containing
+            safety checklists and verification documents.
+        deps: Optional list of other module targets this element depends on.
+            Cross-references will work automatically.
+        sphinx: Label to sphinx build binary. Default: @score_tooling//bazel/rules/rules_score:score_build
+        testonly: If True, only testonly targets can depend on this target.
+        visibility: Bazel visibility specification for the dependable element target. 
+ + Generated Targets: + _index: Internal rule that generates index.rst and copies artifacts + : Main dependable element target (sphinx_module) with HTML documentation + _needs: Sphinx-needs JSON target (created by sphinx_module for cross-referencing) + + """ + + # Step 1: Generate index.rst and collect all artifacts + _dependable_element_index( + name = name + "_index", + module_name = name, + description = description, + template = Label("//bazel/rules/rules_score:templates/seooc_index.template.rst"), + assumptions_of_use = assumptions_of_use, + requirements = requirements, + components = components, + architectural_design = architectural_design, + dependability_analysis = dependability_analysis, + checklists = checklists, + tests = tests, + deps = deps, + testonly = testonly, + visibility = ["//visibility:private"], + ) + + # Step 2: Create sphinx_module using generated index and artifacts + sphinx_module( + name = name, + srcs = [":" + name + "_index"], + index = ":" + name + "_index", + deps = deps, + sphinx = sphinx, + testonly = testonly, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/feature_requirements.bzl b/bazel/rules/rules_score/private/feature_requirements.bzl new file mode 100644 index 0000000..fd8dec1 --- /dev/null +++ b/bazel/rules/rules_score/private/feature_requirements.bzl @@ -0,0 +1,119 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Feature Requirements build rules for S-CORE projects. 
+ +This module provides macros and rules for defining feature requirements +following S-CORE process guidelines. Feature requirements describe the +high-level features that a software component must implement. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +FeatureRequirementsInfo = provider( + doc = "Provider for feature requirements artifacts", + fields = { + "srcs": "Depset of source files containing feature requirements", + "name": "Name of the feature requirements target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _feature_requirements_impl(ctx): + """Implementation for feature_requirements rule. + + Collects feature requirements source files and provides them through + the FeatureRequirementsInfo provider. 
+ + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and FeatureRequirementsInfo + """ + srcs = depset(ctx.files.srcs) + + return [ + DefaultInfo(files = srcs), + FeatureRequirementsInfo( + srcs = srcs, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = srcs, + transitive_srcs = srcs, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_feature_requirements = rule( + implementation = _feature_requirements_impl, + doc = "Collects feature requirements documents for S-CORE process compliance", + attrs = { + "srcs": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = True, + doc = "Source files containing feature requirements specifications", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def feature_requirements( + name, + srcs, + visibility = None): + """Define feature requirements following S-CORE process guidelines. + + Feature requirements describe the high-level features and capabilities + that a software component must implement. They serve as the top-level + requirements that drive component-level requirements. + + Args: + name: The name of the feature requirements target. Used as the base + name for all generated targets. + srcs: List of labels to .rst, .md, or .trlc files containing the + feature requirements specifications as defined in the S-CORE + process. + visibility: Bazel visibility specification for the generated targets. 
+ + Generated Targets: + : Main feature requirements target providing FeatureRequirementsInfo + + Example: + ```starlark + feature_requirements( + name = "my_feature_requirements", + srcs = ["feature_requirements.rst"], + ) + ``` + """ + _feature_requirements( + name = name, + srcs = srcs, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/safety_analysis.bzl b/bazel/rules/rules_score/private/safety_analysis.bzl new file mode 100644 index 0000000..9614e25 --- /dev/null +++ b/bazel/rules/rules_score/private/safety_analysis.bzl @@ -0,0 +1,175 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Safety Analysis build rules for S-CORE projects. + +This module provides macros and rules for defining safety analysis documentation +following S-CORE process guidelines. Safety analysis includes failure mode analysis, +control measures, fault tree analysis, and other safety-related artifacts. 
+""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:architectural_design.bzl", "ArchitecturalDesignInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +AnalysisInfo = provider( + doc = "Provider for safety analysis artifacts", + fields = { + "controlmeasures": "Depset of control measures documentation or requirements", + "failuremodes": "Depset of failure modes documentation or requirements", + "fta": "Depset of Fault Tree Analysis diagrams", + "arch_design": "ArchitecturalDesignInfo provider for linked architectural design", + "name": "Name of the safety analysis target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _analysis_impl(ctx): + """Implementation for safety_analysis rule. + + Collects safety analysis artifacts including control measures, failure modes, + and fault tree analysis diagrams, linking them to architectural design. 
+ + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and AnalysisInfo + """ + controlmeasures = depset(ctx.files.controlmeasures) + failuremodes = depset(ctx.files.failuremodes) + fta = depset(ctx.files.fta) + + # Get architectural design provider if available + arch_design_info = None + if ctx.attr.arch_design and ArchitecturalDesignInfo in ctx.attr.arch_design: + arch_design_info = ctx.attr.arch_design[ArchitecturalDesignInfo] + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [controlmeasures, failuremodes, fta], + ) + + # Collect transitive sphinx sources from architectural design + transitive = [all_files] + if ctx.attr.arch_design and SphinxSourcesInfo in ctx.attr.arch_design: + transitive.append(ctx.attr.arch_design[SphinxSourcesInfo].transitive_srcs) + + return [ + DefaultInfo(files = all_files), + AnalysisInfo( + controlmeasures = controlmeasures, + failuremodes = failuremodes, + fta = fta, + arch_design = arch_design_info, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = depset(transitive = transitive), + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_analysis = rule( + implementation = _analysis_impl, + doc = "Collects safety analysis documents for S-CORE process compliance", + attrs = { + "controlmeasures": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = False, + doc = "Control measures documentation or requirements targets (can be AoUs or requirements)", + ), + "failuremodes": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = False, + doc = "Failure modes documentation or requirements targets", + ), + "fta": attr.label_list( + allow_files = [".puml", ".plantuml", ".png", ".svg"], + mandatory = False, + doc = "Fault Tree Analysis (FTA) diagrams", + ), + 
"arch_design": attr.label( + providers = [ArchitecturalDesignInfo], + mandatory = False, + doc = "Reference to architectural_design target for traceability", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def safety_analysis( + name, + controlmeasures = [], + failuremodes = [], + fta = [], + arch_design = None, + visibility = None): + """Define safety analysis following S-CORE process guidelines. + + Safety analysis documents the safety-related analysis of a component, + including failure mode and effects analysis (FMEA/FMEDA), fault tree + analysis (FTA), and control measures that mitigate identified risks. + + Args: + name: The name of the safety analysis target. Used as the base + name for all generated targets. + controlmeasures: Optional list of labels to documentation files or + requirements targets containing control measures that mitigate + identified failure modes. Can reference Assumptions of Use or + requirements as defined in the S-CORE process. + failuremodes: Optional list of labels to documentation files or + requirements targets containing identified failure modes as + defined in the S-CORE process. + fta: Optional list of labels to Fault Tree Analysis diagram files + (.puml, .plantuml, .png, .svg) as defined in the S-CORE process. + arch_design: Optional label to an architectural_design target for + establishing traceability between safety analysis and architecture. + visibility: Bazel visibility specification for the generated targets. 
+ + Generated Targets: + : Main safety analysis target providing AnalysisInfo + + Example: + ```starlark + safety_analysis( + name = "my_safety_analysis", + controlmeasures = [":my_control_measures"], + failuremodes = [":my_failure_modes"], + fta = ["fault_tree.puml"], + arch_design = ":my_architectural_design", + ) + ``` + """ + _analysis( + name = name, + controlmeasures = controlmeasures, + failuremodes = failuremodes, + fta = fta, + arch_design = arch_design, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/sphinx_module.bzl b/bazel/rules/rules_score/private/sphinx_module.bzl new file mode 100644 index 0000000..2ab3d29 --- /dev/null +++ b/bazel/rules/rules_score/private/sphinx_module.bzl @@ -0,0 +1,302 @@ +# ====================================================================================== +# Providers +# ====================================================================================== + +SphinxModuleInfo = provider( + doc = "Provider for Sphinx HTML module documentation", + fields = { + "html_dir": "Directory containing HTML files", + }, +) + +SphinxNeedsInfo = provider( + doc = "Provider for sphinx-needs info", + fields = { + "needs_json_file": "Direct needs.json file for this module", + "needs_json_files": "Depset of needs.json files including transitive dependencies", + }, +) + +# ====================================================================================== +# Helpers +# ====================================================================================== +def _create_config_py(ctx): + """Get or generate the conf.py configuration file. 
+ + Args: + ctx: Rule context + """ + if ctx.attr.config: + config_file = ctx.attr.config.files.to_list()[0] + else: + config_file = ctx.actions.declare_file(ctx.label.name + "/conf.py") + template = ctx.file._config_template + + # Read template and substitute PROJECT_NAME + ctx.actions.expand_template( + template = template, + output = config_file, + substitutions = { + "{PROJECT_NAME}": ctx.label.name.replace("_", " ").title(), + }, + ) + return config_file + +# ====================================================================================== +# Common attributes for Sphinx rules +# ====================================================================================== +sphinx_rule_attrs = { + "srcs": attr.label_list( + allow_files = True, + doc = "List of source files for the Sphinx documentation.", + ), + "sphinx": attr.label( + doc = "The Sphinx build binary to use.", + mandatory = True, + executable = True, + cfg = "exec", + ), + "config": attr.label( + allow_files = [".py"], + doc = "Configuration file (conf.py) for the Sphinx documentation. 
If not provided, a default config will be generated.", + mandatory = False, + ), + "index": attr.label( + allow_files = [".rst"], + doc = "Index file (index.rst) for the Sphinx documentation.", + mandatory = True, + ), + "deps": attr.label_list( + doc = "List of other sphinx_module targets this module depends on for intersphinx.", + ), + "_config_template": attr.label( + default = Label("//bazel/rules/rules_score:templates/conf.template.py"), + allow_single_file = True, + doc = "Template for generating default conf.py", + ), + "_html_merge_tool": attr.label( + default = Label("//bazel/rules/rules_score:sphinx_html_merge"), + executable = True, + cfg = "exec", + doc = "Tool for merging HTML directories", + ), +} + +# ====================================================================================== +# Rule implementations +# ====================================================================================== +def _score_needs_impl(ctx): + output_path = ctx.label.name.replace("_needs", "") + "/needs.json" + needs_output = ctx.actions.declare_file(output_path) + + # Get config file (generate or use provided) + config_file = _create_config_py(ctx) + + # Phase 1: Build needs.json (without external needs) + needs_inputs = ctx.files.srcs + [config_file] + + if ctx.attr.config: + needs_inputs = needs_inputs + ctx.files.config + + needs_args = [ + "--index_file", + ctx.attr.index.files.to_list()[0].path, + "--output_dir", + needs_output.dirname, + "--config", + config_file.path, + "--builder", + "needs", + ] + + ctx.actions.run( + inputs = needs_inputs, + outputs = [needs_output], + arguments = needs_args, + progress_message = "Generating needs.json for: %s" % ctx.label.name, + executable = ctx.executable.sphinx, + ) + + transitive_needs = [dep[SphinxNeedsInfo].needs_json_files for dep in ctx.attr.deps if SphinxNeedsInfo in dep] + needs_json_files = depset([needs_output], transitive = transitive_needs) + + return [ + DefaultInfo( + files = needs_json_files, + ), + 
SphinxNeedsInfo( + needs_json_file = needs_output, # Direct file only + needs_json_files = needs_json_files, # Transitive depset + ), + ] + +def _score_html_impl(ctx): + """Implementation for building a Sphinx module with two-phase build. + + Phase 1: Generate needs.json for this module and collect from all deps + Phase 2: Generate HTML with external needs and merge all dependency HTML + """ + + # Collect all transitive dependencies with deduplication + modules = [] + + needs_external_needs = {} + for dep in ctx.attr.needs: + if SphinxNeedsInfo in dep: + dep_name = dep.label.name.replace("_needs", "") + needs_external_needs[dep.label.name] = { + "base_url": dep_name, # Relative path to the subdirectory where dep HTML is copied + "json_path": dep[SphinxNeedsInfo].needs_json_file.path, # Use direct file + "id_prefix": "", + "css_class": "", + } + + for dep in ctx.attr.deps: + if SphinxModuleInfo in dep: + modules.extend([dep[SphinxModuleInfo].html_dir]) + + needs_external_needs_json = ctx.actions.declare_file(ctx.label.name + "/needs_external_needs.json") + + ctx.actions.write( + output = needs_external_needs_json, + content = json.encode_indent(needs_external_needs, indent = " "), + ) + + # Read template and substitute PROJECT_NAME + config_file = ctx.actions.declare_file(ctx.label.name + "/conf.py") + template = ctx.file._config_template + + ctx.actions.expand_template( + template = template, + output = config_file, + substitutions = { + "{PROJECT_NAME}": ctx.label.name.replace("_", " ").title(), + }, + ) + + # Build HTML with external needs + html_inputs = ctx.files.srcs + ctx.files.needs + [config_file, needs_external_needs_json] + sphinx_html_output = ctx.actions.declare_directory(ctx.label.name + "/_html") + html_args = [ + "--index_file", + ctx.attr.index.files.to_list()[0].path, + "--output_dir", + sphinx_html_output.path, + "--config", + config_file.path, + "--builder", + "html", + ] + + ctx.actions.run( + inputs = html_inputs, + outputs = 
[sphinx_html_output], + arguments = html_args, + progress_message = "Building HTML: %s" % ctx.label.name, + executable = ctx.executable.sphinx, + ) + + # Create final HTML output directory with dependencies using Python merge script + html_output = ctx.actions.declare_directory(ctx.label.name + "/html") + + # Build arguments for the merge script + merge_args = [ + "--output", + html_output.path, + "--main", + sphinx_html_output.path, + ] + + merge_inputs = [sphinx_html_output] + + # Add each dependency + for dep in ctx.attr.deps: + if SphinxModuleInfo in dep: + dep_html_dir = dep[SphinxModuleInfo].html_dir + dep_name = dep.label.name + merge_inputs.append(dep_html_dir) + merge_args.extend(["--dep", dep_name + ":" + dep_html_dir.path]) + + # Merging html files + ctx.actions.run( + inputs = merge_inputs, + outputs = [html_output], + arguments = merge_args, + progress_message = "Merging HTML with dependencies for %s" % ctx.label.name, + executable = ctx.executable._html_merge_tool, + ) + + return [ + DefaultInfo(files = depset(ctx.files.needs + [html_output])), + SphinxModuleInfo( + html_dir = html_output, + ), + ] + +# ====================================================================================== +# Rule definitions +# ====================================================================================== + +_score_needs = rule( + implementation = _score_needs_impl, + attrs = sphinx_rule_attrs, +) + +_score_html = rule( + implementation = _score_html_impl, + attrs = dict(sphinx_rule_attrs, needs = attr.label_list( + allow_files = True, + doc = "Submodule symbols.needs targets for this module.", + )), +) + +# ====================================================================================== +# Rule wrappers +# ====================================================================================== + +def sphinx_module( + name, + srcs, + index, + config = None, + deps = [], + sphinx = Label("//bazel/rules/rules_score:score_build"), + testonly = False, + 
visibility = ["//visibility:public"]):
+    """Build a Sphinx module with transitive HTML dependencies.
+
+    This rule builds documentation modules into complete HTML sites with
+    transitive dependency collection. All dependencies are automatically
+    included in a modules/ subdirectory for intersphinx cross-referencing.
+
+    Args:
+        name: Name of the target
+        srcs: List of source files (.rst, .md) with index file first
+        index: Label to index.rst file
+        config: Label to conf.py configuration file (optional, will be auto-generated if not provided)
+        deps: List of other sphinx_module targets this module depends on
+        sphinx: Label to sphinx build binary (default: //bazel/rules/rules_score:score_build)
+        visibility: Bazel visibility
+    """
+    _score_needs(
+        name = name + "_needs",
+        srcs = srcs,
+        config = config,
+        index = index,
+        deps = [d + "_needs" for d in deps],
+        sphinx = sphinx,
+        testonly = testonly,
+        visibility = visibility,
+    )
+
+    _score_html(
+        name = name,
+        srcs = srcs,
+        config = config,
+        index = index,
+        deps = deps,
+        needs = [d + "_needs" for d in deps],
+        sphinx = sphinx,
+        testonly = testonly,
+        visibility = visibility,
+    )
diff --git a/bazel/rules/rules_score/private/unit.bzl b/bazel/rules/rules_score/private/unit.bzl
new file mode 100644
index 0000000..835b3a1
--- /dev/null
+++ b/bazel/rules/rules_score/private/unit.bzl
@@ -0,0 +1,157 @@
+# *******************************************************************************
+# Copyright (c) 2025 Contributors to the Eclipse Foundation
+#
+# See the NOTICE file(s) distributed with this work for additional
+# information regarding copyright ownership.
+#
+# This program and the accompanying materials are made available under the
+# terms of the Apache License Version 2.0 which is available at
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# SPDX-License-Identifier: Apache-2.0
+# *******************************************************************************
+
+"""
+Unit build rules for S-CORE projects. 
+ +This module provides macros and rules for defining software units +following S-CORE process guidelines. A unit is the smallest testable +software element with associated design, implementation, and tests. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo", "UnitInfo") + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _unit_impl(ctx): + """Implementation for unit rule. + + Collects unit design artifacts, implementation targets, and tests + and provides them through the UnitInfo provider. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and UnitInfo + """ + + # Collect design files from unit_design targets + design_files = [] + for design_target in ctx.attr.unit_design: + if SphinxSourcesInfo in design_target: + design_files.append(design_target[SphinxSourcesInfo].srcs) + + design_depset = depset(transitive = design_files) + + # Collect implementation and test targets + # Include scope targets in the implementation depset + implementation_depset = depset(ctx.attr.implementation + ctx.attr.scope) + tests_depset = depset(ctx.attr.tests) + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [design_depset], + ) + + return [ + DefaultInfo(files = all_files), + UnitInfo( + name = ctx.label.name, + unit_design = design_depset, + implementation = implementation_depset, + tests = tests_depset, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = all_files, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_unit = rule( + implementation = _unit_impl, + doc = "Defines a software unit with design, implementation, and tests for S-CORE process compliance", + attrs = { + "unit_design": 
attr.label_list( + mandatory = True, + doc = "Unit design artifacts (typically architectural_design targets)", + ), + "implementation": attr.label_list( + mandatory = True, + doc = "Implementation targets (cc_library, py_library, rust_library, etc.)", + ), + "scope": attr.label_list( + default = [], + doc = "Additional not explicitly named targets which are needed for the unit implementation", + ), + "tests": attr.label_list( + mandatory = True, + doc = "Test targets that verify the unit (cc_test, py_test, rust_test, etc.)", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def unit( + name, + unit_design, + implementation, + tests, + scope = [], + testonly = True, + visibility = None): + """Define a software unit following S-CORE process guidelines. + + A unit is the smallest testable software element in the S-CORE process. + It consists of: + - Unit design: Design documentation and diagrams + - Implementation: Source code that realizes the design + - Tests: Test cases that verify the implementation + + Args: + name: The name of the unit. Used as the target name. + unit_design: List of labels to architectural_design targets or design + documentation that describes the unit's internal structure and behavior. + implementation: List of labels to Bazel targets representing the actual + implementation (cc_library, py_library, rust_library, etc.). + scope: Optional list of additional targets needed for the unit implementation + but not explicitly named in the implementation list. Default is empty list. + tests: List of labels to Bazel test targets (cc_test, py_test, rust_test, etc.) + that verify the unit implementation. + testonly: If true, only testonly targets can depend on this unit. Set to true + when the unit depends on testonly targets like tests. + visibility: Bazel visibility specification for the unit target. 
+ + Example: + ```python + unit( + name = "kvs_unit1", + unit_design = [":kvs_architectural_design"], + implementation = [ + "//persistency/kvs:lib1", + "//persistency/kvs:lib2", + "//persistency/kvs:lib3", + ], + tests = ["//persistency/kvs/tests:score_kvs_component_tests"], + visibility = ["//visibility:public"], + ) + ``` + """ + _unit( + name = name, + unit_design = unit_design, + implementation = implementation, + scope = scope, + tests = tests, + testonly = testonly, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/providers.bzl b/bazel/rules/rules_score/providers.bzl new file mode 100644 index 0000000..e038df2 --- /dev/null +++ b/bazel/rules/rules_score/providers.bzl @@ -0,0 +1,57 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Shared providers for S-CORE documentation build rules. + +This module defines providers that are shared across multiple documentation +build rules to enable consistent Sphinx documentation generation. +""" + +# ============================================================================ +# Provider Definitions +# ============================================================================ + +SphinxSourcesInfo = provider( + doc = """Provider for Sphinx documentation source files. + + This provider aggregates all source files needed for Sphinx documentation + builds, including reStructuredText, Markdown, PlantUML diagrams, and + image files. 
Rules that produce documentation artifacts should provide
+    this to enable integration with sphinx_module and dependable_element.
+    """,
+    fields = {
+        "srcs": "Depset of source files for Sphinx documentation (.rst, .md, .puml, .plantuml, .svg, .png, etc.)",
+        "transitive_srcs": "Depset of transitive source files from dependencies",
+    },
+)
+
+UnitInfo = provider(
+    doc = "Provider for unit artifacts",
+    fields = {
+        "name": "Name of the unit target",
+        "unit_design": "Depset of unit design artifacts (architectural design)",
+        "implementation": "Depset of implementation targets (libraries, binaries)",
+        "tests": "Depset of test targets",
+    },
+)
+
+ComponentInfo = provider(
+    doc = "Provider for component artifacts",
+    fields = {
+        "name": "Name of the component target",
+        "requirements": "Depset of component requirements artifacts",
+        "components": "Depset of unit and/or nested component targets that comprise this component",
+        "tests": "Depset of component-level integration test targets",
+    },
+)
diff --git a/bazel/rules/rules_score/rules_score.bzl b/bazel/rules/rules_score/rules_score.bzl
new file mode 100644
index 0000000..7084744
--- /dev/null
+++ b/bazel/rules/rules_score/rules_score.bzl
@@ -0,0 +1,62 @@
+load("@rules_python//sphinxdocs:sphinx.bzl", "sphinx_docs")
+load("@rules_python//sphinxdocs:sphinx_docs_library.bzl", "sphinx_docs_library")
+load(
+    "//bazel/rules/rules_score:providers.bzl",
+    _ComponentInfo = "ComponentInfo",
+    _SphinxSourcesInfo = "SphinxSourcesInfo",
+    _UnitInfo = "UnitInfo",
+)
+load(
+    "//bazel/rules/rules_score/private:architectural_design.bzl",
+    _architectural_design = "architectural_design",
+)
+load(
+    "//bazel/rules/rules_score/private:assumptions_of_use.bzl",
+    _assumptions_of_use = "assumptions_of_use",
+)
+load(
+    "//bazel/rules/rules_score/private:component.bzl",
+    _component = "component",
+)
+load(
+    "//bazel/rules/rules_score/private:component_requirements.bzl",
+    _component_requirements = "component_requirements",
+)
+load(
"//bazel/rules/rules_score/private:dependability_analysis.bzl", + _dependability_analysis = "dependability_analysis", +) +load( + "//bazel/rules/rules_score/private:dependable_element.bzl", + _dependable_element = "dependable_element", +) +load( + "//bazel/rules/rules_score/private:feature_requirements.bzl", + _feature_requirements = "feature_requirements", +) +load( + "//bazel/rules/rules_score/private:safety_analysis.bzl", + _safety_analysis = "safety_analysis", +) +load( + "//bazel/rules/rules_score/private:sphinx_module.bzl", + _sphinx_module = "sphinx_module", +) +load( + "//bazel/rules/rules_score/private:unit.bzl", + _unit = "unit", +) + +architectural_design = _architectural_design +assumptions_of_use = _assumptions_of_use +component_requirements = _component_requirements +dependability_analysis = _dependability_analysis +feature_requirements = _feature_requirements +safety_analysis = _safety_analysis +sphinx_module = _sphinx_module +unit = _unit +component = _component +dependable_element = _dependable_element +SphinxSourcesInfo = _SphinxSourcesInfo +UnitInfo = _UnitInfo +ComponentInfo = _ComponentInfo diff --git a/bazel/rules/rules_score/src/sphinx_html_merge.py b/bazel/rules/rules_score/src/sphinx_html_merge.py new file mode 100644 index 0000000..60dfaa4 --- /dev/null +++ b/bazel/rules/rules_score/src/sphinx_html_merge.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 +"""Merge multiple Sphinx HTML output directories. + +This script merges Sphinx HTML documentation from multiple modules into a single +output directory. It copies the main module's HTML as-is, and then copies each +dependency module's HTML into a subdirectory, excluding nested module directories +to avoid duplication. + +Usage: + sphinx_html_merge.py --output OUTPUT_DIR --main MAIN_HTML_DIR [--dep NAME:PATH ...] 
+""" + +import argparse +import os +import re +import shutil +import sys +from pathlib import Path + + +# Standard Sphinx directories that should be copied +# Note: _static and _sphinx_design_static are excluded for dependencies to avoid duplication +SPHINX_DIRS = {"_sources", ".doctrees"} + + +def copy_html_files(src_dir, dst_dir, exclude_module_dirs=None, sibling_modules=None): + """Copy HTML and related files from src to dst, with optional link fixing. + + Args: + src_dir: Source HTML directory + dst_dir: Destination directory + exclude_module_dirs: Set of module directory names to skip (to avoid copying nested modules). + If None, copy everything. + sibling_modules: Set of sibling module names for fixing links in HTML files. + If None, no link fixing is performed. + """ + src_path = Path(src_dir) + dst_path = Path(dst_dir) + + if not src_path.exists(): + print(f"Warning: Source directory does not exist: {src_dir}", file=sys.stderr) + return + + dst_path.mkdir(parents=True, exist_ok=True) + + if exclude_module_dirs is None: + exclude_module_dirs = set() + + # Prepare regex patterns for link fixing if needed + module_pattern = None + static_pattern = None + if sibling_modules: + module_pattern = re.compile( + r'((?:href|src)=")(' + + "|".join(re.escape(mod) for mod in sibling_modules) + + r")/", + re.IGNORECASE, + ) + static_pattern = re.compile( + r'((?:href|src)=")(\.\./)*(_static|_sphinx_design_static)/', re.IGNORECASE + ) + + def process_file(src_file, dst_file, relative_path): + """Read, optionally modify, and write a file.""" + if src_file.suffix == ".html" and sibling_modules: + # Read, modify, and write HTML files + try: + content = src_file.read_text(encoding="utf-8") + + # Replace module_name/ with ../module_name/ + modified_content = module_pattern.sub(r"\1../\2/", content) + + # Calculate depth for static file references + depth = len(relative_path.parents) - 1 + parent_prefix = "../" * (depth + 1) + + def replace_static(match): + return 
f"{match.group(1)}{parent_prefix}{match.group(3)}/" + + modified_content = static_pattern.sub(replace_static, modified_content) + + # Write modified content + dst_file.parent.mkdir(parents=True, exist_ok=True) + dst_file.write_text(modified_content, encoding="utf-8") + except Exception as e: + print(f"Warning: Failed to process {src_file}: {e}", file=sys.stderr) + # Fallback to regular copy on error + shutil.copy2(src_file, dst_file) + else: + # Regular copy for non-HTML files + dst_file.parent.mkdir(parents=True, exist_ok=True) + shutil.copy2(src_file, dst_file) + + def copy_tree(src, dst, rel_path): + """Recursively copy directory tree with processing.""" + for item in src.iterdir(): + rel_item = rel_path / item.name + dst_item = dst / item.name + + if item.is_file(): + process_file(item, dst_item, rel_item) + elif item.is_dir(): + # Skip excluded directories + if item.name in exclude_module_dirs: + continue + # Skip static dirs from dependencies + if ( + item.name in ("_static", "_sphinx_design_static") + and exclude_module_dirs + ): + continue + + dst_item.mkdir(parents=True, exist_ok=True) + copy_tree(item, dst_item, rel_item) + + # Start copying from root + copy_tree(src_path, dst_path, Path(".")) + + +def merge_html_dirs(output_dir, main_html_dir, dependencies): + """Merge HTML directories. 
+ + Args: + output_dir: Target output directory + main_html_dir: Main module's HTML directory to copy as-is + dependencies: List of (name, path) tuples for dependency modules + """ + output_path = Path(output_dir) + + # First, copy the main HTML directory + print(f"Copying main HTML from {main_html_dir} to {output_dir}") + copy_html_files(main_html_dir, output_dir) + + # Collect all dependency names for link fixing and exclusion + dep_names = [name for name, _ in dependencies] + + # Then copy each dependency into a subdirectory with link fixing + for dep_name, dep_html_dir in dependencies: + dep_output = output_path / dep_name + print(f"Copying dependency {dep_name} from {dep_html_dir} to {dep_output}") + # Exclude other module directories to avoid nested modules + # Remove current module from the list to get actual siblings to exclude + sibling_modules = set(n for n in dep_names if n != dep_name) + copy_html_files( + dep_html_dir, + dep_output, + exclude_module_dirs=sibling_modules, + sibling_modules=sibling_modules, + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Merge Sphinx HTML documentation directories" + ) + parser.add_argument( + "--output", required=True, help="Output directory for merged HTML" + ) + parser.add_argument("--main", required=True, help="Main HTML directory to copy") + parser.add_argument( + "--dep", + action="append", + default=[], + metavar="NAME:PATH", + help="Dependency HTML directory in format NAME:PATH", + ) + + args = parser.parse_args() + + # Parse dependencies + dependencies = [] + for dep_spec in args.dep: + if ":" not in dep_spec: + print( + f"Error: Invalid dependency format '{dep_spec}', expected NAME:PATH", + file=sys.stderr, + ) + return 1 + + name, path = dep_spec.split(":", 1) + dependencies.append((name, path)) + + # Merge the HTML directories + merge_html_dirs(args.output, args.main, dependencies) + + print(f"Successfully merged HTML into {args.output}") + return 0 + + +if __name__ == "__main__": + 
sys.exit(main())
diff --git a/bazel/rules/rules_score/src/sphinx_wrapper.py b/bazel/rules/rules_score/src/sphinx_wrapper.py
new file mode 100644
index 0000000..1376057
--- /dev/null
+++ b/bazel/rules/rules_score/src/sphinx_wrapper.py
@@ -0,0 +1,263 @@
+"""
+Wrapper script for running Sphinx builds in Bazel environments.
+
+This script provides a command-line interface to Sphinx documentation builds,
+handling argument parsing, environment configuration, and build execution.
+It's designed to be used as part of Bazel build rules for Score modules.
+"""
+
+import argparse
+import logging
+import os
+import re
+import sys
+import time
+from contextlib import redirect_stdout, redirect_stderr
+from pathlib import Path
+from typing import List, Optional
+
+from sphinx.cmd.build import main as sphinx_main
+
+# Constants
+DEFAULT_PORT = 8000
+DEFAULT_GITHUB_VERSION = "main"
+DEFAULT_SOURCE_DIR = "."
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(levelname)s: %(message)s",
+)
+logger = logging.getLogger(__name__)
+
+SANDBOX_PATH = re.compile(r"^.*_main/")  # Bazel execroot prefix, stripped from output
+
+
+class StdoutProcessor:
+    """File-like sink that sandbox-cleans text and tags it [SPHINX_STDOUT]."""
+    def write(self, text):
+        if text.strip():
+            text = re.sub(SANDBOX_PATH, "", text)
+            sys.__stdout__.write(f"[SPHINX_STDOUT]: {text.strip()}\n")
+
+    def flush(self):
+        sys.__stdout__.flush()
+
+
+class StderrProcessor:
+    """File-like sink that sandbox-cleans text and tags it [SPHINX_STDERR]."""
+    def write(self, text):
+        if text.strip():
+            text = re.sub(SANDBOX_PATH, "", text)
+            sys.__stderr__.write(f"[SPHINX_STDERR]: {text.strip()}\n")
+
+    def flush(self):
+        sys.__stderr__.flush()
+
+
+def get_env(name: str, required: bool = True) -> Optional[str]:
+    """
+    Get an environment variable value.
+ + Args: + name: The name of the environment variable + required: Whether the variable is required (raises error if not set) + + Returns: + The value of the environment variable, or None if not required and not set + + Raises: + ValueError: If the variable is required but not set + """ + val = os.environ.get(name) + logger.debug(f"Environment variable {name} = {val}") + if val is None and required: + raise ValueError(f"Required environment variable {name} is not set") + return val + + +def validate_arguments(args: argparse.Namespace) -> None: + """ + Validate required command-line arguments. + + Args: + args: Parsed command-line arguments + + Raises: + ValueError: If required arguments are missing or invalid + """ + if not args.index_file: + raise ValueError("--index_file is required") + if not args.output_dir: + raise ValueError("--output_dir is required") + if not args.builder: + raise ValueError("--builder is required") + + # Validate that index file exists if it's a real path + index_path = Path(args.index_file) + if not index_path.exists(): + raise ValueError(f"Index file does not exist: {args.index_file}") + + +def build_sphinx_arguments(args: argparse.Namespace) -> List[str]: + """ + Build the argument list for Sphinx. 
+ + Args: + args: Parsed command-line arguments + + Returns: + List of arguments to pass to Sphinx + """ + source_dir = ( + str(Path(args.index_file).parent) if args.index_file else DEFAULT_SOURCE_DIR + ) + config_dir = str(Path(args.config).parent) if args.config else source_dir + + base_arguments = [ + source_dir, # source dir + args.output_dir, # output dir + "-c", + config_dir, # config directory + # "-W", # treat warning as errors - disabled for modular builds + "--keep-going", # do not abort after one error + "-T", # show details in case of errors in extensions + "--jobs", + "auto", + ] + + # Configure sphinx build with GitHub user and repo from CLI + if args.github_user and args.github_repo: + base_arguments.extend( + [ + f"-A=github_user={args.github_user}", + f"-A=github_repo={args.github_repo}", + f"-A=github_version={DEFAULT_GITHUB_VERSION}", + ] + ) + + # Add doc_path if SOURCE_DIRECTORY environment variable is set + source_directory = get_env("SOURCE_DIRECTORY", required=False) + if source_directory: + base_arguments.append(f"-A=doc_path='{source_directory}'") + + base_arguments.extend(["-b", args.builder]) + + return base_arguments + + +def run_sphinx_build(sphinx_args: List[str], builder: str) -> int: + """ + Execute the Sphinx build and measure duration. 
+ + Args: + sphinx_args: Arguments to pass to Sphinx + builder: The builder type (for logging purposes) + + Returns: + The exit code from Sphinx build + """ + logger.info(f"Starting Sphinx build with builder: {builder}") + logger.debug(f"Sphinx arguments: {sphinx_args}") + + start_time = time.perf_counter() + + try: + exit_code = sphinx_main(sphinx_args) + except Exception as e: + logger.error(f"Sphinx build failed with exception: {e}") + return 1 + + end_time = time.perf_counter() + duration = end_time - start_time + + if exit_code == 0: + logger.info(f"docs ({builder}) finished successfully in {duration:.1f} seconds") + else: + logger.error( + f"docs ({builder}) failed with exit code {exit_code} after {duration:.1f} seconds" + ) + + return exit_code + + +def parse_arguments() -> argparse.Namespace: + """ + Parse command-line arguments. + + Returns: + Parsed command-line arguments + """ + parser = argparse.ArgumentParser( + description="Wrapper for Sphinx documentation builds in Bazel environments" + ) + + # Required arguments + parser.add_argument( + "--index_file", + required=True, + help="Path to the index file (e.g., index.rst)", + ) + parser.add_argument( + "--output_dir", + required=True, + help="Build output directory", + ) + parser.add_argument( + "--builder", + required=True, + help="Sphinx builder to use (e.g., html, needs, json)", + ) + + # Optional arguments + parser.add_argument( + "--config", + help="Path to config file (conf.py)", + ) + parser.add_argument( + "--github_user", + help="GitHub username to embed in the Sphinx build", + ) + parser.add_argument( + "--github_repo", + help="GitHub repository to embed in the Sphinx build", + ) + parser.add_argument( + "--port", + type=int, + default=DEFAULT_PORT, + help=f"Port to use for live preview (default: {DEFAULT_PORT}). Use 0 for auto-detection.", + ) + + return parser.parse_args() + + +def main() -> int: + """ + Main entry point for the Sphinx wrapper script. 
+
+    Returns:
+        Exit code (0 for success, non-zero for failure)
+    """
+    try:
+        args = parse_arguments()
+        validate_arguments(args)
+        # Create processor instances
+        stdout_processor = StdoutProcessor()
+        stderr_processor = StderrProcessor()
+        # Route each stream to its matching processor (previously swapped)
+        with redirect_stdout(stdout_processor), redirect_stderr(stderr_processor):
+            sphinx_args = build_sphinx_arguments(args)
+            exit_code = run_sphinx_build(sphinx_args, args.builder)
+            # Propagate Sphinx's exit code (was forced to 0, masking failures)
+        return exit_code
+    except ValueError as e:
+        logger.error(f"Validation error: {e}")
+        return 1
+    except Exception as e:
+        logger.error(f"Unexpected error: {e}")
+        return 1
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/bazel/rules/rules_score/templates/conf.template.py b/bazel/rules/rules_score/templates/conf.template.py
new file mode 100644
index 0000000..e916952
--- /dev/null
+++ b/bazel/rules/rules_score/templates/conf.template.py
@@ -0,0 +1,207 @@
+# *******************************************************************************
+# Copyright (c) 2025 Contributors to the Eclipse Foundation
+#
+# See the NOTICE file(s) distributed with this work for additional
+# information regarding copyright ownership.
+#
+# This program and the accompanying materials are made available under the
+# terms of the Apache License Version 2.0 which is available at
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# SPDX-License-Identifier: Apache-2.0
+# *******************************************************************************
+
+"""
+Generic Sphinx configuration template for SCORE modules.
+
+This file is auto-generated from a template and should not be edited directly.
+Template variables like {PROJECT_NAME} are replaced during Bazel build.
+""" + +import json +import os +from pathlib import Path +from typing import Any, Dict, List +from sphinx.util import logging + + +# Create a logger with the Sphinx namespace +logger = logging.getLogger(__name__) + +# Project configuration - {PROJECT_NAME} will be replaced by the module name during build +project = "{PROJECT_NAME}" +author = "S-CORE" +version = "1.0" +release = "1.0.0" +project_url = ( + "https://github.com/eclipse-score" # Required by score_metamodel extension +) + +# Sphinx extensions - comprehensive list for SCORE modules +extensions = [ + "sphinx_needs", + "sphinx_design", + "myst_parser", + "sphinxcontrib.plantuml", + "score_plantuml", + "score_metamodel", + "score_draw_uml_funcs", + "score_source_code_linker", + "score_layout", +] + +# MyST parser extensions +myst_enable_extensions = ["colon_fence"] + +# Exclude patterns for Bazel builds +exclude_patterns = [ + "bazel-*", + ".venv*", +] + +# Enable markdown rendering +source_suffix = { + ".rst": "restructuredtext", + ".md": "markdown", +} + +# Enable numref for cross-references +numfig = True + +# HTML theme +# html_theme = "pydata_sphinx_theme" + + +# Configuration constants +NEEDS_EXTERNAL_FILE = "needs_external_needs.json" +BAZEL_OUT_DIR = "bazel-out" + + +def find_workspace_root() -> Path: + """ + Find the Bazel workspace root by looking for the bazel-out directory. + + Returns: + Path to the workspace root directory + """ + current = Path.cwd() + + # Traverse up the directory tree looking for bazel-out + while current != current.parent: + if (current / BAZEL_OUT_DIR).exists(): + return current + current = current.parent + + # If we reach the root without finding it, return current directory + return Path.cwd() + + +def load_external_needs() -> List[Dict[str, Any]]: + """ + Load external needs configuration from JSON file. + + This function reads the needs_external_needs.json file if it exists and + resolves relative paths to absolute paths based on the workspace root. 
+ + Returns: + List of external needs configurations with resolved paths + """ + needs_file = Path(NEEDS_EXTERNAL_FILE) + + if not needs_file.exists(): + logger.info(f"{NEEDS_EXTERNAL_FILE} not found - no external dependencies") + return [] + + logger.info(f"Loading external needs from {NEEDS_EXTERNAL_FILE}") + + try: + with needs_file.open("r", encoding="utf-8") as file: + needs_dict = json.load(file) + except json.JSONDecodeError as e: + logger.error(f"Failed to parse {NEEDS_EXTERNAL_FILE}: {e}") + return [] + except Exception as e: + logger.error(f"Failed to read {NEEDS_EXTERNAL_FILE}: {e}") + return [] + + workspace_root = find_workspace_root() + logger.info(f"Workspace root: {workspace_root}") + + external_needs = [] + for key, config in needs_dict.items(): + if "json_path" not in config: + logger.warning( + f"External needs config for '{key}' missing 'json_path', skipping" + ) + continue + + # Resolve relative path to absolute path + # Bazel provides relative paths like: bazel-out/k8-fastbuild/bin/.../needs.json + # We need absolute paths: .../execroot/_main/bazel-out/... + json_path = workspace_root / config["json_path"] + config["json_path"] = str(json_path) + + logger.info(f"Added external needs config for '{key}':") + logger.info(f" json_path: {config['json_path']}") + logger.info(f" id_prefix: {config.get('id_prefix', 'none')}") + logger.info(f" version: {config.get('version', 'none')}") + + external_needs.append(config) + + return external_needs + + +def verify_config(app: Any, config: Any) -> None: + """ + Initialize and verify external needs configuration. + + This is called during Sphinx's config-inited event to ensure + external needs configuration is correctly set up. We need to + explicitly set the config value here because Sphinx doesn't + automatically pick up module-level variables for extension configs. 
+ + Args: + app: Sphinx application object + config: Sphinx configuration object + """ + # Set the config from our module-level variable + # This is needed because sphinx-needs registers its config with add_config_value + # which doesn't automatically pick up module-level variables from conf.py + if needs_external_needs: + config.needs_external_needs = needs_external_needs + + logger.info("=" * 80) + logger.info("Verifying Sphinx configuration") + logger.info(f" Project: {config.project}") + logger.info(f" External needs count: {len(config.needs_external_needs)}") + logger.info("=" * 80) + + +def setup(app: Any) -> Dict[str, Any]: + """ + Sphinx setup hook to register event listeners. + + Args: + app: Sphinx application object + + Returns: + Extension metadata dictionary + """ + app.connect("config-inited", verify_config) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } + + +# Initialize external needs configuration +logger.info("=" * 80) +logger.info(f"Sphinx configuration loaded for project: {project}") +logger.info(f"Current working directory: {Path.cwd()}") + +# Load external needs configuration +# Note: This sets a module-level variable that is then applied to the Sphinx +# config object in the verify_config callback during the config-inited event +needs_external_needs = load_external_needs() diff --git a/bazel/rules/rules_score/templates/seooc_index.template.rst b/bazel/rules/rules_score/templates/seooc_index.template.rst new file mode 100644 index 0000000..5def2dc --- /dev/null +++ b/bazel/rules/rules_score/templates/seooc_index.template.rst @@ -0,0 +1,76 @@ +.. ******************************************************************************* +.. Copyright (c) 2025 Contributors to the Eclipse Foundation +.. +.. See the NOTICE file(s) distributed with this work for additional +.. information regarding copyright ownership. +.. +.. This program and the accompanying materials are made available under the +.. 
terms of the Apache License Version 2.0 which is available at +.. https://www.apache.org/licenses/LICENSE-2.0 +.. +.. SPDX-License-Identifier: Apache-2.0 +.. ******************************************************************************* + +Dependable element: {title} +===================={underline} + +{description} + +Architectural Design +-------------------- + +.. toctree:: + :maxdepth: 2 + + {architectural_design} + + +Assumptions of Use +------------------ + +.. toctree:: + :maxdepth: 2 + + {assumptions_of_use} + +Components +---------- + +.. toctree:: + :maxdepth: 1 + +{components} + + +Units +----- + +.. toctree:: + :maxdepth: 1 + +{units} + + + + +Dependability Analysis +---------------------- + +.. toctree:: + :maxdepth: 2 + + {dependability_analysis} + +Checklists +---------- + +.. toctree:: + :maxdepth: 2 + + {checklists} + +Submodules +---------- +This module includes the following submodules: + +{submodules} diff --git a/bazel/rules/rules_score/test/BUILD b/bazel/rules/rules_score/test/BUILD new file mode 100644 index 0000000..e1690cd --- /dev/null +++ b/bazel/rules/rules_score/test/BUILD @@ -0,0 +1,389 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +load( + "//bazel/rules/rules_score:rules_score.bzl", + "architectural_design", + "assumptions_of_use", + "component", + "component_requirements", + "dependability_analysis", + "dependable_element", + "feature_requirements", + "safety_analysis", + "sphinx_module", + "unit", +) +load( + ":html_generation_test.bzl", + "html_merging_test", + "module_dependencies_test", + "needs_transitive_test", + "sphinx_module_test_suite", +) +load( + ":seooc_test.bzl", + "seooc_artifacts_copied_test", + "seooc_needs_provider_test", + "seooc_sphinx_module_generated_test", +) +load( + ":unit_component_test.bzl", + "component_provider_test", + "component_sphinx_sources_test", + "unit_component_test_suite", + "unit_provider_test", + "unit_sphinx_sources_test", +) + +package(default_visibility = ["//visibility:public"]) + +# ============================================================================ +# Test Fixtures - Module Definitions +# ============================================================================ + +# Test 1: Multi-Module Aggregation +# Dependency graph: module_a_lib -> module_b_lib -> module_c_lib +# module_a_lib -> module_c_lib (also direct) +sphinx_module( + name = "module_c_lib", + srcs = glob(["fixtures/module_c/*.rst"]), + index = "fixtures/module_c/index.rst", + sphinx = "//bazel/rules/rules_score:score_build", +) + +sphinx_module( + name = "module_b_lib", + srcs = glob(["fixtures/module_b/*.rst"]), + index = "fixtures/module_b/index.rst", + sphinx = "//bazel/rules/rules_score:score_build", + deps = [":module_c_lib"], +) + +sphinx_module( + name = "module_a_lib", + srcs = glob(["fixtures/module_a/*.rst"]), + index = "fixtures/module_a/index.rst", + 
sphinx = "//bazel/rules/rules_score:score_build", + deps = [ + ":module_b_lib", + ":module_c_lib", + ], +) + +# Test 2: SEooC (Safety Element out of Context) Module +# Tests the score_component macro with S-CORE process artifacts + +# - Feature Requirements: wp__requirements_feat +# TODO: Feature requirements are a stand-alone artifact for now +# We have to link them manually to component requirements +feature_requirements( + name = "feat_req", + srcs = ["fixtures/seooc_test/feature_requirements.rst"], +) + +# - Component Requirements: wp__requirements_comp +component_requirements( + name = "comp_req", + srcs = ["fixtures/seooc_test/component_requirements.rst"], +) + +# - Assumptions of Use: wp__requirements_comp_aou +assumptions_of_use( + name = "aous", + srcs = ["fixtures/seooc_test/assumptions_of_use.rst"], + feature_requirement = [":feat_req"], +) + +# - Architecture Design: wp__component_arch +architectural_design( + name = "arch_design", + dynamic = ["fixtures/seooc_test/dynamic_architecture.rst"], + static = ["fixtures/seooc_test/static_architecture.rst"], +) + +# - Safety Analysis (DFA): wp__sw_component_dfa +# - Safety Analysis (FMEA): wp__sw_component_fmea +dependability_analysis( + name = "dependability_analysis_target", + arch_design = ":arch_design", + dfa = ["fixtures/seooc_test/dfa.rst"], + safety_analysis = [":samplelibrary_safety_analysis"], +) + +safety_analysis( + name = "samplelibrary_safety_analysis", + # TODO + # controlmeasures = [], # can be AoUs or requirements + # failuremodes = [], + # fta = [], + arch_design = ":arch_design", +) + +dependable_element( + name = "seooc_test_lib", + architectural_design = [":arch_design"], + assumptions_of_use = [":aous"], + components = [], + dependability_analysis = [":dependability_analysis_target"], + description = "Test SEooC module demonstrating S-CORE process compliance structure.", + requirements = [":comp_req"], + tests = [], + deps = [ + ":module_c_lib", # dependency to other 
seoocs/score_components + ], +) + +# ============================================================================ +# Test Fixtures - Unit, Component, and Dependable Element +# ============================================================================ + +# Mock implementation targets with dummy functions +cc_library( + name = "mock_lib1", + srcs = ["fixtures/mock_lib1.cc"], +) + +cc_library( + name = "mock_lib2", + srcs = ["fixtures/mock_lib2.cc"], +) + +cc_binary( + name = "test_component_binary", + srcs = ["fixtures/test_component_main.cc"], + deps = [ + ":mock_lib1", + ":mock_lib2", + ], +) + +cc_test( + name = "test_unit_tests", + testonly = True, + srcs = ["fixtures/test_unit_test.cc"], + tags = ["manual"], + deps = [ + ":mock_lib1", + ":mock_lib2", + ], +) + +# Test Unit +unit( + name = "test_unit", + testonly = True, + tests = [":test_unit_tests"], + unit_design = [":arch_design"], + implementation = [ + ":mock_lib1", + ":mock_lib2", + ], +) + +unit( + name = "test_binary_unit", + testonly = True, + tests = [":test_unit_tests"], + unit_design = [":arch_design"], + implementation = [":test_component_binary"], +) + +# Test Component +component( + name = "test_component", + testonly = True, + components = [ + ":test_unit", + "test_binary_unit", + ], + requirements = [":comp_req"], + tests = [], # Empty for testing +) + +# Test Dependable Element +dependable_element( + name = "test_dependable_element", + testonly = True, + architectural_design = [":arch_design"], + assumptions_of_use = [":aous"], + components = [":test_component"], + dependability_analysis = [":dependability_analysis_target"], + description = "Test dependable element for unit testing", + requirements = [":comp_req"], + tests = [], # Empty for testing +) + +# ============================================================================ +# Test Fixtures - Nested Components for Recursive Testing +# ============================================================================ + +# Additional mock 
implementations +cc_library( + name = "mock_lib3", + srcs = ["fixtures/mock_lib1.cc"], # Reuse same source for testing +) + +cc_test( + name = "test_unit2_tests", + testonly = True, + srcs = ["fixtures/test_unit_test.cc"], + tags = ["manual"], + deps = [":mock_lib3"], +) + +# Second unit that will be shared between components +unit( + name = "test_unit2", + testonly = True, + tests = [":test_unit2_tests"], + unit_design = [":arch_design"], + implementation = [":mock_lib3"], +) + +# Nested component containing unit2 +component( + name = "test_nested_component", + testonly = True, + components = [":test_unit2"], + requirements = [":comp_req"], + tests = [], +) + +# Parent component containing nested component and shared unit +component( + name = "test_parent_component", + testonly = True, + components = [ + ":test_nested_component", + ":test_unit2", # Same unit appears here and in nested component + ":test_unit", # Different unit + ], + requirements = [":comp_req"], + tests = [], +) + +# Dependable element with nested components to test recursive collection +dependable_element( + name = "test_dependable_element_nested", + testonly = True, + architectural_design = [":arch_design"], + assumptions_of_use = [":aous"], + components = [":test_parent_component"], + dependability_analysis = [":dependability_analysis_target"], + description = "Test dependable element with nested components for testing recursive unit collection and deduplication", + requirements = [":comp_req"], + tests = [], +) + +# ============================================================================ +# Test Instantiations - HTML Generation Tests +# ============================================================================ + +# Needs Generation Tests +needs_transitive_test( + name = "needs_transitive_test", + target_under_test = ":module_b_lib_needs", +) + +# Dependency Tests +module_dependencies_test( + name = "module_dependencies_test", + target_under_test = ":module_a_lib", +) + 
+html_merging_test( + name = "html_merging_test", + target_under_test = ":module_a_lib", +) + +# ============================================================================ +# SEooC-Specific Tests +# ============================================================================ + +# Test that all artifacts are copied +seooc_artifacts_copied_test( + name = "seooc_tests_artifacts_copied", + target_under_test = ":seooc_test_lib_index", +) + +# Test that sphinx_module is generated with correct providers +seooc_sphinx_module_generated_test( + name = "seooc_tests_sphinx_module_generated", + target_under_test = ":seooc_test_lib", +) + +# Test that needs provider exists for cross-referencing +seooc_needs_provider_test( + name = "seooc_tests_needs_provider", + target_under_test = ":seooc_test_lib_needs", +) + +# ============================================================================ +# Test Suites +# ============================================================================ + +# Main test suite combining all sphinx_module tests +sphinx_module_test_suite(name = "sphinx_module_tests") + +# SEooC-focused test suite +test_suite( + name = "seooc_tests", + tests = [ + ":seooc_tests_artifacts_copied", + ":seooc_tests_needs_provider", + ":seooc_tests_sphinx_module_generated", + ], +) + +# ============================================================================ +# Unit, Component, and Dependable Element Test Instantiations +# ============================================================================ + +# Unit tests +unit_provider_test( + name = "unit_provider_test", + target_under_test = ":test_unit", +) + +unit_sphinx_sources_test( + name = "unit_sphinx_sources_test", + target_under_test = ":test_unit", +) + +# Component tests +component_provider_test( + name = "component_provider_test", + target_under_test = ":test_component", +) + +component_sphinx_sources_test( + name = "component_sphinx_sources_test", + target_under_test = ":test_component", +) + +# Unit, Component, 
and Dependable Element test suite +unit_component_test_suite(name = "unit_component_tests") + +# ============================================================================ +# Combined Test Suite +# ============================================================================ + +# Combined test suite for all tests +test_suite( + name = "all_tests", + tests = [ + ":seooc_tests", + ":sphinx_module_tests", + ":unit_component_tests", + ], +) diff --git a/bazel/rules/rules_score/test/fixtures/mock_lib1.cc b/bazel/rules/rules_score/test/fixtures/mock_lib1.cc new file mode 100644 index 0000000..599e3c1 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/mock_lib1.cc @@ -0,0 +1,4 @@ +// Mock implementation for testing purposes +int mock_function_1() { + return 42; +} diff --git a/bazel/rules/rules_score/test/fixtures/mock_lib2.cc b/bazel/rules/rules_score/test/fixtures/mock_lib2.cc new file mode 100644 index 0000000..588f15d --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/mock_lib2.cc @@ -0,0 +1,4 @@ +// Mock implementation for testing purposes +int mock_function_2() { + return 84; +} diff --git a/bazel/rules/rules_score/test/fixtures/mock_test.sh b/bazel/rules/rules_score/test/fixtures/mock_test.sh new file mode 100755 index 0000000..d5aa21e --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/mock_test.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +# Mock test script that always succeeds +# Used for testing rule structure without actual implementation + +exit 0 diff --git a/bazel/rules/rules_score/test/fixtures/module_a/index.rst b/bazel/rules/rules_score/test/fixtures/module_a/index.rst new file mode 100644 index 0000000..573ad4b --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/module_a/index.rst @@ -0,0 +1,31 @@ +Module A Documentation +====================== + +This is the documentation for Module A. + +.. document:: Documentation for Module A + :id: doc__module_fixtures_module_a + :status: valid + :safety: ASIL_B + :security: NO + :realizes: wp__component_arch + +Overview +-------- + +Module A is a simple module that depends on Module C. + +Features +-------- + +.. needlist:: + :tags: module_a + +Cross-Module References +----------------------- + +General reference to Module C :external+module_c_lib:doc:`index`. + +Need reference to Module C :need:`doc__module_fixtures_module_c`. + +Need reference to Module B :need:`doc__module_fixtures_module_b`. diff --git a/bazel/rules/rules_score/test/fixtures/module_b/index.rst b/bazel/rules/rules_score/test/fixtures/module_b/index.rst new file mode 100644 index 0000000..3155c10 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/module_b/index.rst @@ -0,0 +1,37 @@ +Module B Documentation +====================== + +This is the documentation for Module B. + +.. document:: Documentation for Module B + :id: doc__module_fixtures_module_b + :status: valid + :safety: ASIL_B + :security: NO + :realizes: + +Overview +-------- + +Module B depends on both Module A and Module C. + +Features +-------- + +.. 
needlist:: + :tags: module_b + +Cross-Module References +----------------------- + +This module references: + +* :external+module_a_lib:doc:`index` from Module A +* :external+module_c_lib:doc:`index` from Module C +* Need reference to Module C :need:`doc__module_fixtures_module_c` +* Need reference to Module C :need:`doc__module_fixtures_module_d` + +Dependencies +------------ + +Module B integrates functionality from both dependent modules. diff --git a/bazel/rules/rules_score/test/fixtures/module_c/index.rst b/bazel/rules/rules_score/test/fixtures/module_c/index.rst new file mode 100644 index 0000000..b73ae61 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/module_c/index.rst @@ -0,0 +1,29 @@ +Module C Documentation +====================== + +This is the documentation for Module C. + +.. document:: Documentation for Module C + :id: doc__module_fixtures_module_c + :status: valid + :safety: ASIL_B + :security: NO + :realizes: + + +Overview +-------- + +Module C is a base module with no dependencies. +Local need link: :need:`doc__module_fixtures_module_c` + +Features +-------- + +.. needlist:: + :tags: module_c + +Content +------- + +Module C provides foundational functionality used by other modules. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst new file mode 100644 index 0000000..02e96f7 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst @@ -0,0 +1,174 @@ +Architectural Design +==================== + +This document describes the architectural design of the test SEooC module. + +Software Architecture Overview +------------------------------- + +The system consists of the following software components: + +.. 
comp_arc_sta:: Input Processing Module + :id: comp_arc_sta__seooc_test__input_processing_module + :status: valid + :tags: architecture, component, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__input_data_processing, comp_req__seooc_test__can_message_reception + + Responsible for receiving and validating input data from CAN interface. + + **Inputs**: Raw CAN messages + + **Outputs**: Validated data structures + + **Safety Mechanisms**: CRC validation, sequence counter check + +.. comp_arc_sta:: Data Processing Engine + :id: comp_arc_sta__seooc_test__data_processing_engine + :status: valid + :tags: architecture, component, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__output_accuracy, comp_req__seooc_test__redundant_calculation + + Core processing component that performs calculations on validated data. + + **Inputs**: Validated data from Input Processing Module + + **Outputs**: Processed results + + **Safety Mechanisms**: Dual-channel redundant calculation + +.. comp_arc_sta:: Output Handler + :id: comp_arc_sta__seooc_test__output_handler + :status: valid + :tags: architecture, component, seooc_test + :safety: QM + :security: NO + :fulfils: comp_req__seooc_test__can_message_transmission + + Formats and transmits output data via CAN interface. + + **Inputs**: Processed results from Data Processing Engine + + **Outputs**: CAN messages + + **Safety Mechanisms**: Message sequence numbering, alive counter + +.. comp_arc_sta:: Fault Detection and Handling + :id: comp_arc_sta__seooc_test__fault_detection_handling + :status: valid + :tags: architecture, component, safety, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection, comp_req__seooc_test__safe_state_transition + + Monitors system health and handles fault conditions. 
+ + **Inputs**: Status from all components + + **Outputs**: System state, error flags + + **Safety Mechanisms**: Watchdog timer, plausibility checks + +Component Interfaces +--------------------- + +Interface: CAN Communication +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. real_arc_int:: CAN RX Interface + :id: real_arc_int__seooc_test__can_rx + :status: valid + :tags: interface, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__can_message_reception + :language: cpp + + * **Protocol**: CAN 2.0B + * **Baud Rate**: 500 kbps + * **Message ID Range**: 0x100-0x1FF + * **DLC**: 8 bytes + +.. real_arc_int:: CAN TX Interface + :id: real_arc_int__seooc_test__can_tx + :status: valid + :tags: interface, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__can_message_transmission + :language: cpp + + * **Protocol**: CAN 2.0B + * **Baud Rate**: 500 kbps + * **Message ID Range**: 0x200-0x2FF + * **DLC**: 8 bytes + +Design Decisions +---------------- + +.. comp_arc_dyn:: Use of Hardware Watchdog + :id: comp_arc_dyn__seooc_test__hw_watchdog + :status: valid + :tags: design-decision, safety, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + The architecture includes a hardware watchdog timer to ensure system + reliability and meet safety requirements. + + **Rationale**: Hardware watchdog provides independent monitoring + of software execution and can detect timing violations. + + **Alternatives Considered**: Software-only monitoring (rejected due + to lower ASIL coverage) + +.. comp_arc_dyn:: Redundant Processing Paths + :id: comp_arc_dyn__seooc_test__redundancy + :status: valid + :tags: design-decision, safety, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__redundant_calculation + + Critical calculations are performed using redundant processing paths + to detect and prevent silent data corruption. 
+ + **Rationale**: Meets ASIL-B requirements for detection of random + hardware faults during calculation. + + **Implementation**: Main path + shadow path with result comparison + +Memory Architecture +------------------- + +.. comp_arc_sta:: RAM Allocation + :id: comp_arc_sta__seooc_test__ram_allocation + :status: valid + :tags: resource, memory, seooc_test + :safety: QM + :security: NO + :fulfils: aou_req__seooc_test__memory_requirements + + * **Total RAM**: 512 KB + * **Stack**: 64 KB + * **Heap**: 128 KB + * **Static Data**: 256 KB + * **Reserved**: 64 KB + +.. comp_arc_sta:: Flash Allocation + :id: comp_arc_sta__seooc_test__flash_allocation + :status: valid + :tags: resource, memory, seooc_test + :safety: QM + :security: NO + :fulfils: aou_req__seooc_test__memory_requirements + + * **Total Flash**: 2 MB + * **Application Code**: 1.5 MB + * **Configuration Data**: 256 KB + * **Boot Loader**: 128 KB + * **Reserved**: 128 KB diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst new file mode 100644 index 0000000..fae172c --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst @@ -0,0 +1,80 @@ +Assumptions of Use +================== + +This document describes the assumptions of use for the test SEooC module. + +.. aou_req:: Operating Temperature Range + :id: aou_req__seooc_test__operating_temperature_range + :status: valid + :tags: environment, iso26262, seooc_test + :safety: ASIL_B + :security: NO + + The SEooC shall operate within temperature range -40°C to +85°C. + +.. aou_req:: Supply Voltage + :id: aou_req__seooc_test__supply_voltage + :status: valid + :tags: power, iso26262, seooc_test + :safety: ASIL_B + :security: NO + + The SEooC shall operate with supply voltage 12V ±10%. + + Maximum current consumption: 2.5A + +.. 
aou_req:: Processing Load + :id: aou_req__seooc_test__processing_load + :status: valid + :tags: performance, iso26262, seooc_test + :safety: ASIL_B + :security: NO + + The maximum processing load shall not exceed 80% to ensure + timing requirements are met. + +Environmental Assumptions +------------------------- + +.. aou_req:: Controlled Environment + :id: aou_req__seooc_test__controlled_environment + :status: valid + :tags: environment, seooc_test + :safety: ASIL_B + :security: NO + + The system operates in a controlled automotive environment + compliant with ISO 16750 standards. + +.. aou_req:: Maintenance + :id: aou_req__seooc_test__maintenance + :status: valid + :tags: maintenance, seooc_test + :safety: ASIL_B + :security: NO + + Regular maintenance is performed according to the maintenance + schedule defined in the integration manual. + +Integration Constraints +----------------------- + +.. aou_req:: CAN Bus Interface + :id: aou_req__seooc_test__can_bus_interface + :status: valid + :tags: interface, communication, seooc_test + :safety: ASIL_B + :security: NO + + The host system shall provide a CAN 2.0B compliant interface + for communication with the SEooC. + +.. aou_req:: Memory Requirements + :id: aou_req__seooc_test__memory_requirements + :status: valid + :tags: resource, seooc_test + :safety: ASIL_B + :security: NO + + The host system shall provide at least 512KB of RAM and + 2MB of flash memory for the SEooC. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst new file mode 100644 index 0000000..1d7f90c --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst @@ -0,0 +1,105 @@ +Component Requirements +====================== + +This document defines the functional and safety requirements. + +Functional Requirements +------------------------ + +.. 
comp_req:: Input Data Processing + :id: comp_req__seooc_test__input_data_processing + :status: valid + :tags: functional, performance, seooc_test + :safety: QM + :security: NO + :satisfies: aou_req__seooc_test__processing_load + + The system shall process input data within 100ms from reception. + + **Rationale**: Real-time processing required for control loop. + +.. comp_req:: Output Accuracy + :id: comp_req__seooc_test__output_accuracy + :status: valid + :tags: functional, quality, seooc_test + :safety: QM + :security: NO + + The system shall provide output with 99.9% accuracy under + nominal operating conditions. + +.. comp_req:: Data Logging + :id: comp_req__seooc_test__data_logging + :status: valid + :tags: functional, diagnostic, seooc_test + :safety: QM + :security: NO + + The system shall log all error events with timestamp and + error code to non-volatile memory. + +Safety Requirements +------------------- + +.. comp_req:: Fault Detection + :id: comp_req__seooc_test__fault_detection + :status: valid + :tags: safety, seooc_test + :safety: ASIL_B + :security: NO + :satisfies: aou_req__seooc_test__processing_load + + The system shall detect and handle fault conditions within 50ms. + + **ASIL Level**: ASIL-B + **Safety Mechanism**: Watchdog timer + plausibility checks + +.. comp_req:: Safe State Transition + :id: comp_req__seooc_test__safe_state_transition + :status: valid + :tags: safety, seooc_test + :safety: ASIL_B + :security: NO + + The system shall maintain safe state during power loss and + complete shutdown within 20ms. + + **ASIL Level**: ASIL-B + **Safe State**: All outputs disabled, error flag set + +.. comp_req:: Redundant Calculation + :id: comp_req__seooc_test__redundant_calculation + :status: valid + :tags: safety, seooc_test + :safety: ASIL_B + :security: NO + + Critical calculations shall be performed using redundant + processing paths with comparison. 
+ + **ASIL Level**: ASIL-B + **Safety Mechanism**: Dual-channel processing + +Communication Requirements +--------------------------- + +.. comp_req:: CAN Message Transmission + :id: comp_req__seooc_test__can_message_transmission + :status: valid + :tags: functional, communication, seooc_test + :safety: QM + :security: NO + :satisfies: aou_req__seooc_test__can_bus_interface + + The system shall transmit status messages on CAN bus + every 100ms ±10ms. + +.. comp_req:: CAN Message Reception + :id: comp_req__seooc_test__can_message_reception + :status: valid + :tags: functional, communication, seooc_test + :safety: QM + :security: NO + :satisfies: aou_req__seooc_test__can_bus_interface + + The system shall process received CAN messages within 10ms. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst new file mode 100644 index 0000000..ea5b518 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst @@ -0,0 +1,292 @@ +Safety Analysis +=============== + +This document contains the safety analysis for the test SEooC module. + +Failure Mode and Effects Analysis (FMEA) +----------------------------------------- + +.. 
comp_saf_fmea:: Input Data Corruption + :id: comp_saf_fmea__seooc_test__input_data_corruption + :status: valid + :tags: fmea, safety, seooc_test + :violates: comp_arc_sta__seooc_test__input_processing_module + :fault_id: bit_flip + :failure_effect: Corrupted input data from CAN bus due to electromagnetic interference, transmission errors, or faulty sensor leading to incorrect processing results + :mitigated_by: comp_req__seooc_test__fault_detection + :sufficient: yes + + **Failure Mode**: Corrupted input data from CAN bus + + **Potential Causes**: + + * Electromagnetic interference + * Transmission errors + * Faulty sensor + + **Effects**: Incorrect processing results, potential unsafe output + + **Severity**: High (S9) + + **Occurrence**: Medium (O4) + + **Detection**: High (D2) + + **RPN**: 72 + + **Detection Method**: CRC checksum validation, sequence counter check + + **Mitigation**: Reject invalid data and enter safe state within 50ms + +.. comp_saf_fmea:: Processing Timeout + :id: comp_saf_fmea__seooc_test__processing_timeout + :status: valid + :tags: fmea, safety, seooc_test + :violates: comp_arc_sta__seooc_test__fault_detection_handling + :fault_id: timing_failure + :failure_effect: Processing exceeds time deadline due to software defect, CPU overload, or hardware fault causing system unresponsiveness + :mitigated_by: comp_req__seooc_test__fault_detection + :sufficient: yes + + **Failure Mode**: Processing exceeds time deadline + + **Potential Causes**: + + * Software defect (infinite loop) + * CPU overload + * Hardware fault + + **Effects**: System becomes unresponsive, watchdog reset + + **Severity**: Medium (S6) + + **Occurrence**: Low (O3) + + **Detection**: Very High (D1) + + **RPN**: 18 + + **Detection Method**: Hardware watchdog timer + + **Mitigation**: System reset and recovery to safe state + +.. 
comp_saf_fmea:: Calculation Error
+   :id: comp_saf_fmea__seooc_test__calculation_error
+   :status: valid
+   :tags: fmea, safety, seooc_test
+   :violates: comp_arc_sta__seooc_test__data_processing_engine
+   :fault_id: seu
+   :failure_effect: Incorrect calculation result due to single event upset, register corruption, or ALU malfunction
+   :mitigated_by: comp_req__seooc_test__redundant_calculation
+   :sufficient: yes
+
+   **Failure Mode**: Incorrect calculation result due to random hardware fault
+
+   **Potential Causes**:
+
+   * Single event upset (SEU)
+   * Register corruption
+   * ALU malfunction
+
+   **Effects**: Incorrect output values
+
+   **Severity**: High (S8)
+
+   **Occurrence**: Very Low (O2)
+
+   **Detection**: High (D2)
+
+   **RPN**: 32
+
+   **Detection Method**: Dual-channel redundant calculation with comparison
+
+   **Mitigation**: Discard result and use previous valid value, set error flag
+
+Dependent Failure Analysis (DFA)
+---------------------------------
+
+.. comp_saf_dfa:: System Failure Top Event
+   :id: comp_saf_dfa__seooc_test__system_failure_top
+   :status: valid
+   :tags: dfa, safety, seooc_test
+   :violates: comp_arc_sta__seooc_test__data_processing_engine
+   :failure_id: common_cause
+   :failure_effect: System provides unsafe output due to common cause failures affecting multiple safety mechanisms simultaneously
+   :mitigated_by: aou_req__seooc_test__controlled_environment
+   :sufficient: yes
+
+   **Top Event**: System provides unsafe output
+
+   **Goal**: Probability < 1e-5 per hour (ASIL-B target)
+
+.. 
comp_saf_dfa:: Hardware Failure Branch + :id: comp_saf_dfa__seooc_test__hw_failure + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: hw_common_mode + :failure_effect: Hardware component failures due to common cause (overvoltage, overtemperature) affecting multiple components + :mitigated_by: aou_req__seooc_test__operating_temperature_range, aou_req__seooc_test__supply_voltage + :sufficient: yes + + **Event**: Hardware component failure + + **Sub-events**: + + * Microcontroller failure (λ = 5e-7) + * Power supply failure (λ = 3e-7) + * CAN transceiver failure (λ = 2e-7) + + **Combined Probability**: 1.0e-6 per hour + +.. comp_saf_dfa:: Software Failure Branch + :id: comp_saf_dfa__seooc_test__sw_failure + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: sw_systematic + :failure_effect: Software defect affecting both processing channels due to systematic fault in common code base + :mitigated_by: comp_req__seooc_test__redundant_calculation + :sufficient: yes + + **Event**: Software defect leads to unsafe output + + **Sub-events**: + + * Undetected software bug (λ = 8e-6, detection coverage 90%) + * Memory corruption (λ = 1e-7) + + **Combined Probability**: 9e-7 per hour (after detection coverage) + +.. 
comp_saf_dfa:: External Interference Branch + :id: comp_saf_dfa__seooc_test__ext_interference + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__input_processing_module + :failure_id: emi + :failure_effect: External interference causing simultaneous malfunction of multiple components + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Event**: External interference causes malfunction + + **Sub-events**: + + * EMI beyond specification (λ = 5e-8) + * Voltage transient (λ = 2e-8, mitigation 99%) + + **Combined Probability**: 5.2e-8 per hour (after mitigation) + +**Total System Failure Probability**: 1.95e-6 per hour + +**ASIL-B Target**: < 1e-5 per hour ✓ **PASSED** + +Safety Mechanisms +----------------- + +.. comp_arc_sta:: SM: Input Validation + :id: comp_arc_sta__seooc_test__sm_input_validation + :status: valid + :tags: safety-mechanism, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + **Description**: All input data is validated before processing + + **Checks Performed**: + + * CRC-16 checksum validation + * Message sequence counter verification + * Data range plausibility checks + + **Diagnostic Coverage**: 95% + + **Reaction**: Reject invalid data, increment error counter, use last valid value + +.. comp_arc_sta:: SM: Watchdog Timer + :id: comp_arc_sta__seooc_test__sm_watchdog + :status: valid + :tags: safety-mechanism, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + **Description**: Hardware watchdog monitors software execution + + **Configuration**: + + * Timeout: 150ms + * Window watchdog: 100-140ms trigger window + * Reset delay: 10ms + + **Diagnostic Coverage**: 99% + + **Reaction**: System reset, boot to safe state + +.. 
comp_arc_sta:: SM: Redundant Calculation + :id: comp_arc_sta__seooc_test__sm_redundant_calc + :status: valid + :tags: safety-mechanism, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__redundant_calculation + + **Description**: Critical calculations performed in dual channels + + **Implementation**: + + * Main calculation path + * Independent shadow path + * Result comparison with tolerance check + + **Diagnostic Coverage**: 98% + + **Reaction**: On mismatch, use previous valid value, set error flag + +Safety Validation Results +-------------------------- + +.. comp_arc_dyn:: Validation: FMEA Coverage + :id: comp_arc_dyn__seooc_test__val_fmea_coverage + :status: valid + :tags: validation, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + **Result**: All identified failure modes have detection mechanisms + + **Coverage**: 100% of critical failure modes + + **Status**: ✓ PASSED + +.. comp_arc_dyn:: Validation: DFA Target Achievement + :id: comp_arc_dyn__seooc_test__val_dfa_target + :status: valid + :tags: validation, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__safe_state_transition + + **Result**: System failure probability 1.95e-6 per hour + + **Target**: < 1e-5 per hour (ASIL-B) + + **Margin**: 5.1x + + **Status**: ✓ PASSED + +.. comp_arc_dyn:: Validation: Safety Mechanism Effectiveness + :id: comp_arc_dyn__seooc_test__val_sm_effectiveness + :status: valid + :tags: validation, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__redundant_calculation + + **Result**: Combined diagnostic coverage 97.3% + + **Target**: > 90% (ASIL-B) + + **Status**: ✓ PASSED diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst new file mode 100644 index 0000000..7b2e30d --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst @@ -0,0 +1,149 @@ +.. 
+ # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Dependent Failure Analysis (DFA) +================================ + +This document contains the Dependent Failure Analysis (DFA) for the test SEooC module, +following ISO 26262 requirements for analysis of dependent failures. + +Component DFA Overview +---------------------- + +The dependent failure analysis identifies and evaluates common cause failures, +cascading failures, and dependent failures that could affect the safety of the component. + +.. comp_saf_dfa:: Common Cause Failure Analysis + :id: comp_saf_dfa__seooc_test__common_cause_analysis + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: ccf_root + :failure_effect: Common cause failures affecting multiple safety mechanisms simultaneously + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Analysis Scope**: Identification of common cause failures + + **Initiators Analyzed**: + + * Environmental conditions (temperature, EMI, vibration) + * Power supply anomalies + * Manufacturing and design defects + * Maintenance-induced failures + + **Conclusion**: All identified common cause initiators have adequate mitigation measures. + +.. 
comp_saf_dfa:: Power Supply Dependency + :id: comp_saf_dfa__seooc_test__power_dependency + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: power_ccf + :failure_effect: Power supply failure affecting both main and redundant processing paths + :mitigated_by: aou_req__seooc_test__supply_voltage + :sufficient: yes + + **Dependent Failure**: Power supply failure + + **Affected Elements**: + + * Main processing unit + * Redundant calculation path + * Communication interface + + **Independence Measures**: + + * Voltage monitoring with independent reference + * Brownout detection circuit + * Defined safe state on power loss + + **Residual Risk**: Acceptable (< 1e-8 per hour) + +.. comp_saf_dfa:: Clock Source Dependency + :id: comp_saf_dfa__seooc_test__clock_dependency + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: clock_ccf + :failure_effect: Clock failure causing simultaneous malfunction of timing-dependent safety mechanisms + :mitigated_by: comp_req__seooc_test__fault_detection + :sufficient: yes + + **Dependent Failure**: Clock source failure + + **Affected Elements**: + + * Watchdog timer + * Communication timing + * Task scheduling + + **Independence Measures**: + + * Internal RC oscillator as backup + * Clock monitoring unit + * Frequency range checks + + **Residual Risk**: Acceptable (< 5e-9 per hour) + +.. 
comp_saf_dfa:: Software Design Dependency + :id: comp_saf_dfa__seooc_test__sw_design_dependency + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: sw_ccf + :failure_effect: Systematic software defect in common code base affecting both calculation paths + :mitigated_by: comp_req__seooc_test__redundant_calculation + :sufficient: yes + + **Dependent Failure**: Systematic software defect + + **Affected Elements**: + + * Main calculation algorithm + * Redundant calculation algorithm + * Result comparison logic + + **Independence Measures**: + + * Diverse implementation of redundant path + * Independent development teams + * Different compilers/toolchains for each path + + **Residual Risk**: Acceptable (< 1e-7 per hour with diversity measures) + +DFA Summary +----------- + +.. comp_saf_dfa:: DFA Summary and Conclusion + :id: comp_saf_dfa__seooc_test__dfa_summary + :status: valid + :tags: dfa, safety, seooc_test, summary + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: dfa_summary + :failure_effect: Combined dependent failure probability assessment + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Total Dependent Failure Probability**: < 1.5e-7 per hour + + **ASIL-B Target for Dependent Failures**: < 1e-6 per hour + + **Margin**: 6.7x + + **Status**: ✓ PASSED + + **Conclusion**: The component design provides adequate independence between + safety mechanisms. All identified dependent failure modes have been analyzed + and appropriate mitigation measures are in place. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst new file mode 100644 index 0000000..33cf03f --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst @@ -0,0 +1,66 @@ +.. 
+ # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Dynamic Architecture +==================== + +This file contains the dynamic architectural design for the SEooC test component. + +.. comp_arc_dyn:: Data Processing Sequence + :id: comp_arc_dyn__seooc_test__data_processing + :security: NO + :safety: QM + :status: valid + :fulfils: comp_req__seooc_test__input_data_processing + + Sequence diagram showing the data processing flow from input to output. + + .. uml:: + + @startuml + participant "Client" as client + participant "SEooC Test Component" as main + participant "Data Processor" as processor + + client -> main : processData(input) + main -> processor : process(input) + processor --> main : result + main --> client : output + @enduml + +.. comp_arc_dyn:: Fault Handling Sequence + :id: comp_arc_dyn__seooc_test__fault_handling + :security: NO + :safety: ASIL_B + :status: valid + :fulfils: comp_req__seooc_test__fault_detection + + Sequence diagram showing the fault detection and safe state transition. + + .. 
uml:: + + @startuml + participant "Main Component" as main + participant "Fault Handler" as fault + participant "Safe State Manager" as safe + + main -> fault : checkHealth() + alt fault detected + fault -> safe : transitionToSafeState() + safe --> fault : safeStateConfirmed + fault --> main : faultHandled + else no fault + fault --> main : healthOK + end + @enduml diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst new file mode 100644 index 0000000..d1be18a --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst @@ -0,0 +1,48 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Feature Requirements +==================== + +This file contains the feature requirements for the SEooC test module. + +.. feat_req:: Data Processing + :id: feat_req__seooc_test__data_processing + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: stkh_req__platform__data_handling + :status: valid + + The SEooC test component shall process input data and provide processed output. + +.. feat_req:: Safe State Management + :id: feat_req__seooc_test__safe_state + :reqtype: Functional + :security: NO + :safety: ASIL_B + :satisfies: stkh_req__platform__safety + :status: valid + + The SEooC test component shall transition to a safe state upon detection of a fault condition. + +.. 
feat_req:: CAN Communication + :id: feat_req__seooc_test__can_comm + :reqtype: Interface + :security: NO + :safety: QM + :satisfies: stkh_req__platform__communication + :status: valid + + The SEooC test component shall support CAN message transmission and reception. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst new file mode 100644 index 0000000..b81321c --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst @@ -0,0 +1,45 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Static Architecture +=================== + +This file contains the static architectural design for the SEooC test component. + +.. comp_arc_sta:: SEooC Test Component + :id: comp_arc_sta__seooc_test__main + :security: NO + :safety: QM + :status: valid + :fulfils: comp_req__seooc_test__input_data_processing + + The main component of the SEooC test module providing data processing capabilities. + +.. comp_arc_sta:: Data Processor + :id: comp_arc_sta__seooc_test__data_processor + :security: NO + :safety: QM + :status: valid + :fulfils: comp_req__seooc_test__output_accuracy + + Sub-component responsible for processing input data and generating output. + +.. 
comp_arc_sta:: Fault Handler + :id: comp_arc_sta__seooc_test__fault_handler + :security: NO + :safety: ASIL_B + :status: valid + :fulfils: comp_req__seooc_test__fault_detection + + Sub-component responsible for detecting and handling fault conditions. diff --git a/bazel/rules/rules_score/test/fixtures/test_component_main.cc b/bazel/rules/rules_score/test/fixtures/test_component_main.cc new file mode 100644 index 0000000..578f2d5 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/test_component_main.cc @@ -0,0 +1,13 @@ +// Main implementation for test_component +#include <iostream> + +// Declarations from mock libraries +extern int mock_function_1(); +extern int mock_function_2(); + +int main(int argc, char** argv) { + std::cout << "Test Component Implementation" << std::endl; + std::cout << "Mock function 1 returns: " << mock_function_1() << std::endl; + std::cout << "Mock function 2 returns: " << mock_function_2() << std::endl; + return 0; +} diff --git a/bazel/rules/rules_score/test/fixtures/test_unit_test.cc b/bazel/rules/rules_score/test/fixtures/test_unit_test.cc new file mode 100644 index 0000000..3420f20 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/test_unit_test.cc @@ -0,0 +1,25 @@ +// Unit tests for mock libraries +#include <iostream> + +// Declarations from mock libraries +extern int mock_function_1(); +extern int mock_function_2(); + +int main() { + // Test mock_function_1 + int result1 = mock_function_1(); + if (result1 != 42) { + std::cerr << "Test failed: mock_function_1() returned " << result1 << ", expected 42" << std::endl; + return 1; + } + + // Test mock_function_2 + int result2 = mock_function_2(); + if (result2 != 84) { + std::cerr << "Test failed: mock_function_2() returned " << result2 << ", expected 84" << std::endl; + return 1; + } + + std::cout << "All tests passed!" 
<< std::endl; + return 0; +} diff --git a/bazel/rules/rules_score/test/html_generation_test.bzl b/bazel/rules/rules_score/test/html_generation_test.bzl new file mode 100644 index 0000000..39f06cd --- /dev/null +++ b/bazel/rules/rules_score/test/html_generation_test.bzl @@ -0,0 +1,223 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Test rules for sphinx_module HTML generation and dependencies.""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "SphinxModuleInfo", "SphinxNeedsInfo") + +# ============================================================================ +# Provider Tests +# ============================================================================ + +def _providers_test_impl(ctx): + """Test that sphinx_module provides the correct providers.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify required providers + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Target should provide SphinxModuleInfo", + ) + + asserts.true( + env, + DefaultInfo in target_under_test, + "Target should provide DefaultInfo", + ) + + return analysistest.end(env) + +providers_test = analysistest.make(_providers_test_impl) + +# ============================================================================ +# HTML Generation Tests +# 
============================================================================ + +def _basic_html_generation_test_impl(ctx): + """Test that a simple document generates HTML output.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check that HTML directory exists + score_info = target_under_test[SphinxModuleInfo] + asserts.true( + env, + score_info.html_dir != None, + "Module should generate HTML directory", + ) + + return analysistest.end(env) + +basic_html_generation_test = analysistest.make(_basic_html_generation_test_impl) + +# ============================================================================ +# Needs.json Generation Tests +# ============================================================================ + +def _needs_generation_test_impl(ctx): + """Test that sphinx_module generates needs.json files.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check for SphinxNeedsInfo provider on _needs target + # Note: This test requires the _needs suffix target + asserts.true( + env, + DefaultInfo in target_under_test, + "Needs target should provide DefaultInfo", + ) + + return analysistest.end(env) + +needs_generation_test = analysistest.make(_needs_generation_test_impl) + +def _needs_transitive_test_impl(ctx): + """Test that needs.json files are collected transitively.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify SphinxNeedsInfo provider + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Needs target should provide SphinxNeedsInfo", + ) + + needs_info = target_under_test[SphinxNeedsInfo] + + # Check direct needs.json file + asserts.true( + env, + needs_info.needs_json_file != None, + "Should have direct needs.json file", + ) + + # Check transitive needs collection + asserts.true( + env, + needs_info.needs_json_files != None, + "Should have transitive needs.json files depset", + ) + + return 
analysistest.end(env) + +needs_transitive_test = analysistest.make(_needs_transitive_test_impl) + +# ============================================================================ +# Dependency and Integration Tests +# ============================================================================ + +def _module_dependencies_test_impl(ctx): + """Test that module dependencies are properly handled.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with dependencies should still generate HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with dependencies should generate HTML", + ) + + return analysistest.end(env) + +module_dependencies_test = analysistest.make(_module_dependencies_test_impl) + +def _html_merging_test_impl(ctx): + """Test that HTML from dependencies is merged correctly.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Verify merged HTML output exists + asserts.true( + env, + score_info.html_dir != None, + "Merged HTML should be generated", + ) + + return analysistest.end(env) + +html_merging_test = analysistest.make(_html_merging_test_impl) + +# ============================================================================ +# Config Generation Tests +# ============================================================================ + +def _auto_config_generation_test_impl(ctx): + """Test that conf.py is automatically generated when not provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module without explicit config should still generate HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with auto-generated config should produce HTML", + ) + + return analysistest.end(env) + +auto_config_generation_test = 
analysistest.make(_auto_config_generation_test_impl) + +def _explicit_config_test_impl(ctx): + """Test that explicit conf.py is used when provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with explicit config should generate HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with explicit config should produce HTML", + ) + + return analysistest.end(env) + +explicit_config_test = analysistest.make(_explicit_config_test_impl) + +# ============================================================================ +# Test Suite +# ============================================================================ + +def sphinx_module_test_suite(name): + """Create a comprehensive test suite for sphinx_module. + + Tests cover: + - Needs.json generation and transitive collection + - Module dependencies and HTML merging + + Args: + name: Name of the test suite + """ + + native.test_suite( + name = name, + tests = [ + # Needs generation + ":needs_transitive_test", + + # Dependencies and integration + ":module_dependencies_test", + ":html_merging_test", + ], + ) diff --git a/bazel/rules/rules_score/test/score_module_providers_test.bzl b/bazel/rules/rules_score/test/score_module_providers_test.bzl new file mode 100644 index 0000000..24fba52 --- /dev/null +++ b/bazel/rules/rules_score/test/score_module_providers_test.bzl @@ -0,0 +1,323 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Tests for sphinx_module providers and two-phase build system.""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "SphinxModuleInfo", "SphinxNeedsInfo") + +# ============================================================================ +# SphinxModuleInfo Provider Tests +# ============================================================================ + +def _sphinx_module_info_fields_test_impl(ctx): + """Test that SphinxModuleInfo provides all required fields.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Target should provide SphinxModuleInfo", + ) + + score_info = target_under_test[SphinxModuleInfo] + + # Verify html_dir field + asserts.true( + env, + hasattr(score_info, "html_dir"), + "SphinxModuleInfo should have html_dir field", + ) + + asserts.true( + env, + score_info.html_dir != None, + "html_dir should not be None", + ) + + return analysistest.end(env) + +sphinx_module_info_fields_test = analysistest.make(_sphinx_module_info_fields_test_impl) + +# ============================================================================ +# SphinxNeedsInfo Provider Tests +# ============================================================================ + +def _score_needs_info_fields_test_impl(ctx): + """Test that SphinxNeedsInfo provides all required fields.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Needs target should provide SphinxNeedsInfo", + ) + 
+ needs_info = target_under_test[SphinxNeedsInfo] + + # Verify needs_json_file field (direct file) + asserts.true( + env, + hasattr(needs_info, "needs_json_file"), + "SphinxNeedsInfo should have needs_json_file field", + ) + + asserts.true( + env, + needs_info.needs_json_file != None, + "needs_json_file should not be None", + ) + + # Verify needs_json_files field (transitive depset) + asserts.true( + env, + hasattr(needs_info, "needs_json_files"), + "SphinxNeedsInfo should have needs_json_files field", + ) + + asserts.true( + env, + needs_info.needs_json_files != None, + "needs_json_files should not be None", + ) + + # Verify it's a depset + asserts.true( + env, + type(needs_info.needs_json_files) == type(depset([])), + "needs_json_files should be a depset", + ) + + return analysistest.end(env) + +score_needs_info_fields_test = analysistest.make(_score_needs_info_fields_test_impl) + +def _score_needs_transitive_collection_test_impl(ctx): + """Test that needs.json files are collected transitively.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + needs_info = target_under_test[SphinxNeedsInfo] + + # Get the list of transitive needs files + transitive_needs = needs_info.needs_json_files.to_list() + + # Should have at least the direct needs file + asserts.true( + env, + len(transitive_needs) >= 1, + "Should have at least the direct needs.json file", + ) + + # Direct file should be in the transitive set + direct_file = needs_info.needs_json_file + asserts.true( + env, + direct_file in transitive_needs, + "Direct needs.json file should be in transitive collection", + ) + + return analysistest.end(env) + +score_needs_transitive_collection_test = analysistest.make(_score_needs_transitive_collection_test_impl) + +def _score_needs_with_deps_test_impl(ctx): + """Test that needs.json files include dependencies.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + needs_info = 
target_under_test[SphinxNeedsInfo] + transitive_needs = needs_info.needs_json_files.to_list() + + # Module with dependencies should have multiple needs files + # (its own + dependencies) + asserts.true( + env, + len(transitive_needs) >= 1, + "Module with dependencies should collect transitive needs.json files", + ) + + return analysistest.end(env) + +score_needs_with_deps_test = analysistest.make(_score_needs_with_deps_test_impl) + +# ============================================================================ +# Two-Phase Build Tests +# ============================================================================ + +def _two_phase_needs_first_test_impl(ctx): + """Test that Phase 1 (needs generation) works independently.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify SphinxNeedsInfo provider + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Phase 1 should provide SphinxNeedsInfo", + ) + + # Verify DefaultInfo with needs.json output + asserts.true( + env, + DefaultInfo in target_under_test, + "Phase 1 should provide DefaultInfo", + ) + + default_info = target_under_test[DefaultInfo] + files = default_info.files.to_list() + + # Should have at least one file (needs.json) + asserts.true( + env, + len(files) >= 1, + "Phase 1 should output needs.json file", + ) + + return analysistest.end(env) + +two_phase_needs_first_test = analysistest.make(_two_phase_needs_first_test_impl) + +def _two_phase_html_second_test_impl(ctx): + """Test that Phase 2 (HTML generation) works with needs from Phase 1.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify SphinxModuleInfo provider + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Phase 2 should provide SphinxModuleInfo", + ) + + score_info = target_under_test[SphinxModuleInfo] + + # Verify HTML output + asserts.true( + env, + score_info.html_dir != None, + "Phase 2 should generate HTML 
directory", + ) + + return analysistest.end(env) + +two_phase_html_second_test = analysistest.make(_two_phase_html_second_test_impl) + +# ============================================================================ +# Config Generation Tests +# ============================================================================ + +def _config_auto_generation_test_impl(ctx): + """Test that conf.py is auto-generated when not provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module without explicit config should still build + asserts.true( + env, + score_info.html_dir != None, + "Auto-generated config should allow HTML generation", + ) + + return analysistest.end(env) + +config_auto_generation_test = analysistest.make(_config_auto_generation_test_impl) + +def _config_explicit_usage_test_impl(ctx): + """Test that explicit conf.py is used when provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with explicit config should build + asserts.true( + env, + score_info.html_dir != None, + "Explicit config should allow HTML generation", + ) + + return analysistest.end(env) + +config_explicit_usage_test = analysistest.make(_config_explicit_usage_test_impl) + +# ============================================================================ +# Dependency Handling Tests +# ============================================================================ + +def _deps_html_merging_test_impl(ctx): + """Test that HTML from dependencies is merged into output.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with dependencies should generate merged HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with dependencies should generate merged HTML", + ) + 
+ return analysistest.end(env) + +deps_html_merging_test = analysistest.make(_deps_html_merging_test_impl) + +def _deps_needs_collection_test_impl(ctx): + """Test that needs from dependencies are collected.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + needs_info = target_under_test[SphinxNeedsInfo] + transitive_needs = needs_info.needs_json_files.to_list() + + # Should collect needs from dependencies + asserts.true( + env, + len(transitive_needs) >= 1, + "Should collect needs.json from dependencies", + ) + + return analysistest.end(env) + +deps_needs_collection_test = analysistest.make(_deps_needs_collection_test_impl) + +# ============================================================================ +# Test Suite +# ============================================================================ + +def sphinx_module_providers_test_suite(name): + """Create a test suite for sphinx_module providers and build phases. + + Tests cover: + - Transitive needs.json collection + - Dependency handling (HTML merging, needs collection) + + Args: + name: Name of the test suite + """ + + native.test_suite( + name = name, + tests = [ + # Provider tests + ":score_needs_with_deps_test", + + # Dependency tests + ":deps_html_merging_test", + ":deps_needs_collection_test", + ], + ) diff --git a/bazel/rules/rules_score/test/seooc_test.bzl b/bazel/rules/rules_score/test/seooc_test.bzl new file mode 100644 index 0000000..a88a637 --- /dev/null +++ b/bazel/rules/rules_score/test/seooc_test.bzl @@ -0,0 +1,135 @@ +""" +Test suite for dependable_element macro. 
+ +Tests the SEooC (Safety Element out of Context) functionality including: +- Index generation with artifact references +- Integration with sphinx_module +- Sphinx-needs cross-referencing +- HTML output generation +""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "SphinxModuleInfo", "SphinxNeedsInfo") + +def _seooc_index_generation_test_impl(ctx): + """Test that dependable_element generates proper index.rst file.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Get the generated index file + files = target_under_test[DefaultInfo].files.to_list() + + # Find index.rst in the output files + index_file = None + for f in files: + if f.basename == "index.rst": + index_file = f + break + + # Assert index file exists + asserts.true( + env, + index_file != None, + "Expected index.rst to be generated by dependable_element_index rule", + ) + + return analysistest.end(env) + +seooc_index_generation_test = analysistest.make( + impl = _seooc_index_generation_test_impl, +) + +def _seooc_artifacts_copied_test_impl(ctx): + """Test that all dependable element artifacts are copied to output directory.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + files = target_under_test[DefaultInfo].files.to_list() + + # Expected artifact basenames - these come from the SphinxSourcesInfo providers + # and are filtered to only include .rst/.md files for the index + expected_artifacts = [ + "component_requirements.rst", # from requirements + "dfa.rst", # from :dependability_analysis_target + ] + + # Check each artifact exists + actual_basenames = [f.basename for f in files] + for artifact in expected_artifacts: + asserts.true( + env, + artifact in actual_basenames, + "Expected artifact '{}' to be in output files".format(artifact), + ) + + return analysistest.end(env) + +seooc_artifacts_copied_test = 
analysistest.make( + impl = _seooc_artifacts_copied_test_impl, +) + +def _seooc_sphinx_module_generated_test_impl(ctx): + """Test that dependable_element generates sphinx_module with HTML output.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check that SphinxModuleInfo provider exists + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Expected dependable_element to provide SphinxModuleInfo from sphinx_module", + ) + + return analysistest.end(env) + +seooc_sphinx_module_generated_test = analysistest.make( + impl = _seooc_sphinx_module_generated_test_impl, +) + +def _seooc_needs_provider_test_impl(ctx): + """Test that dependable_element generates needs provider for cross-referencing.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check that SphinxNeedsInfo provider exists + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Expected dependable_element_needs to provide SphinxNeedsInfo", + ) + + return analysistest.end(env) + +seooc_needs_provider_test = analysistest.make( + impl = _seooc_needs_provider_test_impl, +) + +def _seooc_description_test_impl(ctx): + """Test that SEooC includes description in generated index.rst.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Get the generated index file + files = target_under_test[DefaultInfo].files.to_list() + + # Find index.rst + index_file = None + for f in files: + if f.basename == "index.rst": + index_file = f + break + + # Note: We can't easily read file contents in analysis test, + # but we can verify the file exists. The description content + # would be validated through integration tests or manual inspection. 
+ asserts.true( + env, + index_file != None, + "Expected index.rst to exist for description validation", + ) + + return analysistest.end(env) + +seooc_description_test = analysistest.make( + impl = _seooc_description_test_impl, +) diff --git a/bazel/rules/rules_score/test/unit_component_test.bzl b/bazel/rules/rules_score/test/unit_component_test.bzl new file mode 100644 index 0000000..f62f4b8 --- /dev/null +++ b/bazel/rules/rules_score/test/unit_component_test.bzl @@ -0,0 +1,175 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +""" +Test suite for unit, component, and dependable_element rules. 
+ +Tests the new hierarchical structure for S-CORE process compliance: +- Unit: smallest testable element +- Component: collection of units +- Dependable Element: complete SEooC with full documentation +""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score:providers.bzl", "ComponentInfo", "SphinxSourcesInfo", "UnitInfo") + +# ============================================================================ +# Unit Tests +# ============================================================================ + +def _unit_provider_test_impl(ctx): + """Test that unit rule provides UnitInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check UnitInfo provider exists + asserts.true( + env, + UnitInfo in target_under_test, + "Unit should provide UnitInfo", + ) + + unit_info = target_under_test[UnitInfo] + + # Verify fields are populated + asserts.true( + env, + unit_info.name != None, + "UnitInfo should have name field", + ) + + asserts.true( + env, + unit_info.unit_design != None, + "UnitInfo should have unit_design field", + ) + + asserts.true( + env, + unit_info.implementation != None, + "UnitInfo should have implementation field", + ) + + asserts.true( + env, + unit_info.tests != None, + "UnitInfo should have tests field", + ) + + return analysistest.end(env) + +unit_provider_test = analysistest.make(_unit_provider_test_impl) + +def _unit_sphinx_sources_test_impl(ctx): + """Test that unit rule provides SphinxSourcesInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check SphinxSourcesInfo provider exists + asserts.true( + env, + SphinxSourcesInfo in target_under_test, + "Unit should provide SphinxSourcesInfo", + ) + + return analysistest.end(env) + +unit_sphinx_sources_test = analysistest.make(_unit_sphinx_sources_test_impl) + +# ============================================================================ +# Component 
Tests +# ============================================================================ + +def _component_provider_test_impl(ctx): + """Test that component rule provides ComponentInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check ComponentInfo provider exists + asserts.true( + env, + ComponentInfo in target_under_test, + "Component should provide ComponentInfo", + ) + + comp_info = target_under_test[ComponentInfo] + + # Verify fields are populated + asserts.true( + env, + comp_info.name != None, + "ComponentInfo should have name field", + ) + + asserts.true( + env, + comp_info.requirements != None, + "ComponentInfo should have component_requirements field", + ) + + asserts.true( + env, + comp_info.components != None, + "ComponentInfo should have components field", + ) + + asserts.true( + env, + comp_info.tests != None, + "ComponentInfo should have tests field", + ) + + return analysistest.end(env) + +component_provider_test = analysistest.make(_component_provider_test_impl) + +def _component_sphinx_sources_test_impl(ctx): + """Test that component rule provides SphinxSourcesInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check SphinxSourcesInfo provider exists + asserts.true( + env, + SphinxSourcesInfo in target_under_test, + "Component should provide SphinxSourcesInfo", + ) + + return analysistest.end(env) + +component_sphinx_sources_test = analysistest.make(_component_sphinx_sources_test_impl) + +# ============================================================================ +# Dependable Element Tests +# ============================================================================ +# Note: Provider tests removed as dependable_element no longer creates a +# separate provider target. The main target is now a sphinx_module. 
+ +# ============================================================================ +# Test Suite Definition +# ============================================================================ + +def unit_component_test_suite(name): + """Create test suite for unit, component, and dependable_element rules. + + Args: + name: Name of the test suite + """ + native.test_suite( + name = name, + tests = [ + ":unit_provider_test", + ":unit_sphinx_sources_test", + ":component_provider_test", + ":component_sphinx_sources_test", + ], + ) diff --git a/coverage/README.md b/coverage/README.md index dbc46f9..c25115c 100644 --- a/coverage/README.md +++ b/coverage/README.md @@ -108,6 +108,26 @@ and point the report generator to the directory: bazel run //:rust_coverage -- --profraw-dir /path/to/profraw ``` +## Running from an integration workspace (external labels) + +You can invoke the report generator from a top-level integration repo (for +example, reference_integration) while targeting tests that live in external +modules. Use a query that references external labels and run the wrapper +target from the integration repo: + +```bash +bazel run //images/linux_x86_64:per_rust_coverage --config=ferrocene-coverage -- \ + --query 'kind("rust_test", @score_persistency//src/rust/...)' +``` + +If the `.profraw` files were produced in that same workspace, the reporter +auto-discovers them under `bazel-testlogs/` (including +`bazel-testlogs/external/+` for external labels), so you do not need +to pass `--profraw-dir`. If they were copied from elsewhere, pass +`--profraw-dir` to point to the directory containing the `.profraw` files. +External source paths are resolved via Bazel's output_base so +`external//...` paths are handled. + ## Coverage Gate Behavior `--min-line-coverage` applies per target. 
If any target is below the minimum, diff --git a/coverage/ferrocene_report.sh b/coverage/ferrocene_report.sh index 7acd343..298b665 100755 --- a/coverage/ferrocene_report.sh +++ b/coverage/ferrocene_report.sh @@ -335,6 +335,12 @@ label_to_path() { local label local pkg="${2:-}" label="$(strip_quotes "$1")" + # External labels look like "@repo//pkg:target". Strip the repo prefix so + # path conversion works for both workspace and external repos. + if [[ "${label}" == @*//?* ]]; then + label="//${label#*//}" + fi + # If the label still starts with "@", we do not know how to map it to a path. if [[ "${label}" == @* ]]; then echo "" return 0 @@ -403,6 +409,10 @@ label_pkg() { if [[ "${label}" =~ ^Label\\(\"(.*)\"\\)$ ]]; then label="${BASH_REMATCH[1]}" fi + # External labels include "@repo//". Strip the repo prefix to get the package. + if [[ "${label}" == @*//?* ]]; then + label="//${label#*//}" + fi if [[ "${label}" == //* ]]; then local rest="${label#//}" echo "${rest%%:*}" @@ -411,6 +421,37 @@ label_pkg() { echo "" } +# Resolve the "external/" prefix for an external label. +# We use bazel query --output=location to find a real file path, then extract +# the repo name from either "external//..." or ".../external//...". +workspace_root_for_label() { + local label + label="$(strip_quotes "$1")" + if [[ "${label}" =~ ^Label\\(\"(.*)\"\\)$ ]]; then + label="${BASH_REMATCH[1]}" + fi + # Non-external labels live in the workspace, so no external prefix is needed. + if [[ "${label}" != @* ]]; then + echo "" + return 0 + fi + # The location output may be absolute; handle both direct external paths + # and absolute paths that contain "/external//". 
+ local location + location="$(bazel query --output=location "${label}" 2>/dev/null | head -n 1)" + location="${location%%:*}" + local rest="" + if [[ "${location}" == external/* ]]; then + rest="${location#external/}" + elif [[ "${location}" == */external/* ]]; then + rest="${location#*/external/}" + fi + local repo="${rest%%/*}" + if [[ -n "${repo}" ]]; then + echo "external/${repo}" + fi +} + resolve_runfile() { local bin="$1" local name="$2" @@ -563,10 +604,20 @@ for label in "${targets[@]}"; do pkg="${pkg%%:*}" name="${label##*:}" + # Resolve the package path and repo root so test.outputs works for + # workspace labels (//pkg:target) and external labels (@repo//pkg:target). + label_pkg_path="$(label_pkg "${label}")" + if [[ -z "${label_pkg_path}" ]]; then + label_pkg_path="${pkg}" + fi + label_repo_root="$(workspace_root_for_label "${label}")" + if [[ -n "${PROFRAW_DIR}" ]]; then test_out_dir="${PROFRAW_DIR}" + elif [[ -n "${label_repo_root}" ]]; then + test_out_dir="${PROFRAW_ROOT}/${label_repo_root}/${label_pkg_path}/${name}/test.outputs" else - test_out_dir="${PROFRAW_ROOT}/${pkg}/${name}/test.outputs" + test_out_dir="${PROFRAW_ROOT}/${label_pkg_path}/${name}/test.outputs" fi shopt -s nullglob @@ -622,46 +673,77 @@ for label in "${targets[@]}"; do if [[ -z "${crate_pkg}" ]]; then crate_pkg="${pkg}" fi + repo_root="$(workspace_root_for_label "${crate_target}")" crate_root_raw="$(query_labels_attr "${crate_target}" "crate_root")" if [[ -z "${crate_root_raw}" ]]; then crate_root_raw="$(query_attr_build "${crate_target}" "crate_root")" fi crate_root="$(label_to_path "${crate_root_raw}" "${crate_pkg}")" - if [[ -z "${crate_root}" ]]; then - # Prefer explicit srcs for rust_test targets when no crate attribute is set. 
- srcs_label="$(query_labels_attr "${label}" "srcs")" - if [[ -n "${srcs_label}" ]]; then - srcs_path="$(label_to_path "${srcs_label}" "${pkg}")" - if [[ -n "${srcs_path}" && "${srcs_path}" == *.rs ]]; then - crate_root="${srcs_path}" - fi - fi - fi + # First, try conventional crate roots to avoid choosing a random source file. if [[ -z "${crate_root}" ]]; then for candidate in \ "${crate_pkg}/src/lib.rs" \ "${crate_pkg}/src/main.rs" \ "${crate_pkg}/lib.rs" \ "${crate_pkg}/main.rs"; do - if [[ -f "${workspace}/${candidate}" ]]; then + if [[ -n "${repo_root}" ]]; then + if [[ -f "${exec_root}/${repo_root}/${candidate}" ]]; then + crate_root="${candidate}" + break + fi + elif [[ -f "${workspace}/${candidate}" ]]; then crate_root="${candidate}" break fi done - if [[ -z "${crate_root}" ]]; then - echo "Skipping ${label}: could not determine crate root for ${crate_target}" >&2 - continue + fi + # If there is no conventional root, fall back to the crate's declared srcs. + if [[ -z "${crate_root}" ]]; then + srcs_label="$(query_labels_attr "${crate_target}" "srcs")" + if [[ -n "${srcs_label}" ]]; then + srcs_path="$(label_to_path "${srcs_label}" "${crate_pkg}")" + if [[ -n "${srcs_path}" && "${srcs_path}" == *.rs ]]; then + crate_root="${srcs_path}" + fi fi fi - + if [[ -z "${crate_root}" ]]; then + # As a last resort, try rust_test srcs when the test target defines them. + # This handles rust_test targets that directly list their sources. + srcs_label="$(query_labels_attr "${label}" "srcs")" + if [[ -n "${srcs_label}" ]]; then + srcs_path="$(label_to_path "${srcs_label}" "${pkg}")" + if [[ -n "${srcs_path}" && "${srcs_path}" == *.rs ]]; then + crate_root="${srcs_path}" + fi + fi + fi + # Without a crate root, symbol-report cannot build the crate. + if [[ -z "${crate_root}" ]]; then + echo "Skipping ${label}: could not determine crate root for ${crate_target}" >&2 + continue + fi + # Convert the crate root into an absolute path. 
External repos live under + # exec_root/external/ (symlinked from output_base), while workspace + # sources live under $workspace. if [[ "${crate_root}" != /* ]]; then - crate_root="${workspace}/${crate_root}" + if [[ -n "${repo_root}" && "${crate_root}" != "${repo_root}/"* ]]; then + crate_root="${repo_root}/${crate_root}" + fi + if [[ "${crate_root}" == external/* ]]; then + crate_root="${exec_root}/${crate_root}" + else + crate_root="${workspace}/${crate_root}" + fi fi + # Keep a workspace- or exec_root-relative path for reporting and mapping. crate_root_rel="${crate_root}" if [[ "${crate_root_rel}" == "${workspace}/"* ]]; then crate_root_rel="${crate_root_rel#${workspace}/}" + elif [[ "${crate_root_rel}" == "${exec_root}/"* ]]; then + crate_root_rel="${crate_root_rel#${exec_root}/}" fi crate_name="$(normalize_scalar "$(query_attr_build "${crate_target}" "crate_name")")" @@ -754,6 +836,7 @@ for label in "${targets[@]}"; do remap_args+=("--remap-path-prefix=${workspace}/=.") fi + # Pass the absolute crate root; relative external paths fail to canonicalize. ( cd "${exec_root}" SYMBOL_REPORT_OUT="${symbol_report_json}" \ @@ -767,7 +850,7 @@ for label in "${targets[@]}"; do --sysroot "${sysroot_arg}" \ -o /dev/null \ "${remap_args[@]}" \ - "${crate_root_rel}" + "${crate_root}" ) # Normalize symbol-report paths to be workspace-relative (like the demo), @@ -779,18 +862,23 @@ for label in "${targets[@]}"; do bin_arg="${bin_rel}" fi - # Blanket expects report paths to resolve under --ferrocene-src; add a - # path-equivalence so workspace files map cleanly to report entries. + # Blanket resolves report filenames by joining them with --ferrocene-src. + # Use a path-equivalence so source files map cleanly to report entries. + # For external crates, profiler paths are absolute under output_base/external, + # so we point --ferrocene-src there instead of the workspace. 
ferrocene_src="${workspace}" + if [[ "${crate_root_rel}" == external/* ]]; then + ferrocene_src="${output_base}" + fi crate_root_dir_rel="$(dirname "${crate_root_rel}")" path_prefix="${crate_root_rel%%/*}" if [[ -n "${path_prefix}" && "${path_prefix}" != "${crate_root_rel}" && "${path_prefix}" != "." ]]; then # Broader remap to cover any file under the top-level directory (e.g. src/...). - path_equiv_args=("--path-equivalence" "${path_prefix},${workspace}/${path_prefix}") + path_equiv_args=("--path-equivalence" "${path_prefix},${ferrocene_src}/${path_prefix}") elif [[ "${crate_root_dir_rel}" == "." ]]; then - path_equiv_args=("--path-equivalence" ".,${workspace}") + path_equiv_args=("--path-equivalence" ".,${ferrocene_src}") else - path_equiv_args=("--path-equivalence" "${crate_root_dir_rel},${workspace}/${crate_root_dir_rel}") + path_equiv_args=("--path-equivalence" "${crate_root_dir_rel},${ferrocene_src}/${crate_root_dir_rel}") fi ( diff --git a/sbom/BUILD.bazel b/sbom/BUILD.bazel new file mode 100644 index 0000000..ec94784 --- /dev/null +++ b/sbom/BUILD.bazel @@ -0,0 +1,33 @@ +# SBOM Generation Package +# +# This package provides Bazel-native SBOM (Software Bill of Materials) generation +# using module extensions and aspects. 
+# +# Public API: +# - load("@score_tooling//sbom:defs.bzl", "sbom") +# - use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") + +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//visibility:public"]) + +exports_files([ + "defs.bzl", + "extensions.bzl", +]) + +# Filegroup for all SBOM-related bzl files +filegroup( + name = "bzl_files", + srcs = [ + "defs.bzl", + "extensions.bzl", + "//sbom/internal:bzl_files", + ], +) + +# npm wrapper (uses system-installed npm from PATH) +sh_binary( + name = "npm_wrapper", + srcs = ["npm_wrapper.sh"], +) diff --git a/sbom/SBOM_Readme.md b/sbom/SBOM_Readme.md new file mode 100644 index 0000000..b242902 --- /dev/null +++ b/sbom/SBOM_Readme.md @@ -0,0 +1,340 @@ +# SBOM Setup Guide + +## 1. Configure MODULE.bazel + +Add the SBOM metadata extension in your **root** MODULE.bazel (e.g. `reference_integration/MODULE.bazel`): + +```starlark +# Enable SBOM metadata collection from all modules in the dependency graph +sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") +use_repo(sbom_ext, "sbom_metadata") +``` + +**For modules using `local_path_override` or `git_override`**, also add a `track_module` tag for each such module. Without this, their versions cannot be auto-detected and will appear as `unknown` in the SBOM: + +```starlark +# Required for modules with local_path_override or git_override (no registry version) +sbom_ext.track_module(name = "score_baselibs") +sbom_ext.track_module(name = "score_communication") +sbom_ext.track_module(name = "score_orchestrator") +# ... one entry per overridden module +``` + +No manual license entries are needed — all license metadata is collected automatically. + +## 2. 
Add SBOM Target in BUILD + +```starlark +load("@score_tooling//sbom:defs.bzl", "sbom") + +sbom( + name = "my_sbom", + targets = ["//my/app:binary"], + component_name = "my_application", + component_version = "1.0.0", + # Rust crate metadata from multiple MODULE.bazel.lock files + module_lockfiles = [ + "@score_crates//:MODULE.bazel.lock", + ":MODULE.bazel.lock", # workspace's own lockfile for additional crates + ], + auto_crates_cache = True, + auto_cdxgen = True, # Requires system-installed npm/cdxgen (see below) +) +``` + +### Parameters + +| Parameter | Default | Description | +| :--- | :--- | :--- | +| `targets` | _(required)_ | Bazel targets to include in SBOM | +| `component_name` | rule name | Main component name | +| `component_version` | `""` | Version string | +| `output_formats` | `["spdx", "cyclonedx"]` | Output formats: `"spdx"` and/or `"cyclonedx"` | +| `module_lockfiles` | `[]` | List of MODULE.bazel.lock files for Rust crate metadata. Pass `@score_crates//:MODULE.bazel.lock` (centralized crate specs) and `:MODULE.bazel.lock` (workspace-local crates). Each lockfile is parsed for crate name, version, and sha256. | +| `cargo_lockfile` | `None` | Optional Cargo.lock for additional crates. Usually not needed when `module_lockfiles` covers all crates. | +| `auto_crates_cache` | `True` | Auto-generate crates cache when `module_lockfiles` or `cargo_lockfile` is set | +| `auto_cdxgen` | `False` | Auto-run cdxgen when no `cdxgen_sbom` is provided | +| `cdxgen_sbom` | `None` | Label to a pre-generated CycloneDX JSON from cdxgen for C++ enrichment | +| `producer_name` | `"Eclipse Foundation"` | SBOM producer organization name (appears in `metadata.supplier`) | +| `producer_url` | `"https://projects.eclipse.org/projects/automotive.score"` | SBOM producer URL | +| `sbom_authors` | `[]` | Author strings for `metadata.authors` (e.g. 
`["Eclipse SCORE Team"]`) | +| `generation_context` | `""` | Lifecycle phase: `"pre-build"`, `"build"`, or `"post-build"` | +| `sbom_tools` | `[]` | Additional tool names added to `metadata.tools` | +| `namespace` | `"https://eclipse.dev/score"` | Base URI for the SPDX document namespace | +| `exclude_patterns` | _(build tools)_ | List of repo name substrings to exclude (e.g. `rules_rust`, `bazel_tools`). Defaults exclude common Bazel build-tool repos. | +| `dep_module_files` | `[]` | Additional MODULE.bazel files from dependency modules for version extraction | + +## 3. Install Prerequisites + +### For `auto_crates_cache` (Rust crate metadata) + +License data for Rust crates is fetched via [dash-license-scan](https://github.com/eclipse-score/dash-license-scan). Description and supplier metadata is fetched from the crates.io API (parallel, ~10 concurrent requests). Requires: + +```bash +# Install uv (Python package runner) +curl -LsSf https://astral.sh/uv/install.sh | sh + +# Install Java >= 11 (required by Eclipse dash-licenses JAR) +# Option 1: Ubuntu/Debian +sudo apt install openjdk-11-jre-headless + +# Option 2: Fedora/RHEL +sudo dnf install java-11-openjdk-headless + +# Verify installation +uvx dash-license-scan --help +java -version +``` + +### For `auto_cdxgen` (C++ dependency scanning) + +If using `auto_cdxgen = True` to automatically scan C++ dependencies: + +```bash +# Install Node.js and cdxgen globally +# Option 1: Using nvm (recommended) +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash +source ~/.bashrc +nvm install 20 +npm install -g @cyclonedx/cdxgen + +# Verify installation +which cdxgen +cdxgen --version +``` + +**Note:** If you don't have npm/cdxgen installed, set `auto_cdxgen = False` in your SBOM configuration. +When `auto_cdxgen` is enabled, the SBOM rule runs cdxgen against the repository path of the selected Bazel targets (for example `external/score_baselibs+` for `@score_baselibs//...` targets). + +## 4. 
Build + +```bash +bazel build //:my_sbom +``` + +## 5. Output + +Generated files in `bazel-bin/`: + +- `my_sbom.spdx.json` — SPDX 2.3 format +- `my_sbom.cdx.json` — CycloneDX 1.6 format +- `my_sbom_crates_metadata.json` — Auto-generated Rust crate cache (if `auto_crates_cache = True`) +- `my_sbom_cdxgen.cdx.json` — C++ dependencies from cdxgen (if `auto_cdxgen = True`) + +--- + +## Toolchain Components + +### Core Tools + +| Tool | Role | Required For | +|------|------|--------------| +| [Bazel](https://bazel.build) | Build system — rules, aspects, and module extensions drive dependency discovery and SBOM generation | All SBOM generation | +| [Python 3](https://www.python.org) | Runtime for the SBOM generator, formatters, and metadata extraction scripts | All SBOM generation | +| [dash-license-scan](https://github.com/eclipse-score/dash-license-scan) | Rust crate license metadata via Eclipse Foundation + ClearlyDefined | Rust metadata extraction when `auto_crates_cache = True` | +| [uv / uvx](https://docs.astral.sh/uv/) | Python package runner for dash-license-scan | Rust metadata extraction when `auto_crates_cache = True` | +| [Java >= 11](https://openjdk.org) | Runtime for Eclipse dash-licenses JAR (used by dash-license-scan) | Rust metadata extraction when `auto_crates_cache = True` | +| [crates.io API](https://crates.io) | Description and supplier metadata for Rust crates (parallel fetching) | Rust metadata extraction when `auto_crates_cache = True` | +| [@cyclonedx/cdxgen](https://github.com/CycloneDX/cdxgen) | C++ dependency scanner and license discovery tool | C++ metadata extraction when `auto_cdxgen = True` | +| [Node.js / npm](https://nodejs.org) | Runtime for cdxgen | C++ metadata extraction when `auto_cdxgen = True` | + +### Architecture + +``` + ┌──────────────────┐ + │ Bazel build │ + └────────┬─────────┘ + │ + ┌───────────────┼───────────────┐ + │ │ │ + v v v + MODULE.bazel Bazel targets Lockfiles + │ │ │ + v v v + metadata.json _deps.json License + 
metadata + (module versions) (dep graph, (dash-license-scan + dep edges) + crates.io API + │ │ + cdxgen) + └───────────────┼───────────────┘ + │ + v + ┌──────────────────┐ + │ sbom_generator │ + │ (match & resolve)│ + └────────┬─────────┘ + │ + ┌────────┴────────┐ + v v + .spdx.json .cdx.json +``` + +**Data sources:** +- **Bazel module graph** — version, PURL, and registry info for `bazel_dep` modules +- **Bazel aspect** — transitive dependency graph and external repo dependency edges +- **dash-license-scan** — Rust crate licenses via Eclipse Foundation + ClearlyDefined (from MODULE.bazel.lock or Cargo.lock) +- **crates.io API** — description and supplier for Rust crates (supplier extracted from GitHub repository URL) +- **cdxgen** — C++ dependency licenses, descriptions, and suppliers (from source tree scan) + +### Automatically Populated Fields + +The following SBOM fields are populated automatically without manual configuration: + +| Field | Rust Crates | C++ Dependencies | Bazel Modules | +|-------|-------------|------------------|---------------| +| License | dash-license-scan | cdxgen | — | +| Description | crates.io API | cdxgen (falls back to `"Missing"` when unavailable) | — | +| Supplier | crates.io API (GitHub org from repository URL) | cdxgen | — | +| Version | MODULE.bazel.lock / Cargo.lock | cdxgen (with MODULE.bazel.lock fallback for Bazel modules) | Bazel module graph | +| Checksum (SHA-256) | MODULE.bazel.lock / Cargo.lock | BCR `source.json` `sha256` + cdxgen `hashes` (when present) | http_archive `sha256` + MODULE.bazel.lock BCR `source.json` | +| PURL | Auto-generated (`pkg:cargo/...`) | cdxgen | Auto-generated | + +### Platform-Specific Crate Handling + +Crates with platform-specific suffixes (e.g. `iceoryx2-bb-lock-free-qnx8`) that don't exist on crates.io are handled by stripping the suffix and falling back to the base crate name for description and supplier lookup. 
+ +### What Is Excluded from SBOM + +- Dependencies not in the transitive dep graph of your `targets` +- Build toolchain repos matching `exclude_patterns` (e.g. `rules_rust`, `rules_cc`, `bazel_tools`, `platforms`) + +## Example + +See [reference_integration/BUILD](../../reference_integration/BUILD) for working SBOM targets and [reference_integration/MODULE.bazel](../../reference_integration/MODULE.bazel) for the metadata extension setup. + +Each SBOM target uses `module_lockfiles` to provide crate version/checksum data from multiple lockfiles and `auto_crates_cache = True` to automatically fetch license, description, and supplier data. + +### score_crates Integration + +The `score_crates` module provides centralized Rust crate management for the SCORE project. Its `MODULE.bazel.lock` file contains the majority of resolved crate specs (name, version, sha256) generated by `cargo-bazel`. The workspace's own `MODULE.bazel.lock` may contain additional crates not in `score_crates`. Both lockfiles should be passed via `module_lockfiles` to ensure complete coverage. + +## CISA 2025 Element Coverage (CycloneDX) + +The table below maps the CISA 2025 draft elements to CycloneDX fields and notes current support in this SBOM generator. + +| CISA 2025 Element | CycloneDX Field (JSON) | Support | Notes | +|---|---|---|---| +| Software Producer | `components[].supplier.name` | **Supported** | Root producer is set in `metadata.component.supplier`. For components, supplier is auto-extracted from crates.io repository URL (Rust) or from cdxgen (C++); in the current baselibs example, Boost BCR modules have no supplier because cdxgen does not provide one. | +| Component Name | `components[].name` | **Supported** | Single name; aliases are stored as `properties` with `cdx:alias`. | +| Component Version | `components[].version` | **Supported** | If unknown and source is git repo with `commit_date`, version can fall back to that date. 
| +| Software Identifiers | `components[].purl`, `components[].cpe` | **Supported (PURL)** / **Optional (CPE)** | PURL is generated for all components. CPE is optional if provided in metadata. | +| Component Hash | `components[].hashes` | **Supported** | SHA-256 is populated for Rust crates (from lockfiles) and for BCR / http_archive / some cdxgen-backed C++ components. In the current examples, Rust crates and Boost BCR modules have hashes; some QNX-specific crates and other C++ deps may not. | +| License | `components[].licenses` | **Supported (Rust) / Best-effort (C++)** | Rust licenses are auto-fetched via dash-license-scan and are present for most crates (e.g. Kyron SBOM); some crates like `iceoryx2-*` may still lack licenses. For C++ components, licenses are only present when cdxgen (or an upstream SBOM) provides them; in the current baselibs example, Boost BCR modules have empty `licenses`. Compound SPDX expressions (AND/OR) use the `expression` field per CycloneDX spec. | +| Component Description | `components[].description` | **Supported** | Auto-fetched from crates.io API (Rust) and cdxgen (C++), with C++ falling back to `"Missing"` when no description is available (as seen for Boost in the baselibs SBOM). | +| Dependency Relationship | `dependencies` | **Supported** | Uses external repo dependency edges from Bazel aspect; both Kyron and baselibs SBOMs include a dependency graph for the root component. | +| Pedigree / Derivation | `components[].pedigree` | **Supported (manual)** | Must be provided via `sbom_ext.license()` with `pedigree_*` fields. Not auto-deduced. | +| SBOM Author | `metadata.authors` | **Supported** | Set via `sbom_authors` in `sbom()` rule (e.g. `"Eclipse SCORE Team"` in the examples). | +| Tool Name | `metadata.tools` | **Supported** | Always includes `score-sbom-generator`; extra tools can be added via `sbom_tools`. | +| Timestamp | `metadata.timestamp` | **Supported** | ISO 8601 UTC timestamp generated at build time. 
| +| Generation Context | `metadata.lifecycles` | **Supported** | Set via `generation_context` in `sbom()` rule (`pre-build`, `build`, `post-build`). | + +### SPDX-Specific Notes + +- **LicenseRef-* declarations**: Any `LicenseRef-*` identifiers used in license fields are automatically declared in `hasExtractedLicensingInfos` as required by SPDX 2.3. +- **Supplier**: Emitted as `Organization: ` in the SPDX `supplier` field. + +### Notes on Missing Data +If a field is absent in output, it usually means the source metadata was not provided: +- Licenses and suppliers are auto-populated from dash-license-scan (Rust) or cdxgen (C++). For C++ dependencies, licenses and suppliers are available only when cdxgen can resolve the component; Bazel Central Registry modules like `boost.*` may have empty licenses if cdxgen cannot infer them. +- CPE, aliases, and pedigree are optional and must be explicitly set via `sbom_ext.license()`. +- Rust crate licenses require a crates metadata cache; this is generated automatically when `module_lockfiles` (or `cargo_lockfile`) is provided to `sbom()`. License data is fetched via `dash-license-scan` (Eclipse Foundation + ClearlyDefined). The `score_crates` MODULE.bazel.lock combined with the workspace's MODULE.bazel.lock provides complete coverage. +- If cdxgen cannot resolve C++ package metadata for a Bazel-only dependency graph, SBOM generation sets C++ dependency descriptions to `"Missing"`. + +Examples (add to `MODULE.bazel`): + +```starlark +# Optional metadata (CPE, aliases, pedigree) +# Note: sbom_ext.license() should only be used for pedigree, CPE, and aliases. +# Licenses and suppliers are auto-populated from dash-license-scan (Rust) or cdxgen (C++). 
+sbom_ext.license( + name = "linux-kernel", + cpe = "cpe:2.3:o:linux:linux_kernel:*:*:*:*:*:*:*:*", + aliases = ["linux", "kernel"], + pedigree_ancestors = ["pkg:generic/linux-kernel@5.10.130"], + pedigree_notes = "Backported CVE-2025-12345 fix from 5.10.130", +) +``` + +### C++ license data and dash-license-scan + +- **Rust crates** + Rust licenses are obtained via `generate_crates_metadata_cache.py`, which reads `MODULE.bazel.lock` / `Cargo.lock`, builds a synthetic `Cargo.lock`, runs `uvx dash-license-scan` (backed by Eclipse dash-licenses), and writes a `crates_metadata.json` cache that `sbom_generator.py` consumes. + +- **C++ dependencies** + C++ licenses and suppliers are resolved through two mechanisms: + + 1. **cdxgen scan** — when `auto_cdxgen = True` (or a `cdxgen_sbom` label is provided), cdxgen scans the source tree for C++ package metadata. This is the primary automated source for C++ license, supplier, version, and PURL. + + 2. **`cpp_metadata.json` cache** — populated by running `generate_cpp_metadata_cache.py` against cdxgen output. **This file must always be generated by the script, never edited by hand.** See the no-manual-fallback requirement below. + + There is currently **no dash-license-scan integration for C++ SBOMs**. `dash-license-scan` understands purls like `pkg:cargo/...`, `pkg:pypi/...`, `pkg:npm/...`, and `pkg:maven/...`, but not `pkg:generic/...` (used for BCR modules), so running it on the C++ CycloneDX SBOM does not improve C++ license coverage. + +### No-manual-fallback requirement (MUST) + +**All SBOM fields must originate from automated sources. 
No manually-curated fallback values are permitted for any field — not checksum, not license, not supplier, not version, not PURL, not description.** + +This applies to every data source in the pipeline: + +| Source | Status | What it provides | +|---|---|---| +| `MODULE.bazel.lock` `source.json` sha256 | ✅ Automated | Checksum for BCR C++ modules | +| `http_archive sha256 =` field | ✅ Automated | Checksum for non-BCR deps | +| cdxgen source-tree scan | ✅ Automated | License, supplier, version, PURL for C++ | +| `generate_cpp_metadata_cache.py` output | ✅ Automated (generated from cdxgen) | Persistent C++ metadata cache | +| dash-license-scan | ✅ Automated | License for Rust crates | +| `cpp_metadata.json` with hand-written entries | ❌ **Forbidden** | — | +| `BCR_KNOWN_LICENSES` dict in `sbom_generator.py` | ⚠️ Known violation — must be removed | License/supplier for BCR C++ modules | + +**Why:** A manually-written value is version-pinned to whatever version string happens to be in the file at the time of writing. If the workspace resolves a different version of that component, the value silently describes the wrong artifact. An absent field is honest and correct; a manually-guessed field is a compliance violation and a traceability lie. + +**Correct behaviour for missing data:** If an automated source cannot determine a field, the field is absent in the SBOM output. This is expected and acceptable. + +**Enforcement:** `test_cpp_enrich_checksum.py::TestNoManualFallbackInCppMetadata` asserts that `cpp_metadata.json` is empty and contains no SBOM fields. 
If entries are needed, regenerate the file: + +```bash +npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json +python3 tooling/sbom/scripts/generate_cpp_metadata_cache.py \ + cdxgen_output.cdx.json tooling/sbom/cpp_metadata.json +``` + +**Known violation — `BCR_KNOWN_LICENSES`:** The `BCR_KNOWN_LICENSES` dict hardcoded in `sbom_generator.py` is a manually-maintained license/supplier table for Bazel Central Registry C++ modules. It violates this requirement and must be replaced with automated BCR metadata fetching (e.g. querying the BCR `MODULE.bazel` or `metadata.json` at build time). Until that is implemented, BCR C++ modules that cdxgen cannot resolve will have missing license fields in the SBOM — which is the correct, honest output. + +--- + +## SPDX Version Decision (stay on 2.3) + +This generator emits **SPDX 2.3** and will not migrate to SPDX 3.0 until tooling support matures. + +### Why not SPDX 3.0? + +SPDX 3.0 is a **breaking rewrite**, not an additive update: + +| Aspect | SPDX 2.3 | SPDX 3.0 | +|---|---|---| +| Serialization | Flat JSON | JSON-LD (`@context` + `@graph`) | +| Top-level key | `spdxVersion: "SPDX-2.3"` | `@context: "https://spdx.org/rdf/3.0.1/spdx-context.jsonld"` | +| Package fields | `versionInfo`, `licenseConcluded`, `SPDXID` | `software_packageVersion`, licensing profile objects, `spdxId` | +| Relationships | Array in document | Standalone elements in `@graph` | +| Profiles | None | Mandatory `profileConformance` declaration | + +**Downstream consumer support as of Feb 2026 — tools that read/process our SBOM output, none support SPDX 3.0:** + +| Tool | SPDX 2.3 | SPDX 3.0 | +|---|---|---| +| GitHub Dependabot / Dependency Submission API | ✅ SPDX 2.3 (export) / action works with 2.3 in practice | ❌ | +| Trivy | ✅ generates 2.3 | ❌ | +| Grype | ✅ consumes 2.x | ❌ | +| Syft | ✅ generates 2.3 | ❌ | +| spdx-tools (Python) | ✅ full support | ⚠️ "experimental, unstable" | + +The `spdx-tools` Python library (latest: v0.8.4, Jan 2025) 
still describes its SPDX 3.0 support as "neither complete nor stable" and explicitly warns against production use. v0.8.4 added Python 3.14 support but made no SPDX 3.0 improvements. + +For SCORE's use case (license data, PURL, checksums, dependency graph), SPDX 2.3 covers all requirements with zero compatibility issues. + +### Revisit trigger + +Reconsider migration when **Trivy or GitHub Dependabot** announces production SPDX 3.0 support. At that point the required changes are: + +- `tooling/sbom/internal/generator/spdx_formatter.py` — full rewrite (flat JSON → JSON-LD `@graph`, new field names) +- `tooling/sbom/tests/test_spdx_formatter.py` — all 17 tests need rewriting +- `tooling/sbom/scripts/spdx_to_github_snapshot.py` — relationship and `externalRefs` parsing + diff --git a/sbom/cpp_metadata.json b/sbom/cpp_metadata.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/sbom/cpp_metadata.json @@ -0,0 +1 @@ +{} diff --git a/sbom/crates_metadata.json b/sbom/crates_metadata.json new file mode 100644 index 0000000..2f1b7b6 --- /dev/null +++ b/sbom/crates_metadata.json @@ -0,0 +1,806 @@ +{ + "aho-corasick": { + "checksum": "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301", + "license": "Unlicense OR MIT", + "name": "aho-corasick", + "purl": "pkg:cargo/aho-corasick@1.1.4", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.1.4" + }, + "bindgen": { + "checksum": "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895", + "license": "BSD-3-Clause", + "name": "bindgen", + "purl": "pkg:cargo/bindgen@0.72.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.72.1" + }, + "bitflags": { + "checksum": "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3", + "license": "MIT OR Apache-2.0", + "name": "bitflags", + "purl": "pkg:cargo/bitflags@2.10.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.10.0" + }, + 
"byteorder": { + "checksum": "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b", + "license": "Unlicense OR MIT", + "name": "byteorder", + "purl": "pkg:cargo/byteorder@1.5.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.5.0" + }, + "cc": { + "checksum": "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215", + "license": "MIT OR Apache-2.0", + "name": "cc", + "purl": "pkg:cargo/cc@1.2.49", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.2.49" + }, + "cdr": { + "checksum": "9617422bf43fde9280707a7e90f8f7494389c182f5c70b0f67592d0f06d41dfa", + "license": "Apache-2.0 OR MIT", + "name": "cdr", + "purl": "pkg:cargo/cdr@0.2.4", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.4" + }, + "cexpr": { + "checksum": "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766", + "license": "Apache-2.0 OR MIT", + "name": "cexpr", + "purl": "pkg:cargo/cexpr@0.6.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.0" + }, + "cfg-if": { + "checksum": "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801", + "license": "MIT OR Apache-2.0", + "name": "cfg-if", + "purl": "pkg:cargo/cfg-if@1.0.4", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.4" + }, + "clang-sys": { + "checksum": "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4", + "license": "Apache-2.0", + "name": "clang-sys", + "purl": "pkg:cargo/clang-sys@1.8.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.8.1" + }, + "cobs": { + "checksum": "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1", + "license": "MIT OR Apache-2.0", + "name": "cobs", + "purl": "pkg:cargo/cobs@0.3.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.0" + }, + "crossbeam-channel": { + "checksum": 
"82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2", + "license": "MIT OR Apache-2.0", + "name": "crossbeam-channel", + "purl": "pkg:cargo/crossbeam-channel@0.5.15", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.5.15" + }, + "crossbeam-utils": { + "checksum": "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28", + "license": "MIT OR Apache-2.0", + "name": "crossbeam-utils", + "purl": "pkg:cargo/crossbeam-utils@0.8.21", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.21" + }, + "deranged": { + "checksum": "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587", + "license": "MIT OR Apache-2.0", + "name": "deranged", + "purl": "pkg:cargo/deranged@0.5.5", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.5.5" + }, + "either": { + "checksum": "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719", + "license": "MIT OR Apache-2.0", + "name": "either", + "purl": "pkg:cargo/either@1.15.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.15.0" + }, + "embedded-io": { + "checksum": "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d", + "license": "MIT OR Apache-2.0", + "name": "embedded-io", + "purl": "pkg:cargo/embedded-io@0.6.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.1" + }, + "enum-iterator": { + "checksum": "a4549325971814bda7a44061bf3fe7e487d447cba01e4220a4b454d630d7a016", + "license": "0BSD OR MIT OR Apache-2.0", + "name": "enum-iterator", + "purl": "pkg:cargo/enum-iterator@2.3.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.3.0" + }, + "enum-iterator-derive": { + "checksum": "685adfa4d6f3d765a26bc5dbc936577de9abf756c1feeb3089b01dd395034842", + "license": "0BSD OR MIT OR Apache-2.0", + "name": "enum-iterator-derive", + "purl": "pkg:cargo/enum-iterator-derive@1.5.0", 
+ "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.5.0" + }, + "equivalent": { + "checksum": "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f", + "license": "Apache-2.0 OR MIT", + "name": "equivalent", + "purl": "pkg:cargo/equivalent@1.0.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.2" + }, + "find-msvc-tools": { + "checksum": "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844", + "license": "MIT OR Apache-2.0", + "name": "find-msvc-tools", + "purl": "pkg:cargo/find-msvc-tools@0.1.5", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.5" + }, + "futures": { + "checksum": "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876", + "license": "MIT OR Apache-2.0", + "name": "futures", + "purl": "pkg:cargo/futures@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-channel": { + "checksum": "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10", + "license": "MIT OR Apache-2.0", + "name": "futures-channel", + "purl": "pkg:cargo/futures-channel@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-core": { + "checksum": "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e", + "license": "MIT OR Apache-2.0", + "name": "futures-core", + "purl": "pkg:cargo/futures-core@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-executor": { + "checksum": "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f", + "license": "MIT OR Apache-2.0", + "name": "futures-executor", + "purl": "pkg:cargo/futures-executor@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-io": { + "checksum": 
"9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6", + "license": "MIT OR Apache-2.0", + "name": "futures-io", + "purl": "pkg:cargo/futures-io@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-macro": { + "checksum": "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650", + "license": "MIT OR Apache-2.0", + "name": "futures-macro", + "purl": "pkg:cargo/futures-macro@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-sink": { + "checksum": "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7", + "license": "MIT OR Apache-2.0", + "name": "futures-sink", + "purl": "pkg:cargo/futures-sink@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-task": { + "checksum": "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988", + "license": "MIT OR Apache-2.0", + "name": "futures-task", + "purl": "pkg:cargo/futures-task@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-util": { + "checksum": "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81", + "license": "MIT OR Apache-2.0", + "name": "futures-util", + "purl": "pkg:cargo/futures-util@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "generator": { + "checksum": "605183a538e3e2a9c1038635cc5c2d194e2ee8fd0d1b66b8349fad7dbacce5a2", + "license": "Apache-2.0 OR MIT", + "name": "generator", + "purl": "pkg:cargo/generator@0.8.7", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.7" + }, + "glob": { + "checksum": "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280", + "license": "MIT OR Apache-2.0", + "name": "glob", + "purl": "pkg:cargo/glob@0.3.3", + "source": 
"registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.3" + }, + "hashbrown": { + "checksum": "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100", + "license": "MIT OR Apache-2.0", + "name": "hashbrown", + "purl": "pkg:cargo/hashbrown@0.16.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.16.1" + }, + "indexmap": { + "checksum": "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2", + "license": "Apache-2.0 OR MIT", + "name": "indexmap", + "purl": "pkg:cargo/indexmap@2.12.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.12.1" + }, + "itertools": { + "checksum": "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186", + "license": "MIT OR Apache-2.0", + "name": "itertools", + "purl": "pkg:cargo/itertools@0.13.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.13.0" + }, + "itoa": { + "checksum": "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c", + "license": "MIT OR Apache-2.0", + "name": "itoa", + "purl": "pkg:cargo/itoa@1.0.15", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.15" + }, + "lazy_static": { + "checksum": "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe", + "license": "MIT OR Apache-2.0", + "name": "lazy_static", + "purl": "pkg:cargo/lazy_static@1.5.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.5.0" + }, + "libc": { + "checksum": "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091", + "license": "MIT OR Apache-2.0", + "name": "libc", + "purl": "pkg:cargo/libc@0.2.178", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.178" + }, + "libloading": { + "checksum": "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55", + "license": "ISC", + "name": "libloading", + "purl": "pkg:cargo/libloading@0.8.9", + 
"source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.9" + }, + "log": { + "checksum": "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897", + "license": "MIT OR Apache-2.0", + "name": "log", + "purl": "pkg:cargo/log@0.4.29", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.29" + }, + "loom": { + "checksum": "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca", + "license": "MIT", + "name": "loom", + "purl": "pkg:cargo/loom@0.7.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.7.2" + }, + "matchers": { + "checksum": "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9", + "license": "MIT", + "name": "matchers", + "purl": "pkg:cargo/matchers@0.2.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "memchr": { + "checksum": "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273", + "license": "Unlicense OR MIT", + "name": "memchr", + "purl": "pkg:cargo/memchr@2.7.6", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.7.6" + }, + "minimal-lexical": { + "checksum": "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a", + "license": "MIT OR Apache-2.0", + "name": "minimal-lexical", + "purl": "pkg:cargo/minimal-lexical@0.2.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.1" + }, + "nom": { + "checksum": "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a", + "license": "MIT", + "name": "nom", + "purl": "pkg:cargo/nom@7.1.3", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "7.1.3" + }, + "nu-ansi-term": { + "checksum": "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5", + "license": "MIT", + "name": "nu-ansi-term", + "purl": "pkg:cargo/nu-ansi-term@0.50.3", + "source": 
"registry+https://github.com/rust-lang/crates.io-index", + "version": "0.50.3" + }, + "num-conv": { + "checksum": "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9", + "license": "MIT OR Apache-2.0", + "name": "num-conv", + "purl": "pkg:cargo/num-conv@0.1.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.0" + }, + "once_cell": { + "checksum": "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d", + "license": "MIT OR Apache-2.0", + "name": "once_cell", + "purl": "pkg:cargo/once_cell@1.21.3", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.21.3" + }, + "pin-project-lite": { + "checksum": "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b", + "license": "Apache-2.0 OR MIT", + "name": "pin-project-lite", + "purl": "pkg:cargo/pin-project-lite@0.2.16", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.16" + }, + "pin-utils": { + "checksum": "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184", + "name": "pin-utils", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.0" + }, + "postcard": { + "checksum": "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24", + "name": "postcard", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.1.3" + }, + "powerfmt": { + "checksum": "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391", + "name": "powerfmt", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "prettyplease": { + "checksum": "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b", + "license": "MIT OR Apache-2.0", + "name": "prettyplease", + "purl": "pkg:cargo/prettyplease@0.2.37", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.37" + }, + "proc-macro2": { + "checksum": 
"5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8", + "license": "MIT OR Apache-2.0", + "name": "proc-macro2", + "purl": "pkg:cargo/proc-macro2@1.0.103", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.103" + }, + "quote": { + "checksum": "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f", + "license": "MIT OR Apache-2.0", + "name": "quote", + "purl": "pkg:cargo/quote@1.0.42", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.42" + }, + "regex": { + "checksum": "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4", + "license": "MIT OR Apache-2.0", + "name": "regex", + "purl": "pkg:cargo/regex@1.12.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.12.2" + }, + "regex-automata": { + "checksum": "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c", + "name": "regex-automata", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.13" + }, + "regex-syntax": { + "checksum": "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58", + "name": "regex-syntax", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.8" + }, + "rustc-hash": { + "checksum": "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d", + "license": "Apache-2.0 OR MIT", + "name": "rustc-hash", + "purl": "pkg:cargo/rustc-hash@2.1.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.1.1" + }, + "rustversion": { + "checksum": "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d", + "name": "rustversion", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.22" + }, + "ryu": { + "checksum": "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f", + "name": "ryu", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.20" + }, 
+ "scoped-tls": { + "checksum": "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294", + "name": "scoped-tls", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.1" + }, + "serde": { + "checksum": "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e", + "license": "MIT OR Apache-2.0", + "name": "serde", + "purl": "pkg:cargo/serde@1.0.228", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.228" + }, + "serde_core": { + "checksum": "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad", + "name": "serde_core", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.228" + }, + "serde_derive": { + "checksum": "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79", + "license": "MIT OR Apache-2.0", + "name": "serde_derive", + "purl": "pkg:cargo/serde_derive@1.0.228", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.228" + }, + "serde_json": { + "checksum": "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c", + "license": "MIT OR Apache-2.0", + "name": "serde_json", + "purl": "pkg:cargo/serde_json@1.0.145", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.145" + }, + "serde_spanned": { + "checksum": "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3", + "name": "serde_spanned", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.9" + }, + "sha1_smol": { + "checksum": "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d", + "name": "sha1_smol", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.1" + }, + "sharded-slab": { + "checksum": "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6", + "name": "sharded-slab", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.7" + }, + "shlex": { 
+ "checksum": "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64", + "license": "MIT OR Apache-2.0", + "name": "shlex", + "purl": "pkg:cargo/shlex@1.3.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.3.0" + }, + "slab": { + "checksum": "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589", + "name": "slab", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.11" + }, + "smallvec": { + "checksum": "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03", + "name": "smallvec", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.15.1" + }, + "syn": { + "checksum": "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87", + "license": "MIT OR Apache-2.0", + "name": "syn", + "purl": "pkg:cargo/syn@2.0.111", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.0.111" + }, + "thiserror": { + "checksum": "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8", + "license": "MIT OR Apache-2.0", + "name": "thiserror", + "purl": "pkg:cargo/thiserror@2.0.17", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.0.17" + }, + "thiserror-impl": { + "checksum": "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913", + "name": "thiserror-impl", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.0.17" + }, + "thread_local": { + "checksum": "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185", + "name": "thread_local", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.1.9" + }, + "time": { + "checksum": "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d", + "license": "MIT OR Apache-2.0", + "name": "time", + "purl": "pkg:cargo/time@0.3.44", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.44" + }, + "time-core": 
{ + "checksum": "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b", + "license": "MIT OR Apache-2.0", + "name": "time-core", + "purl": "pkg:cargo/time-core@0.1.6", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.6" + }, + "time-macros": { + "checksum": "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3", + "license": "MIT OR Apache-2.0", + "name": "time-macros", + "purl": "pkg:cargo/time-macros@0.2.24", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.24" + }, + "tiny-fn": { + "checksum": "9659b108631d1e1cf3e8e489f894bee40bc9d68fd6cc67ec4d4ce9b72d565228", + "name": "tiny-fn", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.9" + }, + "toml": { + "checksum": "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362", + "license": "MIT OR Apache-2.0", + "name": "toml", + "purl": "pkg:cargo/toml@0.8.23", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.23" + }, + "toml_datetime": { + "checksum": "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c", + "name": "toml_datetime", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.11" + }, + "toml_edit": { + "checksum": "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a", + "name": "toml_edit", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.22.27" + }, + "toml_write": { + "checksum": "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801", + "name": "toml_write", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.2" + }, + "tracing": { + "checksum": "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647", + "license": "MIT", + "name": "tracing", + "purl": "pkg:cargo/tracing@0.1.43", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.43" + }, 
+ "tracing-appender": { + "checksum": "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf", + "name": "tracing-appender", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.4" + }, + "tracing-attributes": { + "checksum": "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da", + "name": "tracing-attributes", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.31" + }, + "tracing-core": { + "checksum": "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c", + "license": "MIT", + "name": "tracing-core", + "purl": "pkg:cargo/tracing-core@0.1.35", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.35" + }, + "tracing-log": { + "checksum": "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3", + "name": "tracing-log", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "tracing-serde": { + "checksum": "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1", + "name": "tracing-serde", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "tracing-subscriber": { + "checksum": "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e", + "license": "MIT", + "name": "tracing-subscriber", + "purl": "pkg:cargo/tracing-subscriber@0.3.22", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.22" + }, + "unicode-ident": { + "checksum": "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5", + "license": "MIT OR Apache-2.0 AND Unicode-3.0", + "name": "unicode-ident", + "purl": "pkg:cargo/unicode-ident@1.0.22", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.22" + }, + "valuable": { + "checksum": "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65", + "name": "valuable", + "source": 
"registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.1" + }, + "windows": { + "checksum": "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893", + "name": "windows", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.61.3" + }, + "windows-collections": { + "checksum": "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8", + "name": "windows-collections", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "windows-core": { + "checksum": "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3", + "name": "windows-core", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.61.2" + }, + "windows-future": { + "checksum": "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e", + "name": "windows-future", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.1" + }, + "windows-implement": { + "checksum": "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf", + "name": "windows-implement", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.60.2" + }, + "windows-interface": { + "checksum": "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358", + "name": "windows-interface", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.59.3" + }, + "windows-link": { + "checksum": "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5", + "name": "windows-link", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.1" + }, + "windows-numerics": { + "checksum": "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1", + "name": "windows-numerics", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "windows-result": { + "checksum": 
"56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6", + "name": "windows-result", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.4" + }, + "windows-strings": { + "checksum": "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57", + "name": "windows-strings", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.2" + }, + "windows-sys": { + "checksum": "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc", + "name": "windows-sys", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.61.2" + }, + "windows-targets": { + "checksum": "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c", + "name": "windows-targets", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows-threading": { + "checksum": "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6", + "name": "windows-threading", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.0" + }, + "windows_aarch64_gnullvm": { + "checksum": "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8", + "name": "windows_aarch64_gnullvm", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_aarch64_msvc": { + "checksum": "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc", + "name": "windows_aarch64_msvc", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_i686_gnu": { + "checksum": "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e", + "name": "windows_i686_gnu", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_i686_msvc": { + "checksum": "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406", + "name": "windows_i686_msvc", + "source": 
"registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_x86_64_gnu": { + "checksum": "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e", + "name": "windows_x86_64_gnu", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_x86_64_gnullvm": { + "checksum": "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc", + "name": "windows_x86_64_gnullvm", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_x86_64_msvc": { + "checksum": "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538", + "name": "windows_x86_64_msvc", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "winnow": { + "checksum": "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829", + "name": "winnow", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.7.14" + } +} \ No newline at end of file diff --git a/sbom/defs.bzl b/sbom/defs.bzl new file mode 100644 index 0000000..c60233d --- /dev/null +++ b/sbom/defs.bzl @@ -0,0 +1,136 @@ +"""Public API for SBOM generation. + +This module provides the sbom() macro, which is the main entry point for +generating Software Bill of Materials for Bazel targets. 
+ +Example usage: + load("@score_tooling//sbom:defs.bzl", "sbom") + + sbom( + name = "product_sbom", + targets = [ + "//feature_showcase/rust:orch_per_example", + "//feature_showcase/rust:kyron_example", + ], + component_version = "1.0.0", + ) +""" + +load("//sbom/internal:rules.bzl", "sbom_rule") + +def sbom( + name, + targets, + metadata_json = "@sbom_metadata//:metadata.json", + dep_module_files = None, + cdxgen_sbom = None, + auto_cdxgen = False, + cargo_lockfile = None, + module_lockfiles = None, + auto_crates_cache = True, + output_formats = ["spdx", "cyclonedx"], + producer_name = "Eclipse Foundation", + producer_url = "https://projects.eclipse.org/projects/automotive.score", + component_name = None, + component_version = None, + sbom_authors = None, + generation_context = None, + sbom_tools = None, + namespace = None, + exclude_patterns = None, + **kwargs): + """Generates SBOM for specified targets. + + This macro creates an SBOM (Software Bill of Materials) for the specified + targets, traversing their transitive dependencies and generating output + in SPDX 2.3 and/or CycloneDX 1.6 format. 
+ + License metadata is collected automatically: + - Rust crates: from crates_metadata.json cache (bundled with tooling) + - C++ deps: from cpp_metadata.json cache (bundled with tooling) + - Bazel modules: version/PURL auto-extracted from module graph + + Prerequisites: + In your MODULE.bazel, you must enable the sbom_metadata extension: + ``` + sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") + use_repo(sbom_ext, "sbom_metadata") + ``` + + Args: + name: Rule name, also used as output filename prefix + targets: List of targets to include in SBOM + metadata_json: Label to the metadata.json file from sbom_metadata extension + dep_module_files: MODULE.bazel files from dependency modules for automatic version extraction + cdxgen_sbom: Optional label to CycloneDX JSON from cdxgen for C++ enrichment + auto_cdxgen: Run cdxgen automatically when no cdxgen_sbom is provided + cargo_lockfile: Optional Cargo.lock for crates metadata cache generation + module_lockfiles: MODULE.bazel.lock files for crate metadata extraction (e.g., from score_crates and workspace) + auto_crates_cache: Run crates metadata cache generation when cargo_lockfile or module_lockfiles is provided + output_formats: List of formats to generate ("spdx", "cyclonedx") + producer_name: SBOM producer organization name + producer_url: SBOM producer URL + component_name: Main component name (defaults to rule name) + component_version: Component version string + namespace: SBOM namespace URI (defaults to https://eclipse.dev/score) + exclude_patterns: Repo patterns to exclude (e.g., build tools) + **kwargs: Additional arguments passed to the underlying rule + + Outputs: + {name}.spdx.json - SPDX 2.3 format (if "spdx" in output_formats) + {name}.cdx.json - CycloneDX 1.6 format (if "cyclonedx" in output_formats) + + Example: + # Single target SBOM + sbom( + name = "my_app_sbom", + targets = ["//src:my_app"], + component_version = "1.0.0", + ) + + # Multi-target SBOM + sbom( + name = 
"product_sbom", + targets = [ + "//feature_showcase/rust:orch_per_example", + "//feature_showcase/rust:kyron_example", + ], + component_name = "score_reference_integration", + component_version = "0.5.0-beta", + ) + """ + default_exclude_patterns = [ + "rules_rust", + "rules_cc", + "bazel_tools", + "platforms", + "bazel_skylib", + "rules_python", + "rules_proto", + "protobuf", + "local_config_", + "remote_", + ] + + sbom_rule( + name = name, + targets = targets, + metadata_json = metadata_json, + dep_module_files = dep_module_files if dep_module_files else [], + cdxgen_sbom = cdxgen_sbom, + auto_cdxgen = auto_cdxgen, + cargo_lockfile = cargo_lockfile, + module_lockfiles = module_lockfiles if module_lockfiles else [], + auto_crates_cache = auto_crates_cache, + output_formats = output_formats, + producer_name = producer_name, + producer_url = producer_url, + component_name = component_name if component_name else name, + component_version = component_version if component_version else "", + sbom_authors = sbom_authors if sbom_authors else [], + generation_context = generation_context if generation_context else "", + sbom_tools = sbom_tools if sbom_tools else [], + namespace = namespace if namespace else "https://eclipse.dev/score", + exclude_patterns = exclude_patterns if exclude_patterns else default_exclude_patterns, + **kwargs + ) diff --git a/sbom/docs/requirements/component_requirements.rst b/sbom/docs/requirements/component_requirements.rst new file mode 100644 index 0000000..4a8af86 --- /dev/null +++ b/sbom/docs/requirements/component_requirements.rst @@ -0,0 +1,88 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. 
+ # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +.. _sbom_component_requirements: + +Component Requirements +###################### + +.. document:: SBOM Generator Component Requirements + :id: doc__sbom_component_requirements + :status: valid + :safety: QM + :security: NO + :realizes: wp__requirements_comp + + +Metadata Provenance +=================== + +.. comp_req:: Component Checksum Automated Source + :id: comp_req__sbom__checksum_automated_source + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: feat_req__sbom__cisa_2025_minimum_elements + :status: valid + + The generator shall source component SHA-256 checksums exclusively from + the following automated inputs: + + - ``MODULE.bazel.lock`` ``registryFileHashes`` entries pointing to + ``source.json`` files (for Bazel Central Registry modules), and + - the ``sha256`` field of ``http_archive`` rules (for non-BCR + dependencies). + + If neither source provides a checksum for a component, the hash field + shall be omitted from that component's SBOM entry. Omitting the field is + the correct output; emitting an incorrect or stale value is not permitted. + + +Output Format +============= + +.. comp_req:: SPDX Output Version + :id: comp_req__sbom__spdx_version + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: feat_req__sbom__dual_format_output + :status: valid + + The generator shall emit SPDX 2.3 compliant JSON. Migration to SPDX 3.0 + shall not be performed until SPDX 3.0 output is supported in production + by at least one of the following downstream consumers: Trivy, GitHub + Dependabot Dependency Submission API, or Grype. + + :rationale: SPDX 3.0 is a breaking JSON-LD rewrite of the format. 
As of + February 2026 none of the major consumers support it, and the + reference Python library (spdx-tools v0.8.4) describes its own + 3.0 support as experimental and not recommended for production. + + +.. comp_req:: CycloneDX Output Version + :id: comp_req__sbom__cyclonedx_version + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: feat_req__sbom__dual_format_output + :status: valid + + The generator shall emit CycloneDX 1.6 compliant JSON with + ``"$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json"`` and + ``"specVersion": "1.6"``. + + +.. needextend:: docname is not None and "sbom" in id + :+tags: sbom diff --git a/sbom/docs/requirements/feature_requirements.rst b/sbom/docs/requirements/feature_requirements.rst new file mode 100644 index 0000000..80b121d --- /dev/null +++ b/sbom/docs/requirements/feature_requirements.rst @@ -0,0 +1,92 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +.. _sbom_feature_requirements: + +Feature Requirements +#################### + +.. document:: SBOM Generator Feature Requirements + :id: doc__sbom_feature_requirements + :status: valid + :safety: QM + :security: NO + :realizes: wp__requirements_feat + + +CISA 2025 Minimum Elements +=========================== + +.. 
feat_req:: CISA 2025 Mandatory SBOM Elements + :id: feat_req__sbom__cisa_2025_minimum_elements + :reqtype: Functional + :security: NO + :safety: QM + :status: valid + + The SBOM generator shall produce output that contains all minimum elements + mandated by CISA 2025 for every component entry: component name, component + version, component hash (SHA-256), software identifier (PURL), license + expression, dependency relationships, SBOM author, timestamp, tool name, + and generation context (lifecycle phase). + + +Metadata Provenance +=================== + +.. feat_req:: Automated Metadata Sources + :id: feat_req__sbom__automated_metadata_sources + :reqtype: Process + :security: NO + :safety: QM + :status: valid + + All field values written into generated SBOM output shall be derived + exclusively from automated sources. No manually-curated static data, + hardcoded lookup tables, or hand-edited cache files shall be used to + supply values for any SBOM field. + +Component Scope +=============== + +.. feat_req:: Build Target Dependency Scope + :id: feat_req__sbom__build_target_scope + :reqtype: Functional + :security: NO + :safety: QM + :status: valid + + The SBOM shall include only components that are part of the transitive + dependency closure of the declared build targets. Build-time tools that + are not part of the delivered software (compilers, build systems, test + frameworks, and code generation utilities) shall be excluded from the + SBOM output. + + +Output Formats +============== + +.. feat_req:: Dual Format SBOM Output + :id: feat_req__sbom__dual_format_output + :reqtype: Interface + :security: NO + :safety: QM + :status: valid + + The SBOM generator shall produce output simultaneously in both SPDX 2.3 + JSON format and CycloneDX 1.6 JSON format from a single invocation. + + +.. 
needextend:: docname is not None and "sbom" in id + :+tags: sbom diff --git a/sbom/extensions.bzl b/sbom/extensions.bzl new file mode 100644 index 0000000..9534a91 --- /dev/null +++ b/sbom/extensions.bzl @@ -0,0 +1,454 @@ +"""Module extension to collect dependency metadata from bzlmod. + +This extension collects version and metadata information for all modules +and other dependencies in the workspace, making it available for +SBOM generation. License metadata is collected automatically from +bundled caches (crates_metadata.json, cpp_metadata.json). + +Usage in MODULE.bazel: + sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") + use_repo(sbom_ext, "sbom_metadata") +""" + +def _generate_purl_from_url(url, name, version): + """Generate Package URL from download URL.""" + if not url: + return "pkg:generic/{}@{}".format(name, version or "unknown") + + version_str = version or "unknown" + + # GitHub + if "github.com" in url: + parts = url.split("github.com/") + if len(parts) > 1: + path_parts = parts[1].split("/") + if len(path_parts) >= 2: + owner = path_parts[0] + repo = path_parts[1].split(".")[0].split("/")[0] + return "pkg:github/{}/{}@{}".format(owner, repo, version_str) + + # GitLab + if "gitlab.com" in url or "gitlab" in url: + if "gitlab.com/" in url: + parts = url.split("gitlab.com/") + if len(parts) > 1: + path_parts = parts[1].split("/") + if len(path_parts) >= 2: + owner = path_parts[0] + repo = path_parts[1].split(".")[0] + return "pkg:gitlab/{}/{}@{}".format(owner, repo, version_str) + + return "pkg:generic/{}@{}".format(name, version_str) + +def _generate_purl_from_git(remote, name, version): + """Generate Package URL from git remote.""" + if not remote: + return "pkg:generic/{}@{}".format(name, version or "unknown") + + version_str = version or "unknown" + + # GitHub (https or ssh) + if "github.com" in remote: + if "github.com:" in remote: + path = remote.split("github.com:")[-1] + else: + path = 
remote.split("github.com/")[-1] + parts = path.replace(".git", "").split("/") + if len(parts) >= 2: + return "pkg:github/{}/{}@{}".format(parts[0], parts[1], version_str) + + # GitLab + if "gitlab" in remote: + if "gitlab.com:" in remote: + path = remote.split("gitlab.com:")[-1] + elif "gitlab.com/" in remote: + path = remote.split("gitlab.com/")[-1] + else: + return "pkg:generic/{}@{}".format(name, version_str) + parts = path.replace(".git", "").split("/") + if len(parts) >= 2: + return "pkg:gitlab/{}/{}@{}".format(parts[0], parts[1], version_str) + + return "pkg:generic/{}@{}".format(name, version_str) + +def _extract_version_from_url(url): + """Extract version from URL patterns.""" + if not url: + return None + + # Try common patterns + for sep in ["/v", "/archive/v", "/archive/", "/releases/download/v", "/releases/download/"]: + if sep in url: + rest = url.split(sep)[-1] + version = rest.split("/")[0].split(".tar")[0].split(".zip")[0] + if version and len(version) > 0 and (version[0].isdigit() or version[0] == "v"): + return version.lstrip("v") + + # Try filename pattern: name-version.tar.gz + filename = url.split("/")[-1] + if "-" in filename: + parts = filename.rsplit("-", 1) + if len(parts) == 2: + version = parts[1].split(".tar")[0].split(".zip")[0] + if version and version[0].isdigit(): + return version + + return None + +def _parse_version_from_module_bazel(content): + """Parse module name and version from MODULE.bazel content using string ops. + + Starlark doesn't have regex, so we parse with string find/split operations. 
+ + Args: + content: String content of a MODULE.bazel file + + Returns: + Tuple of (name, version) or (None, None) if not found + """ + idx = content.find("module(") + if idx < 0: + return None, None + + # Find the closing paren for the module() call + block_end = content.find(")", idx) + if block_end < 0: + return None, None + + block = content[idx:block_end] + + # Extract name + name = None + for quote in ['"', "'"]: + marker = "name = " + quote + name_idx = block.find(marker) + if name_idx >= 0: + name_start = name_idx + len(marker) + name_end = block.find(quote, name_start) + if name_end > name_start: + name = block[name_start:name_end] + break + + # Extract version + version = None + for quote in ['"', "'"]: + marker = "version = " + quote + ver_idx = block.find(marker) + if ver_idx >= 0: + ver_start = ver_idx + len(marker) + ver_end = block.find(quote, ver_start) + if ver_end > ver_start: + version = block[ver_start:ver_end] + break + + return name, version + +def _sbom_metadata_repo_impl(repository_ctx): + """Implementation of the sbom_metadata repository rule.""" + + # Start with metadata from the extension + metadata = json.decode(repository_ctx.attr.metadata_content) + modules = metadata.get("modules", {}) + + # Read MODULE.bazel from tracked dependency modules to extract versions + # Use canonical labels (@@module+) to bypass repo visibility restrictions + for module_name in repository_ctx.attr.tracked_modules: + if module_name in modules: + continue # Already have this module's info + + # Try to read the module's MODULE.bazel file using canonical label + label = Label("@@{}+//:MODULE.bazel".format(module_name)) + path = repository_ctx.path(label) + if path.exists: + content = repository_ctx.read(path) + parsed_name, parsed_version = _parse_version_from_module_bazel(content) + if parsed_name and parsed_version: + modules[parsed_name] = { + "version": parsed_version, + "purl": "pkg:generic/{}@{}".format(parsed_name, parsed_version), + } + + 
metadata["modules"] = modules + repository_ctx.file("metadata.json", json.encode(metadata)) + repository_ctx.file("BUILD.bazel", """\ +# Generated SBOM metadata repository +exports_files(["metadata.json"]) +""") + +_sbom_metadata_repo = repository_rule( + implementation = _sbom_metadata_repo_impl, + attrs = { + "metadata_content": attr.string(mandatory = True), + "tracked_modules": attr.string_list(default = []), + }, +) + +def _sbom_metadata_impl(module_ctx): + """Collects SBOM metadata from all modules in dependency graph.""" + all_http_archives = {} + all_git_repos = {} + all_modules = {} + all_crates = {} + all_licenses = {} + tracked_modules = [] + + for mod in module_ctx.modules: + # Collect tracked module names for version extraction + for tag in mod.tags.track_module: + if tag.name not in tracked_modules: + tracked_modules.append(tag.name) + module_name = mod.name + module_version = mod.version + + # Collect module info from bazel_dep automatically + if module_name and module_version: + all_modules[module_name] = { + "version": module_version, + "purl": "pkg:generic/{}@{}".format(module_name, module_version), + } + + # Collect http_archive metadata + for tag in mod.tags.http_archive: + url = tag.urls[0] if tag.urls else (tag.url if hasattr(tag, "url") and tag.url else "") + version = tag.version if tag.version else _extract_version_from_url(url) + purl = tag.purl if tag.purl else _generate_purl_from_url(url, tag.name, version) + + all_http_archives[tag.name] = { + "version": version or "unknown", + "url": url, + "purl": purl, + "license": tag.license if tag.license else "", + "supplier": tag.supplier if tag.supplier else "", + "sha256": tag.sha256 if tag.sha256 else "", + "cpe": tag.cpe if hasattr(tag, "cpe") and tag.cpe else "", + "aliases": tag.aliases if hasattr(tag, "aliases") and tag.aliases else [], + "pedigree_ancestors": tag.pedigree_ancestors if hasattr(tag, "pedigree_ancestors") and tag.pedigree_ancestors else [], + "pedigree_descendants": 
tag.pedigree_descendants if hasattr(tag, "pedigree_descendants") and tag.pedigree_descendants else [], + "pedigree_variants": tag.pedigree_variants if hasattr(tag, "pedigree_variants") and tag.pedigree_variants else [], + "pedigree_notes": tag.pedigree_notes if hasattr(tag, "pedigree_notes") and tag.pedigree_notes else "", + "declared_by": module_name, + } + + # Collect git_repository metadata + for tag in mod.tags.git_repository: + version = tag.tag if tag.tag else (tag.commit[:12] if tag.commit else "unknown") + purl = tag.purl if tag.purl else _generate_purl_from_git(tag.remote, tag.name, version) + + all_git_repos[tag.name] = { + "version": version, + "remote": tag.remote, + "commit": tag.commit if tag.commit else "", + "commit_date": tag.commit_date if hasattr(tag, "commit_date") and tag.commit_date else "", + "tag": tag.tag if tag.tag else "", + "purl": purl, + "license": tag.license if tag.license else "", + "supplier": tag.supplier if tag.supplier else "", + "cpe": tag.cpe if hasattr(tag, "cpe") and tag.cpe else "", + "aliases": tag.aliases if hasattr(tag, "aliases") and tag.aliases else [], + "pedigree_ancestors": tag.pedigree_ancestors if hasattr(tag, "pedigree_ancestors") and tag.pedigree_ancestors else [], + "pedigree_descendants": tag.pedigree_descendants if hasattr(tag, "pedigree_descendants") and tag.pedigree_descendants else [], + "pedigree_variants": tag.pedigree_variants if hasattr(tag, "pedigree_variants") and tag.pedigree_variants else [], + "pedigree_notes": tag.pedigree_notes if hasattr(tag, "pedigree_notes") and tag.pedigree_notes else "", + "declared_by": module_name, + } + + # Collect license info for bazel_dep modules, http_archive, git_repository, and crate deps + for tag in mod.tags.license: + dep_type = tag.type if hasattr(tag, "type") and tag.type else "" + url = "" + if hasattr(tag, "urls") and tag.urls: + url = tag.urls[0] + elif hasattr(tag, "url") and tag.url: + url = tag.url + remote = tag.remote if hasattr(tag, "remote") and 
tag.remote else "" + + explicit_version = tag.version if hasattr(tag, "version") and tag.version else "" + supplier = tag.supplier if hasattr(tag, "supplier") and tag.supplier else "" + cpe = tag.cpe if hasattr(tag, "cpe") and tag.cpe else "" + aliases = tag.aliases if hasattr(tag, "aliases") and tag.aliases else [] + pedigree_ancestors = tag.pedigree_ancestors if hasattr(tag, "pedigree_ancestors") and tag.pedigree_ancestors else [] + pedigree_descendants = tag.pedigree_descendants if hasattr(tag, "pedigree_descendants") and tag.pedigree_descendants else [] + pedigree_variants = tag.pedigree_variants if hasattr(tag, "pedigree_variants") and tag.pedigree_variants else [] + pedigree_notes = tag.pedigree_notes if hasattr(tag, "pedigree_notes") and tag.pedigree_notes else "" + + if dep_type == "cargo": + version = explicit_version if explicit_version else "unknown" + all_crates[tag.name] = { + "version": version, + "purl": tag.purl if tag.purl else "pkg:cargo/{}@{}".format(tag.name, version), + "license": tag.license, + "supplier": supplier, + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + } + elif url or (explicit_version and not remote): + version = explicit_version if explicit_version else _extract_version_from_url(url) + purl = tag.purl if tag.purl else _generate_purl_from_url(url, tag.name, version) + all_http_archives[tag.name] = { + "version": version or "unknown", + "url": url, + "purl": purl, + "license": tag.license, + "supplier": supplier, + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + "declared_by": module_name, + } + elif remote: + version = explicit_version if explicit_version else "unknown" + purl = tag.purl if tag.purl else 
_generate_purl_from_git(remote, tag.name, version) + all_git_repos[tag.name] = { + "version": version, + "remote": remote, + "commit": "", + "tag": "", + "purl": purl, + "license": tag.license, + "supplier": supplier, + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + "declared_by": module_name, + } + else: + all_licenses[tag.name] = { + "license": tag.license, + "supplier": supplier, + "purl": tag.purl if tag.purl else "", + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + } + + # Apply license/supplier overrides to modules + for name, license_info in all_licenses.items(): + if name in all_modules: + all_modules[name]["license"] = license_info["license"] + if license_info.get("supplier"): + all_modules[name]["supplier"] = license_info["supplier"] + if license_info.get("purl"): + all_modules[name]["purl"] = license_info["purl"] + if license_info.get("cpe"): + all_modules[name]["cpe"] = license_info["cpe"] + if license_info.get("aliases"): + all_modules[name]["aliases"] = license_info["aliases"] + if license_info.get("pedigree_ancestors"): + all_modules[name]["pedigree_ancestors"] = license_info["pedigree_ancestors"] + if license_info.get("pedigree_descendants"): + all_modules[name]["pedigree_descendants"] = license_info["pedigree_descendants"] + if license_info.get("pedigree_variants"): + all_modules[name]["pedigree_variants"] = license_info["pedigree_variants"] + if license_info.get("pedigree_notes"): + all_modules[name]["pedigree_notes"] = license_info["pedigree_notes"] + + # Generate metadata JSON + metadata_content = json.encode({ + "modules": all_modules, + "http_archives": all_http_archives, + "git_repositories": all_git_repos, + "crates": all_crates, 
+ "licenses": all_licenses, + }) + + _sbom_metadata_repo( + name = "sbom_metadata", + metadata_content = metadata_content, + tracked_modules = tracked_modules, + ) + +# Tag for http_archive dependencies - mirrors http_archive attributes +_http_archive_tag = tag_class( + doc = "SBOM metadata for http_archive dependency (mirrors http_archive attrs)", + attrs = { + "name": attr.string(mandatory = True, doc = "Repository name"), + "urls": attr.string_list(doc = "Download URLs"), + "url": attr.string(doc = "Single download URL (alternative to urls)"), + "version": attr.string(doc = "Version (auto-extracted from URL if not provided)"), + "sha256": attr.string(doc = "SHA256 checksum"), + "license": attr.string(doc = "SPDX license identifier"), + "supplier": attr.string(doc = "Supplier/organization name"), + "purl": attr.string(doc = "Package URL (auto-generated if not provided)"), + "cpe": attr.string(doc = "CPE identifier"), + "aliases": attr.string_list(doc = "Alternate component names"), + "pedigree_ancestors": attr.string_list(doc = "Pedigree ancestor identifiers (PURL or name)"), + "pedigree_descendants": attr.string_list(doc = "Pedigree descendant identifiers (PURL or name)"), + "pedigree_variants": attr.string_list(doc = "Pedigree variant identifiers (PURL or name)"), + "pedigree_notes": attr.string(doc = "Pedigree notes"), + }, +) + +# Tag for git_repository dependencies - mirrors git_repository attributes +_git_repository_tag = tag_class( + doc = "SBOM metadata for git_repository dependency (mirrors git_repository attrs)", + attrs = { + "name": attr.string(mandatory = True, doc = "Repository name"), + "remote": attr.string(mandatory = True, doc = "Git remote URL"), + "commit": attr.string(doc = "Git commit hash"), + "tag": attr.string(doc = "Git tag"), + "commit_date": attr.string(doc = "Git commit date (ISO 8601)"), + "license": attr.string(doc = "SPDX license identifier"), + "supplier": attr.string(doc = "Supplier/organization name"), + "purl": attr.string(doc 
= "Package URL (auto-generated if not provided)"), + "cpe": attr.string(doc = "CPE identifier"), + "aliases": attr.string_list(doc = "Alternate component names"), + "pedigree_ancestors": attr.string_list(doc = "Pedigree ancestor identifiers (PURL or name)"), + "pedigree_descendants": attr.string_list(doc = "Pedigree descendant identifiers (PURL or name)"), + "pedigree_variants": attr.string_list(doc = "Pedigree variant identifiers (PURL or name)"), + "pedigree_notes": attr.string(doc = "Pedigree notes"), + }, +) + +# Tag to add license info to any dependency (bazel_dep, http_archive, git_repository, or crate) +_license_tag = tag_class( + doc = "Add license/supplier metadata for any dependency", + attrs = { + "name": attr.string(mandatory = True, doc = "Dependency name"), + "license": attr.string(mandatory = True, doc = "SPDX license identifier"), + "supplier": attr.string(doc = "Supplier/organization name (e.g., 'Boost.org', 'Google LLC')"), + "version": attr.string(doc = "Version string (for http_archive/git_repository/crate; auto-extracted for bazel_dep)"), + "type": attr.string(doc = "Dependency type: 'cargo' for Rust crates (affects PURL generation). 
Leave empty for auto-detection."), + "purl": attr.string(doc = "Override Package URL"), + "url": attr.string(doc = "Download URL for http_archive (for PURL generation)"), + "urls": attr.string_list(doc = "Download URLs for http_archive (for PURL generation)"), + "remote": attr.string(doc = "Git remote URL for git_repository (for PURL generation)"), + "cpe": attr.string(doc = "CPE identifier"), + "aliases": attr.string_list(doc = "Alternate component names"), + "pedigree_ancestors": attr.string_list(doc = "Pedigree ancestor identifiers (PURL or name)"), + "pedigree_descendants": attr.string_list(doc = "Pedigree descendant identifiers (PURL or name)"), + "pedigree_variants": attr.string_list(doc = "Pedigree variant identifiers (PURL or name)"), + "pedigree_notes": attr.string(doc = "Pedigree notes"), + }, +) + +# Tag to track a dependency module for automatic version extraction +_track_module_tag = tag_class( + doc = "Track a bazel_dep module for automatic version extraction from its MODULE.bazel", + attrs = { + "name": attr.string(mandatory = True, doc = "Module name (as declared in bazel_dep)"), + }, +) + +sbom_metadata = module_extension( + implementation = _sbom_metadata_impl, + tag_classes = { + "http_archive": _http_archive_tag, + "git_repository": _git_repository_tag, + "license": _license_tag, + "track_module": _track_module_tag, + }, + doc = "Collects SBOM metadata from dependency declarations", +) diff --git a/sbom/internal/BUILD b/sbom/internal/BUILD new file mode 100644 index 0000000..6237649 --- /dev/null +++ b/sbom/internal/BUILD @@ -0,0 +1,24 @@ +# Internal SBOM implementation package +# +# This package contains internal implementation details for SBOM generation. 
+# External consumers should use the public API in //sbom:defs.bzl + +package(default_visibility = ["//sbom:__subpackages__"]) + +exports_files([ + "aspect.bzl", + "metadata_rule.bzl", + "providers.bzl", + "rules.bzl", +]) + +# Filegroup for all internal bzl files +filegroup( + name = "bzl_files", + srcs = [ + "aspect.bzl", + "metadata_rule.bzl", + "providers.bzl", + "rules.bzl", + ], +) diff --git a/sbom/internal/__init__.py b/sbom/internal/__init__.py new file mode 100644 index 0000000..bd5f6fd --- /dev/null +++ b/sbom/internal/__init__.py @@ -0,0 +1 @@ +"""SBOM internal implementation package.""" diff --git a/sbom/internal/aspect.bzl b/sbom/internal/aspect.bzl new file mode 100644 index 0000000..cf68edc --- /dev/null +++ b/sbom/internal/aspect.bzl @@ -0,0 +1,115 @@ +"""Aspect to traverse and collect transitive dependencies of a target. + +This aspect traverses the dependency graph of specified targets and collects +information about all dependencies, including external repositories, which +is essential for SBOM generation. +""" + +load(":providers.bzl", "SbomDepsInfo") + +def _sbom_aspect_impl(target, ctx): + """Collects transitive dependency information for SBOM generation. 
+ + Args: + target: The target being analyzed + ctx: The aspect context + + Returns: + A list containing SbomDepsInfo provider + """ + direct_deps = [] + transitive_deps_list = [] + external_repos_list = [] + external_repos_direct = [] + external_dep_edges_direct = [] + external_dep_edges_list = [] + + # Get this target's label info + label = target.label + if label.workspace_name: + # This is an external dependency + external_repos_direct.append(label.workspace_name) + from_repo = label.workspace_name + else: + from_repo = "" + + # Collect from rule attributes that represent dependencies + dep_attrs = ["deps", "srcs", "data", "proc_macro_deps", "crate_root", "compile_data"] + for attr_name in dep_attrs: + if hasattr(ctx.rule.attr, attr_name): + attr_val = getattr(ctx.rule.attr, attr_name) + if type(attr_val) == "list": + for dep in attr_val: + if hasattr(dep, "label"): + direct_deps.append(dep.label) + if from_repo and dep.label.workspace_name: + external_dep_edges_direct.append( + "{}::{}".format(from_repo, dep.label.workspace_name), + ) + if SbomDepsInfo in dep: + # Propagate transitive deps from dependencies + transitive_deps_list.append(dep[SbomDepsInfo].transitive_deps) + external_repos_list.append(dep[SbomDepsInfo].external_repos) + external_dep_edges_list.append(dep[SbomDepsInfo].external_dep_edges) + elif attr_val != None and hasattr(attr_val, "label"): + # Single target attribute (e.g., crate_root) + direct_deps.append(attr_val.label) + if from_repo and attr_val.label.workspace_name: + external_dep_edges_direct.append( + "{}::{}".format(from_repo, attr_val.label.workspace_name), + ) + if SbomDepsInfo in attr_val: + transitive_deps_list.append(attr_val[SbomDepsInfo].transitive_deps) + external_repos_list.append(attr_val[SbomDepsInfo].external_repos) + external_dep_edges_list.append(attr_val[SbomDepsInfo].external_dep_edges) + + # Handle cc_library specific attributes + cc_dep_attrs = ["hdrs", "textual_hdrs", "implementation_deps"] + for attr_name in 
cc_dep_attrs: + if hasattr(ctx.rule.attr, attr_name): + attr_val = getattr(ctx.rule.attr, attr_name) + if type(attr_val) == "list": + for dep in attr_val: + if hasattr(dep, "label"): + direct_deps.append(dep.label) + if from_repo and dep.label.workspace_name: + external_dep_edges_direct.append( + "{}::{}".format(from_repo, dep.label.workspace_name), + ) + if SbomDepsInfo in dep: + transitive_deps_list.append(dep[SbomDepsInfo].transitive_deps) + external_repos_list.append(dep[SbomDepsInfo].external_repos) + external_dep_edges_list.append(dep[SbomDepsInfo].external_dep_edges) + + return [SbomDepsInfo( + direct_deps = depset(direct_deps), + transitive_deps = depset( + direct = [label], + transitive = transitive_deps_list, + ), + external_repos = depset( + direct = external_repos_direct, + transitive = external_repos_list, + ), + external_dep_edges = depset( + direct = external_dep_edges_direct, + transitive = external_dep_edges_list, + ), + )] + +sbom_aspect = aspect( + implementation = _sbom_aspect_impl, + attr_aspects = [ + "deps", + "srcs", + "data", + "proc_macro_deps", + "crate_root", + "compile_data", + "hdrs", + "textual_hdrs", + "implementation_deps", + ], + provides = [SbomDepsInfo], + doc = "Traverses target dependencies and collects SBOM-relevant information", +) diff --git a/sbom/internal/generator/BUILD b/sbom/internal/generator/BUILD new file mode 100644 index 0000000..d3b96fc --- /dev/null +++ b/sbom/internal/generator/BUILD @@ -0,0 +1,38 @@ +# SBOM Generator Python package +# +# This package contains the Python tools for generating SBOM files +# in SPDX 2.3 and CycloneDX 1.6 formats. 
+ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") + +package(default_visibility = ["//sbom:__subpackages__"]) + +py_binary( + name = "sbom_generator", + srcs = ["sbom_generator.py"], + main = "sbom_generator.py", + deps = [ + ":cyclonedx_formatter", + ":spdx_formatter", + ], +) + +py_library( + name = "sbom_generator_lib", + srcs = ["sbom_generator.py"], + deps = [ + ":cyclonedx_formatter", + ":spdx_formatter", + ], +) + +py_library( + name = "spdx_formatter", + srcs = ["spdx_formatter.py"], +) + +py_library( + name = "cyclonedx_formatter", + srcs = ["cyclonedx_formatter.py"], +) + diff --git a/sbom/internal/generator/__init__.py b/sbom/internal/generator/__init__.py new file mode 100644 index 0000000..a34c1c3 --- /dev/null +++ b/sbom/internal/generator/__init__.py @@ -0,0 +1 @@ +"""SBOM generator package.""" diff --git a/sbom/internal/generator/cyclonedx_formatter.py b/sbom/internal/generator/cyclonedx_formatter.py new file mode 100644 index 0000000..9c31778 --- /dev/null +++ b/sbom/internal/generator/cyclonedx_formatter.py @@ -0,0 +1,376 @@ +"""CycloneDX 1.6 JSON formatter for SBOM generation. + +This module generates CycloneDX 1.6 compliant JSON output from the component +information collected by the Bazel aspect and module extension. + +CycloneDX 1.6 Specification: https://cyclonedx.org/docs/1.6/json/ +""" + +import re +import uuid +from typing import Any + + +def _normalize_spdx_license(expr: str) -> str: + """Normalize SPDX boolean operators to uppercase as required by the spec. + + dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). + SPDX 2.3 Appendix IV and CycloneDX 1.6 both require uppercase OR/AND/WITH. + Uses space-delimited substitution to avoid modifying license identifiers + that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). 
+ """ + expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) + expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) + expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) + return expr + + +def generate_cyclonedx( + components: list[dict[str, Any]], + config: dict[str, Any], + timestamp: str, + external_dep_edges: list[str] | None = None, +) -> dict[str, Any]: + """Generate CycloneDX 1.6 JSON document. + + Args: + components: List of component dictionaries + config: Configuration dictionary with producer info + timestamp: ISO 8601 timestamp + + Returns: + CycloneDX 1.6 compliant dictionary + """ + component_name = config.get("component_name", "unknown") + component_version = config.get("component_version", "") + producer_name = config.get("producer_name", "Eclipse Foundation") + producer_url = config.get("producer_url", "") + + # Generate serial number (URN UUID) + serial_number = f"urn:uuid:{uuid.uuid4()}" + + cdx_doc: dict[str, Any] = { + "$schema": "https://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "serialNumber": serial_number, + "version": 1, + "metadata": { + "timestamp": timestamp, + "tools": { + "components": [ + { + "type": "application", + "name": "score-sbom-generator", + "description": "Eclipse SCORE SBOM Generator (Bazel-native)", + "publisher": producer_name, + } + ] + }, + "component": { + "type": "application", + "name": component_name, + "version": component_version if component_version else "unversioned", + "bom-ref": _generate_bom_ref(component_name, component_version), + "purl": f"pkg:github/eclipse-score/{component_name}@{component_version}" + if component_version + else None, + "supplier": { + "name": producer_name, + "url": [producer_url] if producer_url else [], + }, + }, + "supplier": { + "name": producer_name, + "url": [producer_url] if producer_url else [], + }, + }, + "components": [], + "dependencies": [], + } + + # Clean up None values from metadata.component + 
if cdx_doc["metadata"]["component"].get("purl") is None: + del cdx_doc["metadata"]["component"]["purl"] + + # Add authors if provided + authors = config.get("sbom_authors", []) + if authors: + cdx_doc["metadata"]["authors"] = [_author_entry(a) for a in authors] + + # Add generation lifecycle if provided + generation_context = config.get("generation_context", "") + if generation_context: + cdx_doc["metadata"]["lifecycles"] = [{"phase": generation_context}] + + # Add extra tool names if provided + extra_tools = config.get("sbom_tools", []) + if extra_tools: + for tool_name in extra_tools: + cdx_doc["metadata"]["tools"]["components"].append( + { + "type": "application", + "name": tool_name, + } + ) + + # Root component bom-ref for dependencies + root_bom_ref = _generate_bom_ref(component_name, component_version) + + # Add components + dependency_refs = [] + for comp in components: + cdx_component = _create_cdx_component(comp) + cdx_doc["components"].append(cdx_component) + dependency_refs.append(cdx_component["bom-ref"]) + + # Build dependency graph + depends_map: dict[str, set[str]] = {} + if external_dep_edges: + for edge in external_dep_edges: + if "::" not in edge: + continue + src, dst = edge.split("::", 1) + if not src or not dst: + continue + src_ref = _generate_bom_ref(src, _component_version_lookup(components, src)) + dst_ref = _generate_bom_ref(dst, _component_version_lookup(components, dst)) + depends_map.setdefault(src_ref, set()).add(dst_ref) + + # Add root dependency (main component depends on all components) + cdx_doc["dependencies"].append( + { + "ref": root_bom_ref, + "dependsOn": dependency_refs, + } + ) + + # Add each component's dependency entry + for comp in components: + name = comp.get("name", "") + version = comp.get("version", "") + bom_ref = _generate_bom_ref(name, version) + cdx_doc["dependencies"].append( + { + "ref": bom_ref, + "dependsOn": sorted(depends_map.get(bom_ref, set())), + } + ) + + return cdx_doc + + +def 
_create_cdx_component(component: dict[str, Any]) -> dict[str, Any]: + """Create a CycloneDX component from component data. + + Args: + component: Component dictionary + + Returns: + CycloneDX component dictionary + """ + name = component.get("name", "unknown") + version = component.get("version", "unknown") + purl = component.get("purl", "") + license_id = _normalize_spdx_license(component.get("license", "")) + description = component.get("description", "") + supplier = component.get("supplier", "") + comp_type = component.get("type", "library") + source = component.get("source", "") + url = component.get("url", "") + checksum = component.get("checksum", "") + cpe = component.get("cpe", "") + aliases = component.get("aliases", []) + pedigree_ancestors = component.get("pedigree_ancestors", []) + pedigree_descendants = component.get("pedigree_descendants", []) + pedigree_variants = component.get("pedigree_variants", []) + pedigree_notes = component.get("pedigree_notes", "") + + cdx_comp: dict[str, Any] = { + "type": _map_type_to_cdx_type(comp_type), + "name": name, + "version": version, + "bom-ref": _generate_bom_ref(name, version), + } + + # Add description + if description: + cdx_comp["description"] = description + + # Add PURL + if purl: + cdx_comp["purl"] = purl + + # Add license + if license_id: + if " AND " in license_id or " OR " in license_id: + # Compound SPDX expression must use "expression", not "license.id" + cdx_comp["licenses"] = [{"expression": license_id}] + else: + cdx_comp["licenses"] = [{"license": {"id": license_id}}] + + # Add supplier + if supplier: + cdx_comp["supplier"] = { + "name": supplier, + } + + # Add hashes (SHA-256 from Cargo.lock) + if checksum: + cdx_comp["hashes"] = [ + { + "alg": "SHA-256", + "content": checksum, + } + ] + if cpe: + cdx_comp["cpe"] = cpe + + if aliases: + cdx_comp["properties"] = [ + {"name": "cdx:alias", "value": alias} for alias in aliases + ] + + pedigree = _build_pedigree( + pedigree_ancestors, + 
pedigree_descendants, + pedigree_variants, + pedigree_notes, + ) + if pedigree: + cdx_comp["pedigree"] = pedigree + + # Add external references + external_refs = [] + + # Add download/source URL + if url: + external_refs.append( + { + "type": "distribution", + "url": url, + } + ) + elif source == "crates.io": + external_refs.append( + { + "type": "distribution", + "url": f"https://crates.io/crates/{name}/{version}", + } + ) + + # Add VCS URL for git sources + if source == "git" and url: + external_refs.append( + { + "type": "vcs", + "url": url, + } + ) + + if external_refs: + cdx_comp["externalReferences"] = external_refs + + return cdx_comp + + +def _map_type_to_cdx_type(comp_type: str) -> str: + """Map component type to CycloneDX component type. + + Args: + comp_type: Component type string + + Returns: + CycloneDX component type string + """ + type_mapping = { + "application": "application", + "library": "library", + "framework": "framework", + "file": "file", + "container": "container", + "firmware": "firmware", + "device": "device", + "data": "data", + "operating-system": "operating-system", + "device-driver": "device-driver", + "machine-learning-model": "machine-learning-model", + "platform": "platform", + } + return type_mapping.get(comp_type, "library") + + +def _generate_bom_ref(name: str, version: str) -> str: + """Generate a unique bom-ref for a component. + + Args: + name: Component name + version: Component version + + Returns: + Unique bom-ref string + """ + # Create a deterministic but unique reference + sanitized_name = _sanitize_name(name) + sanitized_version = _sanitize_name(version) if version else "unknown" + return f"{sanitized_name}@{sanitized_version}" + + +def _sanitize_name(value: str) -> str: + """Sanitize a string for use in bom-ref. 
+ + Args: + value: String to sanitize + + Returns: + Sanitized string + """ + result = [] + for char in value: + if char.isalnum() or char in (".", "-", "_"): + result.append(char) + elif char in (" ", "/"): + result.append("-") + return "".join(result) or "unknown" + + +def _author_entry(value: str) -> dict[str, Any]: + """Create author entry from a string.""" + value = value.strip() + if "<" in value and ">" in value: + name, rest = value.split("<", 1) + email = rest.split(">", 1)[0].strip() + return {"name": name.strip(), "email": email} + return {"name": value} + + +def _build_pedigree( + ancestors: list[str], + descendants: list[str], + variants: list[str], + notes: str, +) -> dict[str, Any] | None: + pedigree: dict[str, Any] = {} + if ancestors: + pedigree["ancestors"] = [_pedigree_ref(a) for a in ancestors] + if descendants: + pedigree["descendants"] = [_pedigree_ref(d) for d in descendants] + if variants: + pedigree["variants"] = [_pedigree_ref(v) for v in variants] + if notes: + pedigree["notes"] = notes + return pedigree or None + + +def _pedigree_ref(value: str) -> dict[str, Any]: + value = value.strip() + if value.startswith("pkg:"): + return {"purl": value} + return {"name": value} + + +def _component_version_lookup(components: list[dict[str, Any]], name: str) -> str: + for comp in components: + if comp.get("name") == name: + return comp.get("version", "") + return "" diff --git a/sbom/internal/generator/sbom_generator.py b/sbom/internal/generator/sbom_generator.py new file mode 100644 index 0000000..8ae3da6 --- /dev/null +++ b/sbom/internal/generator/sbom_generator.py @@ -0,0 +1,868 @@ +#!/usr/bin/env python3 +"""SBOM generator - creates SPDX and CycloneDX output from Bazel aspect data. + +This is the main entry point for SBOM generation. It reads dependency +information collected by the Bazel aspect and metadata from the module +extension, then generates SBOM files in SPDX 2.3 and CycloneDX 1.6 formats. 
+""" + +import argparse +import json +import re +import sys +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +from sbom.internal.generator.spdx_formatter import generate_spdx +from sbom.internal.generator.cyclonedx_formatter import generate_cyclonedx + + +def parse_module_bazel_files(file_paths: list[str]) -> dict[str, dict[str, str]]: + """Parse MODULE.bazel files to extract module name and version. + + Reads each MODULE.bazel file and extracts the module() call's name and + version fields. This allows automatic version detection for bazel_dep + modules that don't appear in the sbom_metadata extension's module list + (because they don't use_extension for sbom_metadata). + + Args: + file_paths: List of paths to MODULE.bazel files + + Returns: + Dict mapping module name to {"version": ..., "purl": ...} + """ + modules: dict[str, dict[str, str]] = {} + for fpath in file_paths: + try: + with open(fpath, encoding="utf-8") as f: + content = f.read() + except OSError: + continue + + # Extract module(name = "...", version = "...") + module_match = re.search( + r"module\s*\((.*?)\)", + content, + re.DOTALL, + ) + if not module_match: + continue + + module_block = module_match.group(1) + name_match = re.search(r'name\s*=\s*["\']([^"\']+)["\']', module_block) + version_match = re.search(r'version\s*=\s*["\']([^"\']+)["\']', module_block) + + if name_match and version_match: + name = name_match.group(1) + version = version_match.group(1) + modules[name] = { + "version": version, + "purl": f"pkg:generic/{name}@{version}", + } + + return modules + + +def parse_module_lockfiles(file_paths: list[str]) -> dict[str, dict[str, str]]: + """Parse MODULE.bazel.lock files to infer module versions and checksums. + + Uses registry URL keys from lockfiles. Only modules with a single unique + observed version are emitted to avoid ambiguous version selection. 
+ + For modules coming from the Bazel Central Registry, this also extracts the + SHA-256 checksum from the corresponding ``source.json`` entry so that + CycloneDX hashes can be populated for C/C++ dependencies. + """ + # Track all observed versions per module and (optional) sha256 per + # (module, version) tuple. + module_versions: dict[str, set[str]] = {} + module_sha256: dict[tuple[str, str], str] = {} + + for fpath in file_paths: + try: + with open(fpath, encoding="utf-8") as f: + lock_data = json.load(f) + except (OSError, json.JSONDecodeError): + continue + + registry_hashes = lock_data.get("registryFileHashes", {}) + if not isinstance(registry_hashes, dict): + continue + + for url, sha in registry_hashes.items(): + if not isinstance(url, str) or not isinstance(sha, str): + continue + + # MODULE.bazel entry – records which version was selected. + module_match = re.search( + r"/modules/([^/]+)/([^/]+)/MODULE\.bazel$", + url, + ) + if module_match: + module_name, version = module_match.groups() + module_versions.setdefault(module_name, set()).add(version) + + # source.json entry – carries the sha256 of the downloaded source + # tarball for this module@version. Use it as the component hash. + source_match = re.search( + r"/modules/([^/]+)/([^/]+)/source\.json$", + url, + ) + if source_match: + src_module, src_version = source_match.groups() + module_sha256[(src_module, src_version)] = sha + + modules: dict[str, dict[str, str]] = {} + for name, versions in module_versions.items(): + if len(versions) != 1: + # Skip modules with ambiguous versions. + continue + version = next(iter(versions)) + entry: dict[str, str] = { + "version": version, + "purl": f"pkg:generic/{name}@{version}", + } + sha = module_sha256.get((name, version)) + if sha: + # Expose as sha256 so downstream code can turn it into a CycloneDX + # SHA-256 hash entry. 
+ entry["sha256"] = sha + modules[name] = entry + + return modules + + +def load_crates_cache(cache_path: str | None = None) -> dict[str, Any]: + """Load crates metadata cache generated at build time. + + Args: + cache_path: Path to crates_metadata.json (from --crates-cache) + + Returns: + Dict mapping crate name to metadata (license, checksum, etc.) + """ + if not cache_path: + return {} + try: + with open(cache_path, encoding="utf-8") as f: + return json.load(f) + except (OSError, json.JSONDecodeError): + return {} + + + + +# Known licenses for Bazel Central Registry (BCR) C++ modules. +# Used as a fallback when cdxgen and lockfile parsing cannot provide license data. +# Keys are BCR module names (exact or prefix for sub-modules like boost.*). +BCR_KNOWN_LICENSES: dict[str, dict[str, str]] = { + "boost": {"license": "BSL-1.0", "supplier": "Boost.org"}, + "abseil-cpp": {"license": "Apache-2.0", "supplier": "Google LLC"}, + "zlib": {"license": "Zlib", "supplier": "Jean-loup Gailly and Mark Adler"}, + "nlohmann_json": {"license": "MIT", "supplier": "Niels Lohmann"}, + "nlohmann-json": {"license": "MIT", "supplier": "Niels Lohmann"}, + "googletest": {"license": "BSD-3-Clause", "supplier": "Google LLC"}, + "google-benchmark": {"license": "Apache-2.0", "supplier": "Google LLC"}, + "flatbuffers": {"license": "Apache-2.0", "supplier": "Google LLC"}, + "protobuf": {"license": "BSD-3-Clause", "supplier": "Google LLC"}, + "re2": {"license": "BSD-3-Clause", "supplier": "Google LLC"}, + "openssl": {"license": "Apache-2.0", "supplier": "OpenSSL Software Foundation"}, + "curl": {"license": "curl", "supplier": "Daniel Stenberg"}, + "libpng": {"license": "libpng", "supplier": "Glenn Randers-Pehrson"}, + "libjpeg": {"license": "IJG", "supplier": "Independent JPEG Group"}, +} + + +def apply_known_licenses(metadata: dict[str, Any]) -> None: + """Apply BCR known licenses and user license overrides to modules. + + Priority (highest to lowest): + 1. Module already has a license (skip). 
+ 2. Exact match in metadata["licenses"] (user-declared via sbom_ext.license). + 3. Parent match in metadata["licenses"] (e.g., "boost" covers "boost.config"). + 4. BCR_KNOWN_LICENSES exact match. + 5. BCR_KNOWN_LICENSES parent match (e.g., "boost" entry covers "boost.config"). + + Args: + metadata: Metadata dict with "modules" and "licenses" keys. Modified in place. + """ + modules = metadata.get("modules", {}) + licenses = metadata.get("licenses", {}) + + for module_name, module_data in modules.items(): + if module_data.get("license"): + continue # Already has a license — do not overwrite + + license_source: dict[str, str] | None = None + + # 1. Exact match in user-declared licenses (highest priority) + if module_name in licenses: + license_source = licenses[module_name] + # 2. Parent match in user-declared licenses (e.g. "boost" → "boost.config") + elif "." in module_name: + parent = module_name.split(".")[0] + if parent in licenses: + license_source = licenses[parent] + + # 3. BCR known licenses — exact match + if license_source is None and module_name in BCR_KNOWN_LICENSES: + license_source = BCR_KNOWN_LICENSES[module_name] + # 4. BCR known licenses — parent prefix match (e.g. boost.config → boost) + if license_source is None and "." in module_name: + parent = module_name.split(".")[0] + if parent in BCR_KNOWN_LICENSES: + license_source = BCR_KNOWN_LICENSES[parent] + + if license_source: + module_data["license"] = license_source["license"] + if not module_data.get("supplier") and license_source.get("supplier"): + module_data["supplier"] = license_source["supplier"] + + +def normalize_name(name: str) -> str: + """Normalize a dependency name for fuzzy matching. + + Handles naming differences between Bazel repos and C++ metadata cache: + e.g. nlohmann_json vs nlohmann-json, libfmt vs fmt. 
+ + Args: + name: Dependency name to normalize + + Returns: + Normalized name string for comparison + """ + n = name.lower().strip() + for prefix in ("lib", "lib_"): + if n.startswith(prefix) and len(n) > len(prefix): + n = n[len(prefix) :] + n = n.replace("-", "").replace("_", "").replace(".", "") + return n + + +def enrich_components_from_cpp_cache( + components: list[dict[str, Any]], + cpp_components: list[dict[str, Any]], + metadata: dict[str, Any], +) -> list[dict[str, Any]]: + """Enrich Bazel-discovered components with C++ metadata cache. + + For each Bazel component, finds a matching C++ cache entry by normalized + name and fills in missing fields (license, supplier, version, purl). + Components not present in Bazel's discovered dependency graph are ignored. + + Args: + components: Bazel-discovered components to enrich + cpp_components: Components from C++ metadata cache + metadata: Metadata dict + + Returns: + Enriched list of components + """ + # Build lookup: normalized_name -> cache component + cpp_by_name: dict[str, dict[str, Any]] = {} + for cc in cpp_components: + norm = normalize_name(cc["name"]) + cpp_by_name[norm] = cc + cpp_by_name[cc["name"].lower()] = cc + + for comp in components: + comp_name = comp.get("name", "") + norm_name = normalize_name(comp_name) + + cpp_match = cpp_by_name.get(norm_name) or cpp_by_name.get(comp_name.lower()) + # Try parent name match (e.g., boost.config+ -> boost) + if not cpp_match: + base_name = comp_name.rstrip("+") + if "." 
in base_name: + parent = base_name.split(".")[0] + cpp_match = cpp_by_name.get(normalize_name(parent)) + if not cpp_match: + continue + + # Enrich missing fields only + if not comp.get("license") and cpp_match.get("license"): + comp["license"] = cpp_match["license"] + + if not comp.get("description") and cpp_match.get("description"): + comp["description"] = cpp_match["description"] + + if not comp.get("supplier") and cpp_match.get("supplier"): + comp["supplier"] = cpp_match["supplier"] + + if comp.get("version") in ("unknown", "") and cpp_match.get("version") not in ( + "unknown", + "", + ): + comp["version"] = cpp_match["version"] + + if comp.get("purl", "").endswith("@unknown") and cpp_match.get("purl"): + comp["purl"] = cpp_match["purl"] + + if not comp.get("url") and cpp_match.get("url"): + comp["url"] = cpp_match["url"] + + if not comp.get("checksum") and cpp_match.get("checksum"): + comp["checksum"] = cpp_match["checksum"] + + return components + + +def load_cdxgen_sbom(cdxgen_path: str) -> list[dict[str, Any]]: + """Load and convert cdxgen CycloneDX SBOM to component list. 
+ + Args: + cdxgen_path: Path to cdxgen-generated CycloneDX JSON file + + Returns: + List of component dicts in internal format + """ + try: + with open(cdxgen_path, encoding="utf-8") as f: + cdx_data = json.load(f) + except (OSError, json.JSONDecodeError): + return [] + + components: list[dict[str, Any]] = [] + for comp in cdx_data.get("components", []): + # Extract license information + licenses = comp.get("licenses", []) + license_str = "" + if licenses: + # Take first license + lic = licenses[0] + if isinstance(lic, dict): + license_str = ( + lic.get("expression", "") + or lic.get("license", {}).get("id", "") + or lic.get("license", {}).get("name", "") + ) + + # Extract purl + purl = comp.get("purl", "") + + # Extract SHA-256 hash if present + checksum = "" + for h in comp.get("hashes", []): + if not isinstance(h, dict): + continue + if h.get("alg") == "SHA-256" and h.get("content"): + checksum = str(h["content"]) + break + + # Build component + component = { + "name": comp.get("name", ""), + "version": comp.get("version", "unknown"), + "purl": purl, + "type": comp.get("type", "library"), + "license": license_str, + "description": comp.get("description", ""), + "supplier": comp.get("supplier", {}).get("name", "") + if isinstance(comp.get("supplier"), dict) + else "", + "cpe": comp.get("cpe", ""), + "url": "", + "checksum": checksum, + } + + # Add component if it has a name + if component["name"]: + components.append(component) + + return components + + +def mark_missing_cpp_descriptions(components: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Mark missing descriptions for non-Rust libraries as 'Missing'.""" + for comp in components: + if comp.get("description"): + continue + if comp.get("type") != "library": + continue + purl = comp.get("purl", "") + if purl.startswith("pkg:cargo/"): + continue + comp["description"] = "Missing" + return components + + +def main() -> int: + """Main entry point for SBOM generation.""" + parser = 
argparse.ArgumentParser(description="Generate SBOM from Bazel deps") + parser.add_argument("--input", required=True, help="Input JSON from Bazel rule") + parser.add_argument( + "--metadata", required=True, help="Metadata JSON from module extension" + ) + parser.add_argument("--spdx-output", help="SPDX 2.3 JSON output file") + parser.add_argument("--cyclonedx-output", help="CycloneDX 1.6 output file") + parser.add_argument("--crates-cache", help="Path to crates_metadata.json override") + parser.add_argument( + "--cdxgen-sbom", + help="Path to cdxgen-generated CycloneDX JSON for C++ enrichment", + ) + args = parser.parse_args() + + # Load dependency data from Bazel + with open(args.input, encoding="utf-8") as f: + data = json.load(f) + + # Load metadata from module extension + with open(args.metadata, encoding="utf-8") as f: + metadata = json.load(f) + + # Parse MODULE.bazel files from dependency modules for version extraction + # This fills in versions for bazel_dep modules that don't use the sbom_metadata extension + dep_module_files = data.get("dep_module_files", []) + if dep_module_files: + dep_modules = parse_module_bazel_files(dep_module_files) + if "modules" not in metadata: + metadata["modules"] = {} + for name, mod_data in dep_modules.items(): + # Don't override entries already in metadata (from the extension) + if name not in metadata["modules"]: + metadata["modules"][name] = mod_data + + # Parse MODULE.bazel.lock files to infer selected module versions. + # This helps for modules that don't participate in the sbom_metadata + # extension (for example, transitive Bazel modules like boost.*). 
+ module_lockfiles = data.get("module_lockfiles", []) + if module_lockfiles: + lock_modules = parse_module_lockfiles(module_lockfiles) + if "modules" not in metadata: + metadata["modules"] = {} + for name, mod_data in lock_modules.items(): + if name not in metadata["modules"]: + metadata["modules"][name] = mod_data + + # Load crates metadata cache (licenses + checksums + versions) + crates_cache = load_crates_cache(args.crates_cache) + + # Add crates cache to metadata + if crates_cache: + if "crates" not in metadata: + metadata["crates"] = {} + for name, cache_data in crates_cache.items(): + metadata["crates"].setdefault(name, cache_data) + + # Apply BCR known licenses and user overrides to modules + apply_known_licenses(metadata) + + # Load cdxgen SBOM if provided (C++ dependency enrichment) + cpp_components = [] + if args.cdxgen_sbom: + cpp_components = load_cdxgen_sbom(args.cdxgen_sbom) + + # Filter external repos (exclude build tools) + external_repos = data.get("external_repos", []) + exclude_patterns = data.get("exclude_patterns", []) + filtered_repos = filter_repos(external_repos, exclude_patterns) + + # Build component list with metadata + components = [] + + for repo in filtered_repos: + component = resolve_component(repo, metadata) + if component: + components.append(component) + + # Deduplicate components by name + components = deduplicate_components(components) + + # Enrich components with C++ metadata cache + if cpp_components: + components = enrich_components_from_cpp_cache( + components, cpp_components, metadata + ) + components = deduplicate_components(components) + + # Mark missing C++ descriptions explicitly when cdxgen has no description. 
+ components = mark_missing_cpp_descriptions(components) + + # Generate timestamp in SPDX-compliant format (YYYY-MM-DDTHH:MM:SSZ) + timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + + # Get configuration + config = data.get("config", {}) + + # Auto-detect component_version from metadata if not explicitly set + component_name = config.get("component_name", "") + if not config.get("component_version") and component_name: + modules = metadata.get("modules", {}) + if component_name in modules: + config["component_version"] = modules[component_name].get("version", "") + + # Filter out the main component from the dependency list to avoid self-dependency + # (e.g., sbom for score_kyron should not list score_kyron as its own dependency) + if component_name: + components = [ + c for c in components if c.get("name") != component_name + ] + + # Generate outputs + if args.spdx_output: + spdx = generate_spdx(components, config, timestamp) + Path(args.spdx_output).write_text(json.dumps(spdx, indent=2), encoding="utf-8") + + if args.cyclonedx_output: + cdx = generate_cyclonedx( + components, + config, + timestamp, + external_dep_edges=data.get("external_dep_edges", []), + ) + Path(args.cyclonedx_output).write_text( + json.dumps(cdx, indent=2), encoding="utf-8" + ) + + return 0 + + +def filter_repos(repos: list[str], exclude_patterns: list[str]) -> list[str]: + """Filter out build tool repositories based on exclude patterns. + + Crates from crate_universe are always kept even if they match exclude patterns, + since they are legitimate dependencies, not build tools. 
+ + Args: + repos: List of repository names + exclude_patterns: Patterns to exclude + + Returns: + Filtered list of repository names + """ + filtered = [] + for repo in repos: + # Always keep crates from crate_universe - these are real dependencies + if "crate_index__" in repo or "crates_io__" in repo or "_crates__" in repo: + filtered.append(repo) + continue + + should_exclude = False + for pattern in exclude_patterns: + if pattern in repo: + should_exclude = True + break + if not should_exclude: + filtered.append(repo) + return filtered + + +def resolve_component( + repo_name: str, metadata: dict[str, Any] +) -> dict[str, Any] | None: + """Resolve repository to component with version and PURL. + + Args: + repo_name: Name of the repository + metadata: Metadata dictionary from module extension + + Returns: + Component dictionary or None if not resolved + """ + # Normalize repo name - bzlmod adds "+" suffix to module repos + normalized_name = repo_name.rstrip("+") + + # Check if it's a bazel_dep module + modules = metadata.get("modules", {}) + if normalized_name in modules: + mod = modules[normalized_name] + result: dict[str, Any] = { + "name": normalized_name, + "version": mod.get("version", "unknown"), + "purl": mod.get("purl", f"pkg:generic/{normalized_name}@unknown"), + "type": "library", + "supplier": mod.get("supplier", ""), + "license": mod.get("license", ""), + "cpe": mod.get("cpe", ""), + "aliases": mod.get("aliases", []), + "pedigree_ancestors": mod.get("pedigree_ancestors", []), + "pedigree_descendants": mod.get("pedigree_descendants", []), + "pedigree_variants": mod.get("pedigree_variants", []), + "pedigree_notes": mod.get("pedigree_notes", ""), + } + # MODULE.bazel.lock can provide a sha256 via source.json; expose it as + # checksum so CycloneDX hashes are populated for C/C++ modules. 
+ if mod.get("sha256"): + result["checksum"] = mod["sha256"] + return result + + # Check if it's an http_archive dependency + http_archives = metadata.get("http_archives", {}) + if normalized_name in http_archives: + archive = http_archives[normalized_name] + result = { + "name": normalized_name, + "version": archive.get("version", "unknown"), + "purl": archive.get("purl", f"pkg:generic/{normalized_name}@unknown"), + "type": "library", + "url": archive.get("url", ""), + "license": archive.get("license", ""), + "supplier": archive.get("supplier", ""), + "cpe": archive.get("cpe", ""), + "aliases": archive.get("aliases", []), + "pedigree_ancestors": archive.get("pedigree_ancestors", []), + "pedigree_descendants": archive.get("pedigree_descendants", []), + "pedigree_variants": archive.get("pedigree_variants", []), + "pedigree_notes": archive.get("pedigree_notes", ""), + } + if archive.get("sha256"): + result["checksum"] = archive["sha256"] + return result + + # Check if it's a git_repository dependency + git_repos = metadata.get("git_repositories", {}) + if normalized_name in git_repos: + repo = git_repos[normalized_name] + result = { + "name": normalized_name, + "version": repo.get("version", "unknown"), + "purl": repo.get("purl", f"pkg:generic/{normalized_name}@unknown"), + "type": "library", + "url": repo.get("remote", ""), + "license": repo.get("license", ""), + "supplier": repo.get("supplier", ""), + "cpe": repo.get("cpe", ""), + "aliases": repo.get("aliases", []), + "pedigree_ancestors": repo.get("pedigree_ancestors", []), + "pedigree_descendants": repo.get("pedigree_descendants", []), + "pedigree_variants": repo.get("pedigree_variants", []), + "pedigree_notes": repo.get("pedigree_notes", ""), + } + commit_date = repo.get("commit_date", "") + if result.get("version") in ("unknown", "") and commit_date: + result["version"] = commit_date + return result + + # Check if it's a crate from the metadata cache + # Cargo.lock uses underscores, Bazel uses hyphens — try 
both + crates = metadata.get("crates", {}) + crate_key = ( + normalized_name + if normalized_name in crates + else normalized_name.replace("-", "_") + ) + if crate_key in crates: + crate = crates[crate_key] + result = { + "name": normalized_name, + "version": crate.get("version", "unknown"), + "purl": crate.get("purl", f"pkg:cargo/{normalized_name}@unknown"), + "type": "library", + "source": "crates.io", + "license": crate.get("license", ""), + "description": crate.get("description", ""), + "supplier": crate.get("supplier", ""), + "cpe": crate.get("cpe", ""), + "aliases": crate.get("aliases", []), + "pedigree_ancestors": crate.get("pedigree_ancestors", []), + "pedigree_descendants": crate.get("pedigree_descendants", []), + "pedigree_variants": crate.get("pedigree_variants", []), + "pedigree_notes": crate.get("pedigree_notes", ""), + } + if crate.get("checksum"): + result["checksum"] = crate["checksum"] + return result + + # Handle score_ prefixed repos that might be modules + if normalized_name.startswith("score_"): + return { + "name": normalized_name, + "version": "unknown", + "purl": f"pkg:github/eclipse-score/{normalized_name}@unknown", + "type": "library", + "supplier": "Eclipse Foundation", + "license": "", + "cpe": "", + "aliases": [], + "pedigree_ancestors": [], + "pedigree_descendants": [], + "pedigree_variants": [], + "pedigree_notes": "", + } + + # Handle crate universe repos - bzlmod format + # e.g., rules_rust++crate+crate_index__serde-1.0.228 + # e.g., rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0 + cached_crates = metadata.get("crates", {}) + + if "crate_index__" in repo_name or "crate+" in repo_name: + # Extract the crate info part after crate_index__ + if "crate_index__" in repo_name: + crate_part = repo_name.split("crate_index__")[-1] + else: + crate_part = repo_name.split("+")[-1] + + # Parse name-version format (e.g., "serde-1.0.228") + # Handle complex names like "iceoryx2-qnx8-0.7.0" where last part is version + parts = 
crate_part.split("-") + if len(parts) >= 2: + # Find the version part (starts with a digit) + version_idx = -1 + for i, part in enumerate(parts): + if part and part[0].isdigit(): + version_idx = i + break + + if version_idx > 0: + crate_name = "-".join(parts[:version_idx]).replace("_", "-") + version = "-".join(parts[version_idx:]) + + # Look up crate metadata from cache + # Cargo.lock uses underscores, Bazel uses hyphens — try both + crate_meta = cached_crates.get(crate_name) or cached_crates.get( + crate_name.replace("-", "_"), {} + ) + + result = { + "name": crate_name, + "version": version, + "purl": f"pkg:cargo/{crate_name}@{version}", + "type": "library", + "source": "crates.io", + } + if crate_meta.get("license"): + result["license"] = crate_meta["license"] + if crate_meta.get("description"): + result["description"] = crate_meta["description"] + if crate_meta.get("supplier"): + result["supplier"] = crate_meta["supplier"] + if crate_meta.get("cpe"): + result["cpe"] = crate_meta["cpe"] + if crate_meta.get("aliases"): + result["aliases"] = crate_meta["aliases"] + if crate_meta.get("pedigree_ancestors"): + result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] + if crate_meta.get("pedigree_descendants"): + result["pedigree_descendants"] = crate_meta["pedigree_descendants"] + if crate_meta.get("pedigree_variants"): + result["pedigree_variants"] = crate_meta["pedigree_variants"] + if crate_meta.get("pedigree_notes"): + result["pedigree_notes"] = crate_meta["pedigree_notes"] + if crate_meta.get("repository"): + result["url"] = crate_meta["repository"] + if crate_meta.get("checksum"): + result["checksum"] = crate_meta["checksum"] + return result + + # Handle legacy crate universe format (e.g., crates_io__tokio-1.10.0) + if repo_name.startswith("crates_io__") or "_crates__" in repo_name: + parts = repo_name.split("__") + if len(parts) >= 2: + crate_info = parts[-1] + # Try to split by last hyphen to get name-version + last_hyphen = crate_info.rfind("-") + if 
last_hyphen > 0: + crate_name = crate_info[:last_hyphen].replace("_", "-") + version = crate_info[last_hyphen + 1 :] + + # Look up crate metadata from cache + # Cargo.lock uses underscores, Bazel uses hyphens — try both + crate_meta = cached_crates.get(crate_name) or cached_crates.get( + crate_name.replace("-", "_"), {} + ) + + result = { + "name": crate_name, + "version": version, + "purl": f"pkg:cargo/{crate_name}@{version}", + "type": "library", + "source": "crates.io", + } + if crate_meta.get("license"): + result["license"] = crate_meta["license"] + if crate_meta.get("description"): + result["description"] = crate_meta["description"] + if crate_meta.get("supplier"): + result["supplier"] = crate_meta["supplier"] + if crate_meta.get("cpe"): + result["cpe"] = crate_meta["cpe"] + if crate_meta.get("aliases"): + result["aliases"] = crate_meta["aliases"] + if crate_meta.get("pedigree_ancestors"): + result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] + if crate_meta.get("pedigree_descendants"): + result["pedigree_descendants"] = crate_meta["pedigree_descendants"] + if crate_meta.get("pedigree_variants"): + result["pedigree_variants"] = crate_meta["pedigree_variants"] + if crate_meta.get("pedigree_notes"): + result["pedigree_notes"] = crate_meta["pedigree_notes"] + if crate_meta.get("repository"): + result["url"] = crate_meta["repository"] + if crate_meta.get("checksum"): + result["checksum"] = crate_meta["checksum"] + return result + + # Check if repo is a sub-library of a known parent (e.g., boost.config+ -> boost) + # rules_boost splits Boost into individual repos like boost.config+, boost.assert+, etc. + if "." 
in normalized_name: + parent_name = normalized_name.split(".")[0].rstrip("+") + # Look up parent in all metadata sources (modules, http_archives, git_repos, licenses) + licenses = metadata.get("licenses", {}) + parent = None + if parent_name in modules: + parent = modules[parent_name] + elif parent_name in http_archives: + parent = http_archives[parent_name] + elif parent_name in git_repos: + parent = git_repos[parent_name] + elif parent_name in licenses: + parent = licenses[parent_name] + if parent: + parent_version = parent.get("version", "unknown") + result: dict[str, Any] = { + "name": normalized_name, + "version": parent_version, + "purl": f"pkg:generic/{normalized_name}@{parent_version}", + "type": "library", + "license": parent.get("license", ""), + "supplier": parent.get("supplier", ""), + } + # Propagate checksum from parent if available (e.g., http_archive + # sha256 or module sha256 from MODULE.bazel.lock). + if parent.get("sha256"): + result["checksum"] = parent["sha256"] + elif parent.get("checksum"): + result["checksum"] = parent["checksum"] + return result + + # Unknown repository - return with unknown version + return { + "name": repo_name, + "version": "unknown", + "purl": f"pkg:generic/{repo_name}@unknown", + "type": "library", + } + + +def deduplicate_components(components: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Remove duplicate components, keeping the one with most metadata. 
+ + Args: + components: List of component dictionaries + + Returns: + Deduplicated list of components + """ + seen: dict[str, dict[str, Any]] = {} + for comp in components: + name = comp.get("name", "") + if name not in seen: + seen[name] = comp + else: + # Keep the one with more information (non-unknown version preferred) + existing = seen[name] + if ( + existing.get("version") == "unknown" + and comp.get("version") != "unknown" + ): + seen[name] = comp + elif comp.get("license") and not existing.get("license"): + # Prefer component with license info + seen[name] = comp + + return list(seen.values()) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/sbom/internal/generator/spdx_formatter.py b/sbom/internal/generator/spdx_formatter.py new file mode 100644 index 0000000..6ad8af4 --- /dev/null +++ b/sbom/internal/generator/spdx_formatter.py @@ -0,0 +1,242 @@ +"""SPDX 2.3 JSON formatter for SBOM generation. + +This module generates SPDX 2.3 compliant JSON output from the component +information collected by the Bazel aspect and module extension. + +SPDX 2.3 Specification: https://spdx.github.io/spdx-spec/v2.3/ +""" + +import re +import uuid +from typing import Any + + +def _normalize_spdx_license(expr: str) -> str: + """Normalize SPDX boolean operators to uppercase as required by the spec. + + dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). + SPDX 2.3 requires uppercase OR/AND/WITH (Appendix IV). + Uses space-delimited substitution to avoid modifying license identifiers + that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). + """ + expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) + expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) + expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) + return expr + + +def generate_spdx( + components: list[dict[str, Any]], + config: dict[str, Any], + timestamp: str, +) -> dict[str, Any]: + """Generate SPDX 2.3 JSON document. 
def _normalize_spdx_license(expr: str) -> str:
    """Uppercase SPDX boolean operators as required by the spec.

    dash-license-scan emits lowercase operators (e.g. 'Apache-2.0 or MIT'),
    while SPDX 2.3 Appendix IV requires uppercase OR/AND/WITH.  Matching on
    space-delimited words keeps identifiers such as 'GPL-2.0-or-later'
    untouched.
    """
    for op in ("or", "and", "with"):
        expr = re.sub(" %s " % op, " %s " % op.upper(), expr, flags=re.IGNORECASE)
    return expr


def generate_spdx(
    components: list[dict[str, Any]],
    config: dict[str, Any],
    timestamp: str,
) -> dict[str, Any]:
    """Generate an SPDX 2.3 JSON document.

    Args:
        components: List of component dictionaries.
        config: Configuration dictionary with producer info.
        timestamp: ISO 8601 timestamp.

    Returns:
        SPDX 2.3 compliant dictionary.
    """
    namespace = config.get("namespace", "https://eclipse.dev/score")
    component_name = config.get("component_name", "unknown")
    component_version = config.get("component_version", "")
    producer_name = config.get("producer_name", "Eclipse Foundation")

    doc_uuid = uuid.uuid4()
    root_spdx_id = "SPDXRef-RootPackage"

    # Root package describing the component itself.
    packages: list[dict[str, Any]] = [
        {
            "SPDXID": root_spdx_id,
            "name": component_name,
            "versionInfo": component_version if component_version else "unversioned",
            "downloadLocation": "https://github.com/eclipse-score",
            "supplier": f"Organization: {producer_name}",
            "primaryPackagePurpose": "APPLICATION",
            "filesAnalyzed": False,
            "licenseConcluded": "NOASSERTION",
            "licenseDeclared": "NOASSERTION",
            "copyrightText": "NOASSERTION",
        }
    ]
    relationships: list[dict[str, Any]] = [
        {
            "spdxElementId": "SPDXRef-DOCUMENT",
            "relationshipType": "DESCRIBES",
            "relatedSpdxElement": root_spdx_id,
        }
    ]

    # One package + DEPENDS_ON edge per dependency component.
    for comp in components:
        pkg, pkg_id = _create_spdx_package(comp)
        packages.append(pkg)
        relationships.append(
            {
                "spdxElementId": root_spdx_id,
                "relationshipType": "DEPENDS_ON",
                "relatedSpdxElement": pkg_id,
            }
        )

    # Every LicenseRef-* used in an expression must be declared.
    extracted = _collect_extracted_license_infos(packages)

    document: dict[str, Any] = {
        "spdxVersion": "SPDX-2.3",
        "dataLicense": "CC0-1.0",
        "SPDXID": "SPDXRef-DOCUMENT",
        "name": f"SBOM for {component_name}",
        "documentNamespace": f"{namespace}/spdx/{_sanitize_id(component_name)}-{doc_uuid}",
        "creationInfo": {
            "created": timestamp,
            "creators": [
                f"Organization: {producer_name}",
                "Tool: score-sbom-generator",
            ],
        },
        "packages": packages,
        "relationships": relationships,
    }
    if extracted:
        document["hasExtractedLicensingInfos"] = extracted
    return document


def _create_spdx_package(
    component: dict[str, Any],
) -> tuple[dict[str, Any], str]:
    """Create an SPDX 2.3 Package for a component.

    Args:
        component: Component dictionary.

    Returns:
        Tuple of (SPDX Package dictionary, spdx_id string).
    """
    name = component.get("name", "unknown")
    version = component.get("version", "unknown")
    purl = component.get("purl", "")
    license_id = _normalize_spdx_license(component.get("license", ""))
    description = component.get("description", "")
    supplier = component.get("supplier", "")
    comp_type = component.get("type", "library")
    checksum = component.get("checksum", "")

    spdx_id = f"SPDXRef-{_sanitize_id(name)}-{_sanitize_id(version)}"

    # Download location: explicit URL wins, then crates.io convention.
    url = component.get("url", "")
    source = component.get("source", "")
    if url:
        download_location = url
    elif source == "crates.io":
        download_location = f"https://crates.io/crates/{name}/{version}"
    else:
        download_location = "NOASSERTION"

    package: dict[str, Any] = {
        "SPDXID": spdx_id,
        "name": name,
        "versionInfo": version,
        "downloadLocation": download_location,
        "primaryPackagePurpose": _map_type_to_purpose(comp_type),
        "filesAnalyzed": False,
        "licenseConcluded": license_id if license_id else "NOASSERTION",
        "licenseDeclared": license_id if license_id else "NOASSERTION",
        "copyrightText": "NOASSERTION",
    }

    if checksum:
        package["checksums"] = [{"algorithm": "SHA256", "checksumValue": checksum}]
    if description:
        package["description"] = description
    if supplier:
        package["supplier"] = f"Organization: {supplier}"
    if purl:
        package["externalRefs"] = [
            {
                "referenceCategory": "PACKAGE-MANAGER",
                "referenceType": "purl",
                "referenceLocator": purl,
            },
        ]

    return package, spdx_id


def _map_type_to_purpose(comp_type: str) -> str:
    """Map a component type string to an SPDX 2.3 primary package purpose."""
    return {
        "application": "APPLICATION",
        "library": "LIBRARY",
        "framework": "FRAMEWORK",
        "file": "FILE",
        "container": "CONTAINER",
        "firmware": "FIRMWARE",
        "device": "DEVICE",
        "data": "DATA",
    }.get(comp_type, "LIBRARY")


def _collect_extracted_license_infos(
    packages: list[dict[str, Any]],
) -> list[dict[str, str]]:
    """Collect LicenseRef-* identifiers from packages and build declarations.

    SPDX requires every LicenseRef-* used in license expressions to be
    declared in hasExtractedLicensingInfos.

    Args:
        packages: List of SPDX package dicts.

    Returns:
        List of extractedLicensingInfo entries, sorted by identifier.
    """
    pattern = re.compile(r"LicenseRef-[A-Za-z0-9\-.]+")
    refs: set[str] = set()
    for pkg in packages:
        for field in ("licenseConcluded", "licenseDeclared"):
            refs.update(pattern.findall(pkg.get(field, "")))
    return [
        {
            "licenseId": ref,
            "extractedText": f"See package metadata for license details ({ref})",
        }
        for ref in sorted(refs)
    ]


def _sanitize_id(value: str) -> str:
    """Sanitize a string for use in SPDX IDs.

    SPDX 2.3 IDs must match [a-zA-Z0-9.-]+; '_', ' ', '/', '@' are mapped
    to '-', any other character is dropped.
    """
    kept = [
        ch if (ch.isalnum() or ch in ".-") else "-"
        for ch in value
        if ch.isalnum() or ch in ".-" or ch in "_ /@"
    ]
    return "".join(kept) or "unknown"
+""" + +load(":providers.bzl", "SbomMetadataInfo") + +def _sbom_metadata_rule_impl(ctx): + """Implementation of sbom_metadata_rule. + + The metadata is passed as a JSON file to the SBOM generator action, + rather than being parsed at analysis time. + + Args: + ctx: The rule context + + Returns: + A list of providers including SbomMetadataInfo with file reference + """ + metadata_file = ctx.file.metadata_json + + # We can't read files at analysis time in Bazel rules, so we pass + # the file reference and let the generator read it at execution time. + # The SbomMetadataInfo provider carries empty dicts here - the actual + # metadata is read by the Python generator from the JSON file. + return [ + DefaultInfo(files = depset([metadata_file])), + SbomMetadataInfo( + modules = {}, + crates = {}, + http_archives = {}, + ), + # Also provide the file itself for the rule to use + OutputGroupInfo(metadata_file = depset([metadata_file])), + ] + +sbom_metadata_rule = rule( + implementation = _sbom_metadata_rule_impl, + attrs = { + "metadata_json": attr.label( + mandatory = True, + allow_single_file = [".json"], + doc = "JSON file containing SBOM metadata", + ), + }, + provides = [SbomMetadataInfo], + doc = "Exposes SBOM metadata collected by the module extension", +) diff --git a/sbom/internal/providers.bzl b/sbom/internal/providers.bzl new file mode 100644 index 0000000..e2d909a --- /dev/null +++ b/sbom/internal/providers.bzl @@ -0,0 +1,28 @@ +"""Providers for SBOM data propagation. 
# Collected by the aspect: per-target dependency information.
SbomDepsInfo = provider(
    doc = "Transitive dependency information for SBOM generation",
    fields = {
        "direct_deps": "depset of direct dependency labels",
        "transitive_deps": "depset of all transitive dependency labels",
        "external_repos": "depset of external repository names used",
        "external_dep_edges": "depset of external repo dependency edges (from::to)",
    },
)

# Collected by the module extension: metadata for every available module/crate.
SbomMetadataInfo = provider(
    doc = "Metadata about all available modules/crates",
    fields = {
        "modules": "dict of module_name -> {version, commit, registry, purl}",
        "crates": "dict of crate_name -> {version, checksum, purl}",
        "http_archives": "dict of repo_name -> {url, version, sha256, purl}",
    },
)
load(":aspect.bzl", "sbom_aspect")
load(":providers.bzl", "SbomDepsInfo")

def _sbom_impl(ctx):
    """Generates SBOM by combining aspect data with extension metadata.

    Args:
        ctx: The rule context

    Returns:
        DefaultInfo with generated SBOM files
    """

    # Merge the aspect-collected depsets across all requested targets.
    all_external_repos = depset(transitive = [
        dep[SbomDepsInfo].external_repos
        for dep in ctx.attr.targets
    ])
    all_transitive_deps = depset(transitive = [
        dep[SbomDepsInfo].transitive_deps
        for dep in ctx.attr.targets
    ])
    all_external_dep_edges = depset(transitive = [
        dep[SbomDepsInfo].external_dep_edges
        for dep in ctx.attr.targets
    ])

    # Metadata JSON produced by the module extension.
    metadata_file = ctx.file.metadata_json

    # Input file carrying dependency info for the Python generator.
    deps_json = ctx.actions.declare_file(ctx.attr.name + "_deps.json")

    target_labels = [str(t.label) for t in ctx.attr.targets]

    # Infer the cdxgen scan root:
    # - all targets from one external repo -> scan that repo tree,
    # - otherwise scan the current execroot.
    target_repos = []
    for t in ctx.attr.targets:
        repo = t.label.workspace_name
        if repo and repo not in target_repos:
            target_repos.append(repo)
    cdxgen_scan_root = "."
    if len(target_repos) == 1:
        cdxgen_scan_root = "external/{}".format(target_repos[0])

    exclude_patterns = ctx.attr.exclude_patterns

    # MODULE.bazel files / lockfiles from dependency modules, used by the
    # generator for version extraction.
    dep_module_paths = [f.path for f in ctx.files.dep_module_files]
    module_lock_paths = [f.path for f in ctx.files.module_lockfiles]

    deps_data = {
        "external_repos": all_external_repos.to_list(),
        "transitive_deps": [str(d) for d in all_transitive_deps.to_list()],
        "external_dep_edges": all_external_dep_edges.to_list(),
        "target_labels": target_labels,
        "exclude_patterns": exclude_patterns,
        "dep_module_files": dep_module_paths,
        "module_lockfiles": module_lock_paths,
        "config": {
            "producer_name": ctx.attr.producer_name,
            "producer_url": ctx.attr.producer_url,
            "component_name": ctx.attr.component_name if ctx.attr.component_name else ctx.attr.name,
            "component_version": ctx.attr.component_version,
            "namespace": ctx.attr.namespace,
            "sbom_authors": ctx.attr.sbom_authors,
            "generation_context": ctx.attr.generation_context,
            "sbom_tools": ctx.attr.sbom_tools,
        },
    }

    ctx.actions.write(
        output = deps_json,
        content = json.encode(deps_data),
    )

    # Declare the requested output formats and assemble generator args.
    outputs = []
    args = ctx.actions.args()
    args.add("--input", deps_json)
    args.add("--metadata", metadata_file)

    if "spdx" in ctx.attr.output_formats:
        spdx_out = ctx.actions.declare_file(ctx.attr.name + ".spdx.json")
        outputs.append(spdx_out)
        args.add("--spdx-output", spdx_out)

    if "cyclonedx" in ctx.attr.output_formats:
        cdx_out = ctx.actions.declare_file(ctx.attr.name + ".cdx.json")
        outputs.append(cdx_out)
        args.add("--cyclonedx-output", cdx_out)

    generator_inputs = [deps_json, metadata_file] + ctx.files.dep_module_files + ctx.files.module_lockfiles

    # Auto-generate the crates metadata cache when enabled and any
    # lockfile is available.
    crates_cache = None
    if (ctx.file.cargo_lockfile or
        ctx.files.module_lockfiles) and ctx.attr.auto_crates_cache:
        crates_cache = ctx.actions.declare_file(ctx.attr.name + "_crates_metadata.json")
        cache_inputs = [ctx.file._crates_cache_script]
        cache_cmd = "set -euo pipefail\npython3 {} {}".format(
            ctx.file._crates_cache_script.path,
            crates_cache.path,
        )
        if ctx.file.cargo_lockfile:
            cache_inputs.append(ctx.file.cargo_lockfile)
            cache_cmd += " --cargo-lock {}".format(ctx.file.cargo_lockfile.path)
        for lock in ctx.files.module_lockfiles:
            cache_inputs.append(lock)
            cache_cmd += " --module-lock {}".format(lock.path)
        ctx.actions.run_shell(
            inputs = cache_inputs,
            outputs = [crates_cache],
            command = cache_cmd,
            mnemonic = "CratesCacheGenerate",
            progress_message = "Generating crates metadata cache for %s" % ctx.attr.name,
            # The cache script talks to crates.io / dash-license-scan.
            execution_requirements = {"requires-network": ""},
            use_default_shell_env = True,
        )

    # Use the provided cdxgen SBOM, or auto-generate one when enabled.
    cdxgen_sbom = ctx.file.cdxgen_sbom
    if not cdxgen_sbom and ctx.attr.auto_cdxgen:
        cdxgen_sbom = ctx.actions.declare_file(ctx.attr.name + "_cdxgen.cdx.json")
        ctx.actions.run(
            outputs = [cdxgen_sbom],
            executable = ctx.executable._npm,
            arguments = [
                "exec",
                "--",
                "@cyclonedx/cdxgen",
                "-t",
                "cpp",
                "--deep",
                "-r",
                "-o",
                cdxgen_sbom.path,
                cdxgen_scan_root,
            ],
            mnemonic = "CdxgenGenerate",
            progress_message = "Generating cdxgen SBOM for %s" % ctx.attr.name,
            # cdxgen needs to recursively scan source trees. Running sandboxed
            # with only declared file inputs makes the scan effectively empty.
            execution_requirements = {"no-sandbox": "1"},
        )

    if cdxgen_sbom:
        args.add("--cdxgen-sbom", cdxgen_sbom)
        generator_inputs.append(cdxgen_sbom)

    if crates_cache:
        args.add("--crates-cache", crates_cache)
        generator_inputs.append(crates_cache)

    # Main SBOM generation step.
    ctx.actions.run(
        inputs = generator_inputs,
        outputs = outputs,
        executable = ctx.executable._generator,
        arguments = [args],
        mnemonic = "SbomGenerate",
        progress_message = "Generating SBOM for %s" % ctx.attr.name,
    )

    return [DefaultInfo(files = depset(outputs))]

sbom_rule = rule(
    implementation = _sbom_impl,
    attrs = {
        "targets": attr.label_list(
            mandatory = True,
            aspects = [sbom_aspect],
            doc = "Targets to generate SBOM for",
        ),
        "output_formats": attr.string_list(
            default = ["spdx", "cyclonedx"],
            doc = "Output formats: spdx, cyclonedx",
        ),
        "producer_name": attr.string(
            default = "Eclipse Foundation",
            doc = "SBOM producer organization name",
        ),
        "producer_url": attr.string(
            default = "https://projects.eclipse.org/projects/automotive.score",
            doc = "SBOM producer URL",
        ),
        "component_name": attr.string(
            doc = "Component name (defaults to rule name)",
        ),
        "component_version": attr.string(
            default = "",
            doc = "Component version",
        ),
        "sbom_authors": attr.string_list(
            default = [],
            doc = "SBOM author(s) (distinct from software producers)",
        ),
        "generation_context": attr.string(
            default = "",
            doc = "SBOM generation context: pre-build, build, post-build",
        ),
        "sbom_tools": attr.string_list(
            default = [],
            doc = "Additional SBOM generation tool names",
        ),
        "namespace": attr.string(
            default = "https://eclipse.dev/score",
            doc = "SBOM namespace URI",
        ),
        "exclude_patterns": attr.string_list(
            default = [
                "rules_rust",
                "rules_cc",
                "bazel_tools",
                "platforms",
                "bazel_skylib",
                "rules_python",
                "rules_proto",
                "protobuf",
                "local_config_",
                "remote_",
            ],
            doc = "External repo patterns to exclude (build tools)",
        ),
        "metadata_json": attr.label(
            mandatory = True,
            allow_single_file = [".json"],
            doc = "Metadata JSON file from sbom_metadata extension",
        ),
        "dep_module_files": attr.label_list(
            allow_files = True,
            default = [],
            doc = "MODULE.bazel files from dependency modules for automatic version extraction",
        ),
        "cargo_lockfile": attr.label(
            allow_single_file = True,
            doc = "Optional Cargo.lock file for automatic crate metadata extraction",
        ),
        "module_lockfiles": attr.label_list(
            allow_files = True,
            doc = "MODULE.bazel.lock files for crate metadata extraction (e.g., from score_crates and workspace)",
        ),
        "cdxgen_sbom": attr.label(
            allow_single_file = [".json"],
            doc = "Optional CycloneDX JSON from cdxgen for C++ dependency enrichment",
        ),
        "auto_cdxgen": attr.bool(
            default = False,
            doc = "Automatically run cdxgen when no cdxgen_sbom is provided",
        ),
        "_npm": attr.label(
            default = "//sbom:npm_wrapper",
            executable = True,
            cfg = "exec",
        ),
        "auto_crates_cache": attr.bool(
            default = True,
            doc = "Automatically build crates metadata cache when cargo_lockfile or module_lockfile is provided",
        ),
        "_crates_cache_script": attr.label(
            default = "//sbom/scripts:generate_crates_metadata_cache.py",
            allow_single_file = True,
        ),
        "_generator": attr.label(
            default = "//sbom/internal/generator:sbom_generator",
            executable = True,
            cfg = "exec",
        ),
    },
    doc = "Generates SBOM for specified targets in SPDX and CycloneDX formats",
)
PATH="/usr/local/bin:/usr/bin:/bin:$PATH" + +# If called with "exec -- @cyclonedx/cdxgen", just run cdxgen directly +if [[ "$1" == "exec" && "$2" == "--" && "$3" == "@cyclonedx/cdxgen" ]]; then + shift 3 # Remove "exec -- @cyclonedx/cdxgen" + exec cdxgen "$@" +else + # Otherwise, run npm normally + exec npm "$@" +fi diff --git a/sbom/scripts/BUILD.bazel b/sbom/scripts/BUILD.bazel new file mode 100644 index 0000000..c33b3c6 --- /dev/null +++ b/sbom/scripts/BUILD.bazel @@ -0,0 +1,5 @@ +package(default_visibility = ["//sbom:__subpackages__"]) + +exports_files([ + "generate_crates_metadata_cache.py", +]) diff --git a/sbom/scripts/generate_cpp_metadata_cache.py b/sbom/scripts/generate_cpp_metadata_cache.py new file mode 100644 index 0000000..a45ec38 --- /dev/null +++ b/sbom/scripts/generate_cpp_metadata_cache.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +"""Generate cpp_metadata.json cache from cdxgen CycloneDX output. + +Usage: + # Generate from cdxgen output: + npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json + python3 generate_cpp_metadata_cache.py cdxgen_output.cdx.json ../cpp_metadata.json + + # Or pipe directly: + npx @cyclonedx/cdxgen -t cpp --deep -r | python3 generate_cpp_metadata_cache.py - ../cpp_metadata.json +""" + +import argparse +import json +import sys + + +def convert_cdxgen_to_cache(cdxgen_path: str) -> dict: + """Convert CycloneDX JSON from cdxgen to internal cache format.""" + if cdxgen_path == "-": + cdx_data = json.load(sys.stdin) + else: + with open(cdxgen_path, encoding="utf-8") as f: + cdx_data = json.load(f) + + if cdx_data.get("bomFormat") != "CycloneDX": + print("Error: Input is not a CycloneDX JSON file", file=sys.stderr) + sys.exit(1) + + cache = {} + for comp in cdx_data.get("components", []): + name = comp.get("name", "") + if not name: + continue + + entry = { + "version": comp.get("version", "unknown"), + } + + # License + licenses = comp.get("licenses", []) + if licenses: + first = licenses[0] + lic_obj = 
first.get("license", {}) + lic_id = lic_obj.get("id", "") or lic_obj.get("name", "") + if not lic_id: + lic_id = first.get("expression", "") + if lic_id: + entry["license"] = lic_id + + # Description + if comp.get("description"): + entry["description"] = comp["description"] + + # Supplier + supplier = comp.get("supplier", {}) + if supplier and supplier.get("name"): + entry["supplier"] = supplier["name"] + elif comp.get("publisher"): + entry["supplier"] = comp["publisher"] + + # PURL + if comp.get("purl"): + entry["purl"] = comp["purl"] + + # URL from externalReferences + for ref in comp.get("externalReferences", []): + if ref.get("type") in ("website", "distribution", "vcs") and ref.get("url"): + entry["url"] = ref["url"] + break + + cache[name] = entry + + return cache + + +def main(): + parser = argparse.ArgumentParser( + description="Convert cdxgen CycloneDX output to cpp_metadata.json cache" + ) + parser.add_argument("input", help="cdxgen CycloneDX JSON file (or - for stdin)") + parser.add_argument( + "output", + nargs="?", + default="cpp_metadata.json", + help="Output cache file (default: cpp_metadata.json)", + ) + parser.add_argument( + "--merge", + help="Merge with existing cache file (existing entries take precedence)", + ) + args = parser.parse_args() + + cache = convert_cdxgen_to_cache(args.input) + + if args.merge: + try: + with open(args.merge, encoding="utf-8") as f: + existing = json.load(f) + # Existing entries take precedence + for name, data in cache.items(): + if name not in existing: + existing[name] = data + cache = existing + except (OSError, json.JSONDecodeError): + pass + + with open(args.output, "w", encoding="utf-8") as f: + json.dump(cache, f, indent=2, sort_keys=True) + f.write("\n") + + print(f"Generated {args.output} with {len(cache)} C++ dependencies") + + +if __name__ == "__main__": + main() diff --git a/sbom/scripts/generate_crates_metadata_cache.py b/sbom/scripts/generate_crates_metadata_cache.py new file mode 100755 index 
0000000..20eb138 --- /dev/null +++ b/sbom/scripts/generate_crates_metadata_cache.py @@ -0,0 +1,540 @@ +#!/usr/bin/env python3 +"""Generate crates metadata cache for SBOM generation. + +This script parses Cargo.lock files and/or MODULE.bazel.lock files for +crate version/checksum data, then fetches license metadata via +dash-license-scan (Eclipse Foundation + ClearlyDefined) and creates a +cache file for SBOM generation. + +Usage: + python3 generate_crates_metadata_cache.py --module-lock + python3 generate_crates_metadata_cache.py --cargo-lock + python3 generate_crates_metadata_cache.py --cargo-lock --module-lock + +Example: + python3 generate_crates_metadata_cache.py crates_metadata.json \\ + --module-lock ../../score-crates/MODULE.bazel.lock +""" + +import argparse +import json +import os +import re +import shutil +import subprocess +import sys +import tempfile +import urllib.request +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path +from typing import Any, Dict + + +def parse_cargo_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: + """Parse Cargo.lock and extract crate information. + + Args: + lockfile_path: Path to Cargo.lock file + + Returns: + Dict mapping crate name to {version, checksum, source} + """ + try: + import tomllib as tomli # Python 3.11+ + except ImportError: + try: + import tomli + except ImportError: + print( + "ERROR: tomli/tomllib library not found. 
Use Python 3.11+ or install tomli", + file=sys.stderr, + ) + sys.exit(1) + + with open(lockfile_path, "rb") as f: + lock_data = tomli.load(f) + + crates = {} + for package in lock_data.get("package", []): + name = package["name"] + source = package.get("source", "") + + # Only include crates from crates.io + if "registry+https://github.com/rust-lang/crates.io-index" in source: + crates[name] = { + "name": name, + "version": package["version"], + "checksum": package.get("checksum", ""), + "source": source, + } + + return crates + + +def parse_module_bazel_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: + """Parse MODULE.bazel.lock and extract crate information from cargo-bazel resolution. + + The MODULE.bazel.lock (from score_crates or similar) contains resolved crate + specs under moduleExtensions -> crate_universe -> generatedRepoSpecs. + Each crate entry has name, version, sha256, and download URL. + + Args: + lockfile_path: Path to MODULE.bazel.lock file + + Returns: + Dict mapping crate name to {version, checksum, source} + """ + with open(lockfile_path, encoding="utf-8") as f: + lock_data = json.load(f) + + crates = {} + extensions = lock_data.get("moduleExtensions", {}) + + # Find the crate_universe extension (key contains "crate_universe" or "crate") + crate_ext = None + for ext_key, ext_val in extensions.items(): + if "crate" in ext_key.lower(): + crate_ext = ext_val + break + + if not crate_ext: + print( + " WARNING: No crate extension found in MODULE.bazel.lock", file=sys.stderr + ) + return crates + + # Get generatedRepoSpecs from 'general' (or the first available key) + general = crate_ext.get("general", {}) + specs = general.get("generatedRepoSpecs", {}) + + for repo_name, spec in specs.items(): + # Skip the crate_index meta-repo itself + if repo_name == "crate_index" or not repo_name.startswith("crate_index__"): + continue + + crate_part = repo_name.replace("crate_index__", "") + + # Parse name-version (e.g., "serde-1.0.228", 
"iceoryx2-qnx8-0.7.0") + m = re.match(r"^(.+?)-(\d+\.\d+\.\d+.*)$", crate_part) + if not m: + continue + + name = m.group(1) + version = m.group(2) + attrs = spec.get("attributes", {}) + sha256 = attrs.get("sha256", "") + + crates[name] = { + "name": name, + "version": version, + "checksum": sha256, + "source": "module-bazel-lock", + } + + return crates + + +def build_dash_coordinates(crates: Dict[str, Dict[str, Any]]) -> list[str]: + """Build Eclipse dash-license-scan coordinate strings from crate data. + + Args: + crates: Dict mapping crate name to {name, version, checksum, ...} + + Returns: + Sorted list of coordinate strings: "crate/cratesio/-/{name}/{version}" + """ + return [ + f"crate/cratesio/-/{info['name']}/{info['version']}" + for _key, info in sorted(crates.items()) + ] + + +def generate_synthetic_cargo_lock( + crates: Dict[str, Dict[str, Any]], output_path: str +) -> None: + """Generate a minimal synthetic Cargo.lock from parsed crate data. + + The dash-license-scan parser splits on '[[package]]' blocks and extracts + name, version, and source fields. Source must contain 'crates' if present. 
+ + Args: + crates: Dict mapping crate name to {name, version, checksum, source} + output_path: Path to write the synthetic Cargo.lock + """ + lines = ["version = 4", ""] + for _name, info in sorted(crates.items()): + lines.append("[[package]]") + lines.append(f'name = "{info["name"]}"') + lines.append(f'version = "{info["version"]}"') + lines.append( + 'source = "registry+https://github.com/rust-lang/crates.io-index"' + ) + lines.append("") + + with open(output_path, "w", encoding="utf-8") as f: + f.write("\n".join(lines)) + + +def _find_uvx() -> str: + """Locate the uvx binary, checking PATH and common install locations.""" + found = shutil.which("uvx") + if found: + return found + + # Standard uv install location (works inside Bazel sandbox where PATH is minimal) + home = os.environ.get("HOME", os.path.expanduser("~")) + candidate = os.path.join(home, ".local", "bin", "uvx") + if os.path.isfile(candidate) and os.access(candidate, os.X_OK): + return candidate + + return "uvx" # fall back, will raise FileNotFoundError in subprocess + + +def run_dash_license_scan( + cargo_lock_path: str, summary_output_path: str +) -> None: + """Invoke dash-license-scan via uvx and write summary to file. + + Args: + cargo_lock_path: Path to (real or synthetic) Cargo.lock + summary_output_path: Path to write the dash-licenses summary CSV + + Raises: + SystemExit: If uvx/dash-license-scan is not found or fatally crashes + """ + uvx = _find_uvx() + cmd = [ + uvx, + "--from", + "dash-license-scan@git+https://github.com/eclipse-score/dash-license-scan", + "dash-license-scan", + "--summary", + summary_output_path, + cargo_lock_path, + ] + print(f"Running: {' '.join(cmd)}") + + # Redirect uv's cache and tool directories to writable temp locations. + # Inside Bazel sandbox, ~/.cache and ~/.local/share are read-only. 
+ env = os.environ.copy() + uv_tmp = os.path.join(tempfile.gettempdir(), "uv_sbom") + if "UV_CACHE_DIR" not in env: + env["UV_CACHE_DIR"] = os.path.join(uv_tmp, "cache") + if "UV_TOOL_DIR" not in env: + env["UV_TOOL_DIR"] = os.path.join(uv_tmp, "tools") + + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=600, + env=env, + ) + except FileNotFoundError: + print( + "ERROR: 'uvx' not found on PATH or ~/.local/bin/. Install uv: https://docs.astral.sh/uv/", + file=sys.stderr, + ) + sys.exit(1) + except subprocess.TimeoutExpired: + print( + "ERROR: dash-license-scan timed out after 600 seconds", file=sys.stderr + ) + sys.exit(1) + + # dash-license-scan exits with returncode = number of restricted items. + # This is normal behavior, not an error. Only signal kills are fatal. + if result.returncode < 0: + print( + f"ERROR: dash-license-scan killed by signal {-result.returncode}", + file=sys.stderr, + ) + if result.stderr: + print(result.stderr, file=sys.stderr) + sys.exit(1) + + if result.stderr: + # Print dash-license-scan's own output (INFO lines from the JAR) + for line in result.stderr.splitlines(): + print(f" {line}") + + if not os.path.exists(summary_output_path): + print( + f"ERROR: dash-license-scan did not produce summary file: {summary_output_path}", + file=sys.stderr, + ) + sys.exit(1) + + if result.returncode > 0: + print( + f" NOTE: {result.returncode} crate(s) have 'restricted' license status" + ) + + +def parse_dash_summary(summary_path: str) -> Dict[str, str]: + """Parse the dash-licenses summary CSV file into a license lookup dict. 
+ + Each line has format: + crate/cratesio/-//, , , + + Args: + summary_path: Path to the dash-licenses summary file + + Returns: + Dict mapping crate name to SPDX license expression string + """ + licenses: Dict[str, str] = {} + with open(summary_path, encoding="utf-8") as f: + for line in f: + line = line.strip() + if not line: + continue + parts = [p.strip() for p in line.split(",")] + if len(parts) < 4: + continue + + content_id = parts[0] + license_expr = parts[1].strip() + + # Extract crate name from content_id: "crate/cratesio/-//" + id_parts = content_id.split("/") + if len(id_parts) >= 5 and id_parts[0] == "crate": + crate_name = id_parts[3] + if license_expr: + licenses[crate_name] = license_expr + + return licenses + + +def _extract_supplier(repository_url: str) -> str: + """Extract supplier (GitHub org/user) from a repository URL. + + Examples: + https://github.com/serde-rs/serde -> serde-rs + https://github.com/eclipse-iceoryx/iceoryx2 -> eclipse-iceoryx + """ + if not repository_url: + return "" + m = re.match(r"https?://github\.com/([^/]+)/", repository_url) + return m.group(1) if m else "" + + +def _fetch_one_crate_meta(name: str) -> tuple[str, Dict[str, str]]: + """Fetch metadata for a single crate from crates.io API. + + Returns (name, {description, supplier}) dict. + If the crate isn't found, retries with platform suffixes stripped + (e.g. "-qnx8") to find the upstream crate. + """ + candidates = [name] + # Platform-specific forks (e.g. 
iceoryx2-bb-lock-free-qnx8 -> iceoryx2-bb-lock-free) + for suffix in ("-qnx8",): + if name.endswith(suffix): + candidates.append(name[: -len(suffix)]) + + for candidate in candidates: + url = f"https://crates.io/api/v1/crates/{candidate}" + req = urllib.request.Request( + url, + headers={"User-Agent": "score-sbom-tool (https://eclipse.dev/score)"}, + ) + try: + with urllib.request.urlopen(req, timeout=10) as resp: + data = json.loads(resp.read()) + crate = data.get("crate", {}) + desc = (crate.get("description") or "").strip() + supplier = _extract_supplier(crate.get("repository", "")) + if desc or supplier: + return name, {"description": desc, "supplier": supplier} + except Exception: + continue + return name, {} + + +def fetch_crate_metadata_from_cratesio( + crate_names: list[str], +) -> Dict[str, Dict[str, str]]: + """Fetch metadata (description, supplier) from crates.io API (parallel). + + Args: + crate_names: List of crate names to look up + + Returns: + Dict mapping crate name to {description, supplier} + """ + total = len(crate_names) + print(f"Fetching metadata from crates.io for {total} crates...") + + metadata: Dict[str, Dict[str, str]] = {} + done = 0 + with ThreadPoolExecutor(max_workers=10) as pool: + futures = {pool.submit(_fetch_one_crate_meta, n): n for n in crate_names} + for future in as_completed(futures): + name, meta = future.result() + if meta: + metadata[name] = meta + done += 1 + if done % 50 == 0: + print(f" ... {done}/{total} crates queried") + + with_desc = sum(1 for m in metadata.values() if m.get("description")) + with_supplier = sum(1 for m in metadata.values() if m.get("supplier")) + print( + f"Retrieved from crates.io: {with_desc} descriptions, {with_supplier} suppliers" + ) + return metadata + + +def generate_cache( + cargo_lock_path: str | None = None, + module_lock_paths: list[str] | None = None, +) -> Dict[str, Dict[str, Any]]: + """Generate metadata cache from lockfiles + dash-license-scan. + + 1. 
Parse Cargo.lock and/or MODULE.bazel.lock files for crate names, versions, checksums + 2. Generate a synthetic Cargo.lock combining all crates + 3. Run dash-license-scan for license data + 4. Fetch descriptions from crates.io (parallel) + 5. Combine version/checksum from lockfile with license and description + + Args: + cargo_lock_path: Optional path to Cargo.lock file + module_lock_paths: Optional list of paths to MODULE.bazel.lock files + + Returns: + Dict mapping crate name to metadata + """ + crates: Dict[str, Dict[str, Any]] = {} + + if cargo_lock_path: + print(f"Parsing {cargo_lock_path}...") + crates = parse_cargo_lock(cargo_lock_path) + print(f"Found {len(crates)} crates from Cargo.lock") + + # Merge crates from MODULE.bazel.lock files + for module_lock_path in (module_lock_paths or []): + print(f"Parsing {module_lock_path}...") + module_crates = parse_module_bazel_lock(module_lock_path) + added = 0 + for name, info in module_crates.items(): + if name not in crates: + crates[name] = info + added += 1 + print(f"Found {len(module_crates)} crates in {module_lock_path} ({added} new)") + + if not crates: + print("No crates found in lockfiles.") + return {} + + # Generate synthetic Cargo.lock containing only crates.io crates. + # This avoids dash-license-scan's ValueError on non-crates.io sources + # (git dependencies, path dependencies) that may be in a real Cargo.lock. 
def main():
    """CLI entry point: generate (and optionally merge) the crates metadata cache.

    Parses lockfile arguments, builds the cache via generate_cache(),
    optionally merges it with an existing cache file, writes the result as
    JSON, and prints coverage statistics.

    Returns:
        0 on success (also when no crates were found and an empty cache
        was written).
    """
    parser = argparse.ArgumentParser(
        description="Generate crates metadata cache for SBOM generation (via dash-license-scan)"
    )
    parser.add_argument(
        "output",
        nargs="?",
        default="crates_metadata.json",
        help="Output JSON file (default: crates_metadata.json)",
    )
    parser.add_argument("--cargo-lock", help="Path to Cargo.lock file")
    parser.add_argument(
        "--module-lock",
        action="append",
        default=[],
        help="Path to MODULE.bazel.lock for additional crates (can be repeated)",
    )
    parser.add_argument(
        "--merge", help="Merge with existing cache file instead of overwriting"
    )

    args = parser.parse_args()

    if not args.cargo_lock and not args.module_lock:
        parser.error("At least one of --cargo-lock or --module-lock is required")

    # Generate new cache
    cache = generate_cache(
        cargo_lock_path=args.cargo_lock,
        module_lock_paths=args.module_lock,
    )

    # Merge with existing cache if requested. New data wins on conflicts,
    # but entries absent from the current lockfiles are preserved.
    if args.merge and Path(args.merge).exists():
        print(f"\nMerging with existing cache: {args.merge}")
        # JSON files are always UTF-8; don't rely on the platform default.
        with open(args.merge, encoding="utf-8") as f:
            existing = json.load(f)

        merged = existing.copy()
        merged.update(cache)
        cache = merged
        print(f"Merged cache now contains {len(cache)} entries")

    if not cache:
        # Still write a valid (empty) JSON file so downstream consumers
        # always find a parseable cache.
        print("\nNo crates to write.")
        with open(args.output, "w", encoding="utf-8") as f:
            json.dump({}, f)
        return 0

    # Write cache
    print(f"\nWriting cache to {args.output}...")
    with open(args.output, "w", encoding="utf-8") as f:
        json.dump(cache, f, indent=2, sort_keys=True)

    # Print statistics. cache is non-empty here, so the percentage
    # divisions below cannot divide by zero.
    total = len(cache)
    with_license = sum(1 for c in cache.values() if c.get("license"))
    with_checksum = sum(1 for c in cache.values() if c.get("checksum"))
    with_desc = sum(1 for c in cache.values() if c.get("description"))
    with_supplier = sum(1 for c in cache.values() if c.get("supplier"))

    print("\n✓ Cache generated successfully!")
    print(f"  Total crates: {total}")
    print(f"  With licenses: {with_license} ({with_license / total * 100:.1f}%)")
    print(f"  With checksums: {with_checksum} ({with_checksum / total * 100:.1f}%)")
    print(f"  With descriptions: {with_desc} ({with_desc / total * 100:.1f}%)")
    print(f"  With suppliers: {with_supplier} ({with_supplier / total * 100:.1f}%)")

    return 0
+ +This script converts an SPDX 2.3 SBOM JSON file into the snapshot format +expected by the GitHub Dependency Submission API, enabling Dependabot +vulnerability alerts on dependencies declared in the SBOM. + +GitHub Dependency Submission API: + https://docs.github.com/en/rest/dependency-graph/dependency-submission + +Usage: + python3 spdx_to_github_snapshot.py \\ + --input my_sbom.spdx.json \\ + --output snapshot.json \\ + --sha \\ + --ref refs/heads/main \\ + --job-correlator my-workflow_sbom \\ + --job-id +""" + +import argparse +import json +import sys +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + + +DETECTOR_NAME = "score-sbom-generator" +DETECTOR_VERSION = "0.1.0" +DETECTOR_URL = "https://github.com/eclipse-score/tooling/tree/main/sbom" + + +def _extract_purl(package: dict[str, Any]) -> str | None: + """Extract PURL from SPDX package externalRefs.""" + for ref in package.get("externalRefs", []): + if ref.get("referenceType") == "purl": + return ref.get("referenceLocator", "") + return None + + +def _package_key(package: dict[str, Any]) -> str: + """Return a stable key for a package (name@version or SPDXID).""" + name = package.get("name", "") + version = package.get("versionInfo", "") + if name and version: + return f"{name}@{version}" + return package.get("SPDXID", name or "unknown") + + +def convert_spdx_to_snapshot( + spdx: dict[str, Any], + sha: str, + ref: str, + job_correlator: str, + job_id: str, +) -> dict[str, Any]: + """Convert SPDX 2.3 document to GitHub Dependency Submission snapshot. + + Args: + spdx: Parsed SPDX 2.3 JSON document + sha: Git commit SHA (40 hex chars) + ref: Git ref (e.g. 
refs/heads/main) + job_correlator: Unique string identifying the workflow + SBOM target + job_id: GitHub Actions run ID (or any unique job identifier) + + Returns: + GitHub Dependency Submission snapshot dict + """ + packages_by_id: dict[str, dict[str, Any]] = {} + for pkg in spdx.get("packages", []): + spdx_id = pkg.get("SPDXID", "") + if spdx_id: + packages_by_id[spdx_id] = pkg + + # Find the root document package (DESCRIBES relationship target) + relationships = spdx.get("relationships", []) + root_ids: set[str] = set() + direct_ids: set[str] = set() + + for rel in relationships: + rel_type = rel.get("relationshipType", "") + element = rel.get("spdxElementId", "") + related = rel.get("relatedSpdxElement", "") + + if rel_type == "DESCRIBES": + root_ids.add(related) + elif rel_type in ("DEPENDS_ON", "DYNAMIC_LINK", "STATIC_LINK", "CONTAINS"): + if element in root_ids: + direct_ids.add(related) + + # Build dependency map: which packages depend on which + depends_on: dict[str, list[str]] = {} + for rel in relationships: + rel_type = rel.get("relationshipType", "") + element = rel.get("spdxElementId", "") + related = rel.get("relatedSpdxElement", "") + if rel_type in ("DEPENDS_ON", "DYNAMIC_LINK", "STATIC_LINK", "CONTAINS"): + depends_on.setdefault(element, []).append(related) + + # Manifest name from SBOM document name or file name + doc_name = spdx.get("name", "sbom") + manifest_name = doc_name.replace(" ", "_").replace("/", "_") + + # Build resolved packages dict (exclude root/document descriptor packages) + resolved: dict[str, dict[str, Any]] = {} + + for spdx_id, pkg in packages_by_id.items(): + # Skip the SBOM document itself (SPDXRef-DOCUMENT) and root component + if spdx_id in root_ids or spdx_id == "SPDXRef-DOCUMENT": + continue + + purl = _extract_purl(pkg) + if not purl: + # Skip packages without a PURL — Dependabot can't use them + continue + + key = _package_key(pkg) + + # Relationship: direct if root explicitly depends on it, else indirect + 
def main() -> int:
    """CLI entry point: read an SPDX file, write a Dependency Submission snapshot.

    Returns:
        0 on success, 1 when the input file is missing or not valid JSON.
    """
    parser = argparse.ArgumentParser(
        description="Convert SPDX 2.3 JSON to GitHub Dependency Submission snapshot"
    )
    parser.add_argument("--input", required=True, help="Path to SPDX 2.3 JSON file")
    parser.add_argument("--output", required=True, help="Output snapshot JSON path")
    parser.add_argument(
        "--sha", required=True, help="Git commit SHA (40 hex chars)"
    )
    parser.add_argument(
        "--ref", required=True, help="Git ref (e.g. refs/heads/main)"
    )
    parser.add_argument(
        "--job-correlator",
        default="score-sbom_sbom",
        help="Unique workflow+target identifier for Dependency Submission API",
    )
    parser.add_argument(
        "--job-id", default="0", help="GitHub Actions run ID (or unique job ID)"
    )
    opts = parser.parse_args()

    spdx_path = Path(opts.input)
    if not spdx_path.exists():
        print(f"Error: input file not found: {spdx_path}", file=sys.stderr)
        return 1

    try:
        with spdx_path.open() as handle:
            document = json.load(handle)
    except json.JSONDecodeError as e:
        print(f"Error: invalid JSON in {spdx_path}: {e}", file=sys.stderr)
        return 1

    declared_version = document.get("spdxVersion", "")
    if not declared_version.startswith("SPDX-"):
        # Non-fatal: still attempt conversion, but surface the mismatch.
        print(
            f"Warning: unexpected spdxVersion '{declared_version}', expected SPDX-2.x",
            file=sys.stderr,
        )

    result = convert_spdx_to_snapshot(
        spdx=document,
        sha=opts.sha,
        ref=opts.ref,
        job_correlator=opts.job_correlator,
        job_id=opts.job_id,
    )

    out_path = Path(opts.output)
    with out_path.open("w") as handle:
        json.dump(result, handle, indent=2)

    converted = sum(
        len(entry["resolved"]) for entry in result["manifests"].values()
    )
    print(
        f"Converted {len(document.get('packages', []))} SPDX packages → "
        f"{converted} Dependency Submission packages"
    )
    print(f"Output: {out_path}")
    return 0
+ +load("@rules_python//python:defs.bzl", "py_test") + +package(default_visibility = ["//visibility:private"]) + +py_test( + name = "test_spdx_formatter", + srcs = ["test_spdx_formatter.py"], + deps = ["//sbom/internal/generator:spdx_formatter"], +) + +py_test( + name = "test_cyclonedx_formatter", + srcs = ["test_cyclonedx_formatter.py"], + deps = ["//sbom/internal/generator:cyclonedx_formatter"], +) + +py_test( + name = "test_bcr_known_licenses", + srcs = ["test_bcr_known_licenses.py"], + deps = ["//sbom/internal/generator:sbom_generator_lib"], +) + +py_test( + name = "test_cpp_enrich_checksum", + srcs = ["test_cpp_enrich_checksum.py"], + data = ["//sbom:cpp_metadata.json"], + deps = ["//sbom/internal/generator:sbom_generator_lib"], +) diff --git a/sbom/tests/__init__.py b/sbom/tests/__init__.py new file mode 100644 index 0000000..b82b623 --- /dev/null +++ b/sbom/tests/__init__.py @@ -0,0 +1 @@ +"""SBOM tests package.""" diff --git a/sbom/tests/test_bcr_known_licenses.py b/sbom/tests/test_bcr_known_licenses.py new file mode 100644 index 0000000..16aafee --- /dev/null +++ b/sbom/tests/test_bcr_known_licenses.py @@ -0,0 +1,250 @@ +"""Tests for BCR known-license resolution in sbom_generator. + +These tests verify that C++ modules from the Bazel Central Registry +(e.g. boost.*) receive correct license data even when cdxgen and +lockfile parsing cannot provide it. 
+""" + +import unittest + +from sbom.internal.generator.sbom_generator import ( + BCR_KNOWN_LICENSES, + apply_known_licenses, + resolve_component, +) + + +class TestBcrKnownLicenses(unittest.TestCase): + """Verify the BCR_KNOWN_LICENSES table contents.""" + + def test_boost_entry_exists(self): + self.assertIn("boost", BCR_KNOWN_LICENSES) + self.assertEqual(BCR_KNOWN_LICENSES["boost"]["license"], "BSL-1.0") + + def test_all_entries_have_license(self): + for name, info in BCR_KNOWN_LICENSES.items(): + self.assertTrue( + info.get("license"), + f"BCR_KNOWN_LICENSES['{name}'] has no license", + ) + + +class TestApplyKnownLicenses(unittest.TestCase): + """Tests for apply_known_licenses().""" + + # -- BCR known-license fallback ------------------------------------------- + + def test_boost_submodule_gets_license(self): + """boost.config should inherit BSL-1.0 from the 'boost' BCR entry.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "Boost.org") + + def test_multiple_boost_submodules(self): + """All boost.* sub-modules should receive BSL-1.0.""" + names = [ + "boost.config", "boost.assert", "boost.mp11", "boost.container", + "boost.interprocess", "boost.core", "boost.predef", + ] + metadata = { + "modules": { + n: {"version": "1.87.0", "purl": f"pkg:bazel/{n}@1.87.0"} + for n in names + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + for n in names: + self.assertEqual( + metadata["modules"][n]["license"], "BSL-1.0", + f"{n} should have BSL-1.0 license", + ) + + def test_exact_bcr_match(self): + """A module matching a BCR key exactly gets the license.""" + metadata = { + "modules": { + "abseil-cpp": {"version": "20230802.0", "purl": "pkg:bazel/abseil-cpp@20230802.0"}, + }, + "licenses": 
{}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["abseil-cpp"]["license"], "Apache-2.0") + + def test_unknown_module_unchanged(self): + """Modules not in BCR_KNOWN_LICENSES remain without a license.""" + metadata = { + "modules": { + "some_unknown_lib": {"version": "1.0.0", "purl": "pkg:bazel/some_unknown_lib@1.0.0"}, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["some_unknown_lib"].get("license", ""), "") + + # -- Explicit license overrides (sbom_ext.license) ------------------------ + + def test_explicit_license_override(self): + """User-declared license in metadata['licenses'] takes priority.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": { + "boost.config": {"license": "MIT", "supplier": "Custom"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "MIT") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "Custom") + + def test_parent_license_override(self): + """Parent-level license declaration covers all sub-modules.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.container": {"version": "1.87.0", "purl": "pkg:bazel/boost.container@1.87.0"}, + }, + "licenses": { + "boost": {"license": "BSL-1.0-custom", "supplier": "My Boost Fork"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0-custom") + self.assertEqual(metadata["modules"]["boost.container"]["license"], "BSL-1.0-custom") + + def test_explicit_beats_parent(self): + """Exact-name license takes priority over parent-level declaration.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": { + "boost": {"license": "BSL-1.0", "supplier": 
"Boost.org"}, + "boost.config": {"license": "MIT-override", "supplier": "Override"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "MIT-override") + + def test_explicit_beats_bcr_known(self): + """User-declared license overrides the BCR known-license database.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": { + "boost": {"license": "Apache-2.0", "supplier": "Custom Boost"}, + }, + } + apply_known_licenses(metadata) + + # User's declaration should win over BCR_KNOWN_LICENSES["boost"] + self.assertEqual(metadata["modules"]["boost.config"]["license"], "Apache-2.0") + + # -- Preserves existing data ---------------------------------------------- + + def test_existing_license_not_overwritten(self): + """Modules that already have a license are not modified.""" + metadata = { + "modules": { + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + "license": "Already-Set", + "supplier": "Original", + }, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "Already-Set") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "Original") + + def test_supplier_not_overwritten_when_present(self): + """Existing supplier is preserved even when license is filled from BCR.""" + metadata = { + "modules": { + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + "supplier": "My Custom Supplier", + }, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "My Custom Supplier") + + # -- Edge cases ----------------------------------------------------------- + + def test_empty_metadata(self): + """Empty metadata does not raise.""" + 
class TestResolveComponentWithLicenses(unittest.TestCase):
    """Verify that resolve_component returns licenses from metadata modules."""

    def test_module_with_license_from_apply(self):
        """After apply_known_licenses, resolve_component picks up the license."""
        module_entry = {
            "version": "1.87.0",
            "purl": "pkg:bazel/boost.config@1.87.0",
            "license": "BSL-1.0",
            "supplier": "Boost.org",
        }
        metadata = {
            "modules": {"boost.config": module_entry},
            "licenses": {},
        }

        component = resolve_component("boost.config+", metadata)

        self.assertIsNotNone(component)
        self.assertEqual(component["name"], "boost.config")
        self.assertEqual(component["license"], "BSL-1.0")
+""" + +import json +import pathlib +import unittest + +from sbom.internal.generator.sbom_generator import enrich_components_from_cpp_cache + +# SBOM fields that must never appear as manually-curated static values. +# If any of these appear in cpp_metadata.json they were hand-written and must +# be removed. The only valid sources are automated tooling (cdxgen, lockfiles). +_SBOM_FIELDS = {"checksum", "license", "supplier", "version", "purl", "description"} + + +class TestCppEnrichChecksumPropagation(unittest.TestCase): + """enrich_components_from_cpp_cache field propagation mechanics. + + These tests exercise the code path using synthetic cache data generated + by cdxgen (not manually written). The logic itself is valid — the + restriction is on what may appear in the on-disk cpp_metadata.json. + """ + + def _run(self, components, cpp_components): + return enrich_components_from_cpp_cache(components, cpp_components, {}) + + def test_checksum_propagated_when_component_has_none(self): + """SHA-256 from the cdxgen-generated cache is copied to a component with no checksum.""" + sha = "a22461d13119ac5c78f205d3df1db13403e58ce1bb1794edc9313677313f4a9d" + components = [{"name": "nlohmann-json", "version": "3.11.3", "checksum": ""}] + cpp_cache = [{"name": "nlohmann-json", "version": "3.11.3", "checksum": sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], sha) + + def test_checksum_not_overwritten_when_already_present(self): + """An existing checksum on a component is preserved — cache is skipped.""" + existing = "aaaa" * 16 + cache_sha = "bbbb" * 16 + components = [{"name": "flatbuffers", "version": "25.2.10", "checksum": existing}] + cpp_cache = [{"name": "flatbuffers", "checksum": cache_sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], existing) + + def test_no_checksum_in_cache_leaves_component_without_checksum(self): + """When the cache entry has no checksum the component remains 
without one.""" + components = [{"name": "boost", "version": "1.87.0", "checksum": ""}] + cpp_cache = [{"name": "boost", "license": "BSL-1.0"}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], "") + + def test_component_without_matching_cache_entry_unchanged(self): + """A component with no matching cache entry is not modified.""" + components = [{"name": "some-unknown-lib", "checksum": ""}] + cpp_cache = [{"name": "nlohmann-json", "checksum": "aaaa"}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], "") + + def test_checksum_propagated_via_normalised_name(self): + """nlohmann_json (underscore) component matches nlohmann-json cache entry.""" + sha = "a22461d13119ac5c78f205d3df1db13403e58ce1bb1794edc9313677313f4a9d" + components = [{"name": "nlohmann_json", "checksum": ""}] + cpp_cache = [{"name": "nlohmann-json", "checksum": sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], sha) + + def test_checksum_propagated_via_parent_match(self): + """boost.config component matches the 'boost' cache entry.""" + sha = "deadbeef" * 8 + components = [{"name": "boost.config", "checksum": ""}] + cpp_cache = [{"name": "boost", "checksum": sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], sha) + + +class TestNoManualFallbackInCppMetadata(unittest.TestCase): + """Enforce the no-manual-fallback requirement on the on-disk cache. + + MUST REQUIREMENT: cpp_metadata.json must never contain manually-curated + SBOM field values. The file must either be empty ({}) or contain only + entries generated automatically by generate_cpp_metadata_cache.py from + cdxgen output. + + Rationale: A manually-written value is tied to a specific version string + in the file. 
If the workspace resolves a different version of that library, + the value silently describes the wrong artifact — an incorrect SBOM entry + is worse than an absent one. All SBOM fields must trace back to an + automated source (cdxgen scan, MODULE.bazel.lock, http_archive sha256). + + Known violations still to be resolved: + - BCR_KNOWN_LICENSES dict in sbom_generator.py (manual license/supplier + lookup for BCR C++ modules — must be replaced by automated BCR metadata + fetching or removed). + """ + + _CACHE_PATH = pathlib.Path(__file__).parent.parent / "cpp_metadata.json" + + def setUp(self): + self._data = json.loads(self._CACHE_PATH.read_text(encoding="utf-8")) + + def test_cpp_metadata_json_is_empty(self): + """cpp_metadata.json must be empty. + + Any entry in this file was written by hand. All C++ metadata must be + produced by automated tooling at build time (cdxgen via auto_cdxgen, + or lockfile parsing). If you need to populate this file, run: + + npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json + python3 tooling/sbom/scripts/generate_cpp_metadata_cache.py \\ + cdxgen_output.cdx.json tooling/sbom/cpp_metadata.json + """ + self.assertEqual( + self._data, + {}, + "cpp_metadata.json must be empty. Found manually-curated entries: " + + ", ".join(self._data.keys()) + + ". Remove them — use generate_cpp_metadata_cache.py to populate " + "this file from cdxgen output instead.", + ) + + def test_no_sbom_fields_in_any_entry(self): + """No entry in cpp_metadata.json may contain any SBOM metadata field. + + This is a belt-and-suspenders check: even if the file is non-empty + (which the previous test already flags), no SBOM field value may be + manually written. Automated generation via generate_cpp_metadata_cache.py + is the only permitted source. 
+ """ + for lib, entry in self._data.items(): + manually_present = _SBOM_FIELDS & set(entry.keys()) + with self.subTest(lib=lib): + self.assertFalse( + manually_present, + f"cpp_metadata.json['{lib}'] contains manually-curated SBOM " + f"fields: {manually_present}. All SBOM fields must come from " + f"automated sources only.", + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/sbom/tests/test_cyclonedx_formatter.py b/sbom/tests/test_cyclonedx_formatter.py new file mode 100644 index 0000000..46de425 --- /dev/null +++ b/sbom/tests/test_cyclonedx_formatter.py @@ -0,0 +1,199 @@ +"""Tests for CycloneDX 1.6 formatter.""" + +import unittest +from datetime import datetime, timezone + +from sbom.internal.generator.cyclonedx_formatter import generate_cyclonedx, _normalize_spdx_license + + +class TestCycloneDXFormatter(unittest.TestCase): + """Tests for CycloneDX 1.6 generation.""" + + def setUp(self): + """Set up test fixtures.""" + self.timestamp = datetime( + 2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc + ).isoformat() + self.config = { + "component_name": "test-component", + "component_version": "1.0.0", + "producer_name": "Eclipse Foundation", + "producer_url": "https://eclipse.dev/score", + "namespace": "https://eclipse.dev/score", + } + self.components = [ + { + "name": "tokio", + "version": "1.10.0", + "purl": "pkg:cargo/tokio@1.10.0", + "type": "library", + "license": "MIT", + "source": "crates.io", + }, + { + "name": "serde", + "version": "1.0.0", + "purl": "pkg:cargo/serde@1.0.0", + "type": "library", + "license": "MIT OR Apache-2.0", + "source": "crates.io", + }, + ] + + def test_generate_cyclonedx_structure(self): + """Test that generated CycloneDX has correct structure.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + self.assertEqual(cdx["bomFormat"], "CycloneDX") + self.assertEqual(cdx["specVersion"], "1.6") + self.assertIn("serialNumber", cdx) + self.assertTrue(cdx["serialNumber"].startswith("urn:uuid:")) + 
        # NOTE(review): tail of a test method whose start lies before this chunk —
        # presumably asserting the CycloneDX "version" document field; confirm against
        # the method header above.
        self.assertEqual(cdx["version"], 1)

    def test_generate_cyclonedx_metadata(self):
        """Test that CycloneDX metadata is correct."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        metadata = cdx["metadata"]
        self.assertEqual(metadata["timestamp"], self.timestamp)
        self.assertIn("tools", metadata)
        self.assertIn("component", metadata)

        # metadata.component is the root component the SBOM describes.
        root_component = metadata["component"]
        self.assertEqual(root_component["name"], "test-component")
        self.assertEqual(root_component["version"], "1.0.0")
        self.assertEqual(root_component["type"], "application")

    def test_generate_cyclonedx_components(self):
        """Test that components are properly added."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        components = cdx["components"]
        self.assertEqual(len(components), 2)

        component_names = {c["name"] for c in components}
        self.assertEqual(component_names, {"tokio", "serde"})

    def test_generate_cyclonedx_component_details(self):
        """Test that component details are correct."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        tokio = next(c for c in cdx["components"] if c["name"] == "tokio")

        self.assertEqual(tokio["version"], "1.10.0")
        self.assertEqual(tokio["type"], "library")
        self.assertEqual(tokio["purl"], "pkg:cargo/tokio@1.10.0")
        self.assertIn("bom-ref", tokio)

    def test_generate_cyclonedx_licenses(self):
        """Test that licenses are properly set."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        tokio = next(c for c in cdx["components"] if c["name"] == "tokio")

        # A single SPDX id ("MIT") must land in the license.id field,
        # not in an SPDX expression.
        self.assertIn("licenses", tokio)
        self.assertEqual(len(tokio["licenses"]), 1)
        self.assertEqual(tokio["licenses"][0]["license"]["id"], "MIT")

    def test_generate_cyclonedx_dependencies(self):
        """Test that dependencies are created."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        dependencies = cdx["dependencies"]

        # Should have root + 2 component dependency entries
        self.assertEqual(len(dependencies), 3)

        # Find root dependency
        root_dep = next(d for d in dependencies if d["ref"] == "test-component@1.0.0")
        self.assertEqual(len(root_dep["dependsOn"]), 2)

    def test_generate_cyclonedx_external_references(self):
        """Test that external references are added for crates.io sources."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        tokio = next(c for c in cdx["components"] if c["name"] == "tokio")

        self.assertIn("externalReferences", tokio)
        ext_refs = tokio["externalReferences"]

        distribution_ref = next(
            (r for r in ext_refs if r["type"] == "distribution"), None
        )
        self.assertIsNotNone(distribution_ref)
        self.assertIn("crates.io", distribution_ref["url"])

    def test_generate_cyclonedx_cratesio_external_ref_from_source_field(self):
        """Crates with source=crates.io get a distribution externalReference URL."""
        components = [
            {
                "name": "serde",
                "version": "1.0.228",
                "purl": "pkg:cargo/serde@1.0.228",
                "type": "library",
                "license": "MIT OR Apache-2.0",
                "source": "crates.io",
            }
        ]
        cdx = generate_cyclonedx(components, self.config, self.timestamp)
        serde = next(c for c in cdx["components"] if c["name"] == "serde")
        ext_refs = serde.get("externalReferences", [])
        dist_ref = next((r for r in ext_refs if r["type"] == "distribution"), None)
        self.assertIsNotNone(dist_ref, "Expected distribution externalReference for crates.io crate")
        self.assertIn("crates.io/crates/serde/1.0.228", dist_ref["url"])

    def test_generate_cyclonedx_schema_url_uses_https(self):
        """Test that $schema URL uses https:// not http://."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)
        self.assertTrue(
            cdx["$schema"].startswith("https://"),
            f"$schema should use https://, got: {cdx['$schema']}",
        )

    def test_generate_cyclonedx_with_empty_components(self):
        """Test generating CycloneDX with no components."""
        cdx = generate_cyclonedx([], self.config, self.timestamp)

        self.assertEqual(len(cdx["components"]), 0)
        self.assertEqual(len(cdx["dependencies"]), 1)  # Just root

    def test_generate_cyclonedx_bom_refs_unique(self):
        """Test that bom-refs are unique across components."""
        cdx = generate_cyclonedx(self.components, self.config, self.timestamp)

        bom_refs = [c["bom-ref"] for c in cdx["components"]]
        self.assertEqual(len(bom_refs), len(set(bom_refs)))


class TestNormalizeSpdxLicenseCdx(unittest.TestCase):
    """Verify lowercase operator normalization for CycloneDX formatter."""

    def test_lowercase_or_normalized(self):
        self.assertEqual(_normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT")

    def test_gpl_or_later_not_mangled(self):
        # '-or-' here is hyphen-delimited inside a single SPDX id,
        # not a boolean operator — it must survive normalization untouched.
        self.assertEqual(_normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later")

    def test_lowercase_or_routes_to_expression_field(self):
        """'Apache-2.0 or MIT' from dash-license-scan must use expression field, not license.id."""
        config = {
            "component_name": "test",
            "component_version": "1.0",
            "producer_name": "Test",
            "namespace": "https://example.com",
        }
        timestamp = "2024-01-01T00:00:00+00:00"
        components = [{"name": "serde", "version": "1.0.228", "purl": "pkg:cargo/serde@1.0.228",
                       "type": "library", "license": "Apache-2.0 or MIT"}]
        cdx = generate_cyclonedx(components, config, timestamp)
        serde = next(c for c in cdx["components"] if c["name"] == "serde")
        licenses = serde.get("licenses", [])
        self.assertEqual(len(licenses), 1)
        # Must use 'expression' field with uppercase OR, not 'license.id'
        self.assertIn("expression", licenses[0], "compound license must use 'expression' field")
        self.assertEqual(licenses[0]["expression"], "Apache-2.0 OR MIT")
        self.assertNotIn("license", licenses[0])


if __name__ == "__main__":
    unittest.main()

# --- git diff metadata: new file sbom/tests/test_generate_crates_metadata_cache.py (new file mode 100644, index
# 0000000..30bdc3e, --- /dev/null, +++ b/sbom/tests/test_generate_crates_metadata_cache.py) ---
"""Tests for generate_crates_metadata_cache.py.

These tests verify the core parsing and data transformation functions
used to extract Rust crate license metadata via dash-license-scan.
"""

import json
import os
import tempfile
import unittest

# The script lives under sbom/scripts/ and is not a regular Python package.
# Import functions by adding the scripts directory to sys.path.
import sys

sys.path.insert(
    0,
    os.path.join(os.path.dirname(__file__), "..", "scripts"),
)

from generate_crates_metadata_cache import (
    build_dash_coordinates,
    generate_synthetic_cargo_lock,
    parse_dash_summary,
    parse_module_bazel_lock,
)


class TestParseDashSummary(unittest.TestCase):
    """Tests for parse_dash_summary — the JAR summary CSV parser."""

    def _write_summary(self, content: str) -> str:
        """Helper: write content to a temp file and return its path."""
        fd, path = tempfile.mkstemp(suffix=".txt")
        with os.fdopen(fd, "w") as f:
            f.write(content)
        # Ensure the temp file is removed even if the test fails.
        self.addCleanup(os.unlink, path)
        return path

    def test_basic_parsing(self):
        """Standard summary lines produce correct crate→license mapping."""
        summary = (
            "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n"
            "crate/cratesio/-/tokio/1.10.0, MIT, approved, clearlydefined\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertEqual(result["serde"], "Apache-2.0 OR MIT")
        self.assertEqual(result["tokio"], "MIT")

    def test_empty_license_skipped(self):
        """Entries with empty license expressions are not included."""
        summary = (
            "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n"
            "crate/cratesio/-/unknown-crate/0.1.0, , restricted, clearlydefined\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertIn("serde", result)
        self.assertNotIn("unknown-crate", result)

    def test_compound_spdx_expression(self):
        """Compound SPDX expressions (AND/OR) are preserved."""
        summary = (
            "crate/cratesio/-/ring/0.17.14, "
            "Apache-2.0 AND LicenseRef-scancode-iso-8879 AND (GPL-2.0-only AND MIT), "
            "restricted, #25641\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertIn("ring", result)
        self.assertIn("Apache-2.0", result["ring"])

    def test_malformed_lines_skipped(self):
        """Lines with fewer than 4 comma-separated fields are ignored."""
        summary = (
            "crate/cratesio/-/serde/1.0.228, MIT, approved, clearlydefined\n"
            "this is not a valid line\n"
            "only, two, parts\n"
            "\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertEqual(len(result), 1)
        self.assertEqual(result["serde"], "MIT")

    def test_non_crate_entries_skipped(self):
        """Non-crate entries (pypi, npm, etc.) are ignored."""
        summary = (
            "crate/cratesio/-/serde/1.0.228, MIT, approved, clearlydefined\n"
            "pypi/pypi/-/requests/2.31.0, Apache-2.0, approved, clearlydefined\n"
            "npm/npmjs/-/express/4.18.2, MIT, approved, clearlydefined\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertEqual(len(result), 1)
        self.assertIn("serde", result)

    def test_empty_file(self):
        """An empty summary file produces an empty dict."""
        path = self._write_summary("")
        result = parse_dash_summary(path)
        self.assertEqual(result, {})

    def test_restricted_crate_still_gets_license(self):
        """Restricted crates still have their license extracted."""
        summary = (
            "crate/cratesio/-/openssl-sys/0.9.104, OpenSSL, restricted, clearlydefined\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertEqual(result["openssl-sys"], "OpenSSL")

    def test_licenseref_expression(self):
        """LicenseRef-* expressions are preserved."""
        summary = (
            "crate/cratesio/-/ring/0.17.14, LicenseRef-ring, restricted, clearlydefined\n"
        )
        path = self._write_summary(summary)
        result = parse_dash_summary(path)

        self.assertEqual(result["ring"], "LicenseRef-ring")


class TestBuildDashCoordinates(unittest.TestCase):
    """Tests for build_dash_coordinates — coordinate string construction."""

    def test_basic_coordinate_building(self):
        """Crate data produces correct coordinate strings."""
        crates = {
            "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc123"},
            "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def456"},
        }
        coords = build_dash_coordinates(crates)

        self.assertEqual(len(coords), 2)
        self.assertIn("crate/cratesio/-/serde/1.0.228", coords)
        self.assertIn("crate/cratesio/-/tokio/1.10.0", coords)

    def test_empty_crates(self):
        """Empty crates dict produces empty coordinates list."""
        coords = build_dash_coordinates({})
        self.assertEqual(coords, [])

    def test_coordinates_are_sorted(self):
        """Coordinates are sorted by crate name."""
        crates = {
            "z-crate": {"name": "z-crate", "version": "1.0.0", "checksum": ""},
            "a-crate": {"name": "a-crate", "version": "2.0.0", "checksum": ""},
        }
        coords = build_dash_coordinates(crates)

        self.assertEqual(coords[0], "crate/cratesio/-/a-crate/2.0.0")
        self.assertEqual(coords[1], "crate/cratesio/-/z-crate/1.0.0")

    def test_hyphenated_crate_name(self):
        """Crate names with hyphens are preserved in coordinates."""
        crates = {
            "iceoryx2-bb-lock-free": {
                "name": "iceoryx2-bb-lock-free",
                "version": "0.7.0",
                "checksum": "",
            },
        }
        coords = build_dash_coordinates(crates)

        self.assertEqual(
            coords[0], "crate/cratesio/-/iceoryx2-bb-lock-free/0.7.0"
        )


class TestParseModuleBazelLock(unittest.TestCase):
    """Tests for parse_module_bazel_lock — MODULE.bazel.lock crate extraction."""

    def _write_lockfile(self, data: dict) -> str:
        """Helper: write JSON data to a temp file and return its path."""
        fd, path = tempfile.mkstemp(suffix=".json")
        with os.fdopen(fd, "w") as f:
            json.dump(data, f)
        self.addCleanup(os.unlink, path)
        return path

    def test_basic_crate_extraction(self):
        """Crates are correctly extracted from generatedRepoSpecs."""
        # Repo names follow the pattern "crate_index__<name>-<version>".
        lockfile = {
            "moduleExtensions": {
                "@@rules_rust+//crate_universe:extensions.bzl%crate": {
                    "general": {
                        "generatedRepoSpecs": {
                            "crate_index__serde-1.0.228": {
                                "attributes": {"sha256": "abc123def456"}
                            },
                            "crate_index__tokio-1.10.0": {
                                "attributes": {"sha256": "789xyz"}
                            },
                        }
                    }
                }
            }
        }
        path = self._write_lockfile(lockfile)
        result = parse_module_bazel_lock(path)

        self.assertEqual(len(result), 2)
        self.assertEqual(result["serde"]["version"], "1.0.228")
        self.assertEqual(result["serde"]["checksum"], "abc123def456")
        self.assertEqual(result["tokio"]["version"], "1.10.0")

    def test_crate_index_meta_repo_skipped(self):
        """The crate_index meta-repo entry is not treated as a crate."""
        lockfile = {
            "moduleExtensions": {
                "crate_universe": {
                    "general": {
                        "generatedRepoSpecs": {
                            "crate_index": {"attributes": {}},
                            "crate_index__serde-1.0.228": {
                                "attributes": {"sha256": "abc"}
                            },
                        }
                    }
                }
            }
        }
        path = self._write_lockfile(lockfile)
        result = parse_module_bazel_lock(path)

        self.assertEqual(len(result), 1)
        self.assertIn("serde", result)

    def test_complex_crate_name(self):
        """Crate names with multiple hyphens (e.g. iceoryx2-qnx8) are parsed correctly."""
        lockfile = {
            "moduleExtensions": {
                "crate": {
                    "general": {
                        "generatedRepoSpecs": {
                            "crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0": {
                                "attributes": {"sha256": "xyz"}
                            },
                        }
                    }
                }
            }
        }
        path = self._write_lockfile(lockfile)
        result = parse_module_bazel_lock(path)

        self.assertEqual(len(result), 1)
        self.assertIn("iceoryx2-bb-lock-free-qnx8", result)
        self.assertEqual(result["iceoryx2-bb-lock-free-qnx8"]["version"], "0.7.0")

    def test_no_crate_extension(self):
        """Lockfile without crate extension returns empty dict."""
        lockfile = {
            "moduleExtensions": {
                "some_other_extension": {"general": {}}
            }
        }
        path = self._write_lockfile(lockfile)
        result = parse_module_bazel_lock(path)

        self.assertEqual(result, {})

    def test_empty_lockfile(self):
        """Lockfile with no moduleExtensions returns empty dict."""
        path = self._write_lockfile({})
        result = parse_module_bazel_lock(path)
        self.assertEqual(result, {})


class TestGenerateSyntheticCargoLock(unittest.TestCase):
    """Tests for generate_synthetic_cargo_lock."""

    def test_generates_valid_toml(self):
        """Generated Cargo.lock has correct TOML structure."""
        crates = {
            "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc"},
            "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def"},
        }
        fd, path = tempfile.mkstemp(suffix=".lock")
        # Close the descriptor immediately; the function writes by path.
        os.close(fd)
        self.addCleanup(os.unlink, path)

        generate_synthetic_cargo_lock(crates, path)

        with open(path) as f:
            content = f.read()

        self.assertIn("version = 4", content)
        self.assertIn('name = "serde"', content)
        self.assertIn('version = "1.0.228"', content)
        self.assertIn('name = "tokio"', content)
        self.assertIn("[[package]]", content)
        self.assertIn("crates.io-index", content)

    def test_entries_are_sorted(self):
        """Cargo.lock entries are sorted by crate name."""
        crates = {
            "z-crate": {"name": "z-crate", "version": "1.0.0", "checksum": ""},
            "a-crate": {"name": "a-crate", "version": "2.0.0", "checksum": ""},
        }
        fd, path = tempfile.mkstemp(suffix=".lock")
        os.close(fd)
        self.addCleanup(os.unlink, path)

        generate_synthetic_cargo_lock(crates, path)

        with open(path) as f:
            content = f.read()

        # Sorted output ⇒ "a-crate" appears textually before "z-crate".
        a_pos = content.index('name = "a-crate"')
        z_pos = content.index('name = "z-crate"')
        self.assertLess(a_pos, z_pos)


class TestEndToEndLicenseExtraction(unittest.TestCase):
    """Integration tests verifying the full license extraction pipeline.

    These tests verify that the parse_dash_summary function correctly
    handles the output format of the Eclipse dash-licenses JAR, which
    is the format that build_dash_coordinates + JAR invocation produces.
    """

    def _write_summary(self, content: str) -> str:
        # Same helper as TestParseDashSummary._write_summary.
        fd, path = tempfile.mkstemp(suffix=".txt")
        with os.fdopen(fd, "w") as f:
            f.write(content)
        self.addCleanup(os.unlink, path)
        return path

    def test_coordinates_match_summary_format(self):
        """Coordinates built by build_dash_coordinates match the format
        that parse_dash_summary expects in the JAR output."""
        crates = {
            "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc"},
            "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def"},
        }

        # Build coordinates (what we send to the JAR)
        coords = build_dash_coordinates(crates)
        self.assertEqual(coords[0], "crate/cratesio/-/serde/1.0.228")
        self.assertEqual(coords[1], "crate/cratesio/-/tokio/1.10.0")

        # Simulate JAR summary output (what the JAR would produce)
        summary = (
            "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n"
            "crate/cratesio/-/tokio/1.10.0, MIT, approved, clearlydefined\n"
        )
        path = self._write_summary(summary)
        license_map = parse_dash_summary(path)

        # Verify licenses are correctly mapped back to crate names
        self.assertEqual(license_map["serde"], "Apache-2.0 OR MIT")
        self.assertEqual(license_map["tokio"], "MIT")

        # Verify all crates got licenses
        for name in crates:
            self.assertIn(name, license_map, f"Missing license for crate: {name}")

    def test_kyron_style_crates(self):
        """Verify license extraction works for crates typical in the score_kyron module."""
        crates = {
            "proc-macro2": {"name": "proc-macro2", "version": "1.0.92", "checksum": ""},
            "quote": {"name": "quote", "version": "1.0.37", "checksum": ""},
            "syn": {"name": "syn", "version": "2.0.96", "checksum": ""},
            "iceoryx2": {"name": "iceoryx2", "version": "0.7.0", "checksum": ""},
        }

        coords = build_dash_coordinates(crates)
        self.assertEqual(len(coords), 4)

        # Simulate JAR output
        summary = (
            "crate/cratesio/-/proc-macro2/1.0.92, Apache-2.0 OR MIT, approved, clearlydefined\n"
            "crate/cratesio/-/quote/1.0.37, Apache-2.0 OR MIT, approved, clearlydefined\n"
            "crate/cratesio/-/syn/2.0.96, Apache-2.0 OR MIT, approved, clearlydefined\n"
            "crate/cratesio/-/iceoryx2/0.7.0, Apache-2.0 OR MIT, approved, clearlydefined\n"
        )
        path = self._write_summary(summary)
        license_map = parse_dash_summary(path)

        # All crates should have licenses
        for name in crates:
            self.assertIn(name, license_map, f"Missing license for {name}")
            self.assertTrue(
                license_map[name], f"Empty license for {name}"
            )


if __name__ == "__main__":
    unittest.main()

# --- git diff metadata: new file sbom/tests/test_spdx_formatter.py (new file mode 100644,
# index 0000000..7074e97, --- /dev/null, +++ b/sbom/tests/test_spdx_formatter.py) ---
"""Tests for SPDX 2.3 formatter."""

import unittest
from datetime import datetime, timezone

from sbom.internal.generator.spdx_formatter import generate_spdx, _normalize_spdx_license


class TestSpdxFormatter(unittest.TestCase):
    """Tests for SPDX 2.3 generation."""

    def setUp(self):
        """Set up test fixtures."""
        self.timestamp = datetime(
            2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc
        ).isoformat()
        self.config = {
            "component_name": "test-component",
            # NOTE(review): the value of this key continues on the first line
            # of the next chunk segment.
            "component_version":
                "1.0.0",
            "producer_name": "Eclipse Foundation",
            "producer_url": "https://eclipse.dev/score",
            "namespace": "https://eclipse.dev/score",
        }
        self.components = [
            {
                "name": "tokio",
                "version": "1.10.0",
                "purl": "pkg:cargo/tokio@1.10.0",
                "type": "library",
                "license": "MIT",
            },
            {
                "name": "serde",
                "version": "1.0.0",
                "purl": "pkg:cargo/serde@1.0.0",
                "type": "library",
                "license": "MIT OR Apache-2.0",
            },
        ]

    def test_generate_spdx_structure(self):
        """Test that generated SPDX has correct structure."""
        spdx = generate_spdx(self.components, self.config, self.timestamp)

        self.assertEqual(spdx["spdxVersion"], "SPDX-2.3")
        self.assertEqual(spdx["dataLicense"], "CC0-1.0")
        self.assertEqual(spdx["SPDXID"], "SPDXRef-DOCUMENT")
        self.assertIn("documentNamespace", spdx)
        self.assertIn("packages", spdx)
        self.assertIn("relationships", spdx)

    def test_generate_spdx_document_info(self):
        """Test that SPDX document has correct metadata."""
        spdx = generate_spdx(self.components, self.config, self.timestamp)

        self.assertEqual(spdx["name"], "SBOM for test-component")
        creation_info = spdx["creationInfo"]
        self.assertEqual(creation_info["created"], self.timestamp)
        creators = creation_info["creators"]
        self.assertIn("Organization: Eclipse Foundation", creators)
        self.assertIn("Tool: score-sbom-generator", creators)

    def test_generate_spdx_components(self):
        """Test that components are properly added to SPDX."""
        spdx = generate_spdx(self.components, self.config, self.timestamp)

        packages = spdx["packages"]
        # root package + 2 components
        self.assertEqual(len(packages), 3)

    def test_generate_spdx_relationships(self):
        """Test that dependency relationships are created."""
        spdx = generate_spdx(self.components, self.config, self.timestamp)

        relationships = spdx["relationships"]
        # DESCRIBES + 2 DEPENDS_ON
        describes = [r for r in relationships if r["relationshipType"] == "DESCRIBES"]
        depends_on = [r for r in relationships if r["relationshipType"] == "DEPENDS_ON"]

        self.assertEqual(len(describes), 1)
        self.assertEqual(len(depends_on), 2)

    def test_generate_spdx_with_empty_components(self):
        """Test generating SPDX with no components."""
        spdx = generate_spdx([], self.config, self.timestamp)

        packages = spdx["packages"]
        # Only root package
        self.assertEqual(len(packages), 1)

    def test_generate_spdx_component_purl(self):
        """Test that component PURLs are properly set."""
        spdx = generate_spdx(self.components, self.config, self.timestamp)

        packages = spdx["packages"]
        tokio_pkg = next((p for p in packages if p["name"] == "tokio"), None)

        self.assertIsNotNone(tokio_pkg)
        # PURLs are carried in SPDX externalRefs with referenceType "purl".
        ext_refs = tokio_pkg.get("externalRefs", [])
        purl_ref = next(
            (r for r in ext_refs if r.get("referenceType") == "purl"),
            None,
        )
        self.assertIsNotNone(purl_ref)
        self.assertEqual(purl_ref["referenceLocator"], "pkg:cargo/tokio@1.10.0")


    def test_generate_spdx_component_checksum(self):
        """Test that SHA-256 checksums are emitted when available."""
        components_with_hash = [
            {
                "name": "serde",
                "version": "1.0.0",
                "purl": "pkg:cargo/serde@1.0.0",
                "type": "library",
                "license": "MIT OR Apache-2.0",
                "checksum": "abc123def456abc123def456abc123def456abc123def456abc123def456abcd",
            }
        ]
        spdx = generate_spdx(components_with_hash, self.config, self.timestamp)

        packages = spdx["packages"]
        serde_pkg = next((p for p in packages if p["name"] == "serde"), None)
        self.assertIsNotNone(serde_pkg)
        self.assertIn("checksums", serde_pkg)
        self.assertEqual(len(serde_pkg["checksums"]), 1)
        self.assertEqual(serde_pkg["checksums"][0]["algorithm"], "SHA256")
        self.assertEqual(
            serde_pkg["checksums"][0]["checksumValue"],
            "abc123def456abc123def456abc123def456abc123def456abc123def456abcd",
        )

    def test_generate_spdx_no_checksum_when_absent(self):
        """Test that checksums field is absent when no checksum available."""
        spdx = generate_spdx(self.components, self.config, self.timestamp)

        packages = spdx["packages"]
        tokio_pkg = next((p for p in packages if p["name"] == "tokio"), None)
        self.assertIsNotNone(tokio_pkg)
        self.assertNotIn("checksums", tokio_pkg)


class TestNormalizeSpdxLicense(unittest.TestCase):
    """Tests for SPDX boolean operator normalization."""

    def test_lowercase_or_uppercased(self):
        self.assertEqual(_normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT")

    def test_lowercase_and_uppercased(self):
        self.assertEqual(_normalize_spdx_license("MIT and Apache-2.0"), "MIT AND Apache-2.0")

    def test_lowercase_with_uppercased(self):
        self.assertEqual(_normalize_spdx_license("GPL-2.0 with Classpath-exception-2.0"), "GPL-2.0 WITH Classpath-exception-2.0")

    def test_already_uppercase_unchanged(self):
        self.assertEqual(_normalize_spdx_license("Apache-2.0 OR MIT"), "Apache-2.0 OR MIT")

    def test_gpl_or_later_identifier_not_mangled(self):
        """GPL-2.0-or-later has '-or-' (hyphen-delimited) — must not be uppercased."""
        self.assertEqual(_normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later")

    def test_mixed_compound_expression(self):
        self.assertEqual(
            _normalize_spdx_license("(Apache-2.0 or MIT) and Unicode-DFS-2016"),
            "(Apache-2.0 OR MIT) AND Unicode-DFS-2016",
        )

    def test_empty_string(self):
        self.assertEqual(_normalize_spdx_license(""), "")

    def test_single_license_unchanged(self):
        self.assertEqual(_normalize_spdx_license("MIT"), "MIT")

    def test_lowercase_operator_in_spdx_output_end_to_end(self):
        """Verify that lowercase 'or' from dash-license-scan is normalized in SPDX output."""
        config = {
            "component_name": "test",
            "component_version": "1.0",
            "producer_name": "Test",
            "namespace": "https://example.com",
        }
        timestamp = "2024-01-01T00:00:00+00:00"
        components = [{"name": "serde", "version": "1.0.228", "license": "Apache-2.0 or MIT"}]
        spdx = generate_spdx(components, config, timestamp)
        serde_pkg = next(p for p in spdx["packages"] if p["name"] == "serde")
        self.assertEqual(serde_pkg["licenseConcluded"], "Apache-2.0 OR MIT")
        self.assertEqual(serde_pkg["licenseDeclared"], "Apache-2.0 OR MIT")


if __name__ == "__main__":
    unittest.main()

# --- git diff metadata: new file sbom/tests/test_spdx_to_github_snapshot.py (new file mode 100644,
# index 0000000..de93f6a, --- /dev/null, +++ b/sbom/tests/test_spdx_to_github_snapshot.py) ---
"""Tests for SPDX 2.3 → GitHub Dependency Submission snapshot conversion."""

import unittest

from sbom.scripts.spdx_to_github_snapshot import convert_spdx_to_snapshot


def _make_spdx(
    packages: list[dict],
    relationships: list[dict] | None = None,
    doc_name: str = "test-sbom",
) -> dict:
    """Build a minimal SPDX 2.3 document dict for the converter under test."""
    return {
        "spdxVersion": "SPDX-2.3",
        "name": doc_name,
        "SPDXID": "SPDXRef-DOCUMENT",
        "packages": packages,
        "relationships": relationships or [],
    }


def _cargo_pkg(
    spdx_id: str, name: str, version: str, purl: str | None = None
) -> dict:
    """Build a minimal SPDX package dict; attaches a purl externalRef when given."""
    pkg: dict = {
        "SPDXID": spdx_id,
        "name": name,
        "versionInfo": version,
        "downloadLocation": "https://crates.io",
    }
    if purl:
        pkg["externalRefs"] = [
            {"referenceCategory": "PACKAGE-MANAGER", "referenceType": "purl", "referenceLocator": purl}
        ]
    return pkg


class TestConvertSpdxToSnapshot(unittest.TestCase):

    def _base_snapshot(self, spdx: dict, **kwargs) -> dict:
        """Run the converter with fixed sha/ref/job metadata."""
        return convert_spdx_to_snapshot(
            spdx=spdx,
            # 38 chars — not a real 40-hex-char git SHA, but sufficient
            # for these tests since the converter passes it through opaquely.
            sha="abc123" * 6 + "ab",
            ref="refs/heads/main",
            job_correlator="test-workflow_sbom",
            job_id="42",
            **kwargs,
        )

    def test_snapshot_top_level_fields(self):
        spdx = _make_spdx(packages=[])
        snapshot = self._base_snapshot(spdx)
        # "version": 0 is the snapshot schema version required by the
        # GitHub Dependency Submission API.
        self.assertEqual(snapshot["version"], 0)
        self.assertIn("sha", snapshot)
        self.assertIn("ref", snapshot)
        self.assertIn("job", snapshot)
        self.assertIn("detector", snapshot)
        self.assertIn("scanned", snapshot)
        self.assertIn("manifests", snapshot)
    # NOTE(review): these methods continue the TestConvertSpdxToSnapshot class
    # whose header lies in the previous chunk segment.
    def test_detector_fields(self):
        spdx = _make_spdx(packages=[])
        snapshot = self._base_snapshot(spdx)
        detector = snapshot["detector"]
        self.assertEqual(detector["name"], "score-sbom-generator")
        self.assertIn("version", detector)
        self.assertIn("url", detector)

    def test_job_correlator(self):
        spdx = _make_spdx(packages=[])
        snapshot = self._base_snapshot(spdx)
        self.assertEqual(snapshot["job"]["correlator"], "test-workflow_sbom")
        self.assertEqual(snapshot["job"]["id"], "42")

    def test_packages_without_purl_are_excluded(self):
        root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0")
        no_purl_pkg = _cargo_pkg("SPDXRef-nopurl", "internal-tool", "0.1.0")
        spdx = _make_spdx(
            packages=[root_pkg, no_purl_pkg],
            relationships=[
                {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"},
            ],
        )
        snapshot = self._base_snapshot(spdx)
        manifest = next(iter(snapshot["manifests"].values()))
        resolved = manifest["resolved"]
        # no_purl_pkg has no PURL → excluded
        self.assertFalse(any("internal-tool" in k for k in resolved))

    def test_root_package_excluded_from_resolved(self):
        root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0")
        dep_pkg = _cargo_pkg("SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228")
        spdx = _make_spdx(
            packages=[root_pkg, dep_pkg],
            relationships=[
                {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"},
                {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-serde"},
            ],
        )
        snapshot = self._base_snapshot(spdx)
        manifest = next(iter(snapshot["manifests"].values()))
        resolved = manifest["resolved"]
        # Root package (myapp) should not appear in resolved
        self.assertFalse(any("myapp" in k for k in resolved))
        # Dep package should appear
        self.assertTrue(any("serde" in k for k in resolved))

    def test_direct_vs_indirect_relationship(self):
        # root → tokio (direct), tokio → mio (transitive/indirect).
        root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0")
        direct_pkg = _cargo_pkg("SPDXRef-tokio", "tokio", "1.0.0", purl="pkg:cargo/tokio@1.0.0")
        indirect_pkg = _cargo_pkg("SPDXRef-mio", "mio", "0.8.0", purl="pkg:cargo/mio@0.8.0")
        spdx = _make_spdx(
            packages=[root_pkg, direct_pkg, indirect_pkg],
            relationships=[
                {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"},
                {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-tokio"},
                {"spdxElementId": "SPDXRef-tokio", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-mio"},
            ],
        )
        snapshot = self._base_snapshot(spdx)
        manifest = next(iter(snapshot["manifests"].values()))
        resolved = manifest["resolved"]

        tokio_entry = next(v for k, v in resolved.items() if "tokio" in k)
        mio_entry = next(v for k, v in resolved.items() if "mio" in k)

        self.assertEqual(tokio_entry["relationship"], "direct")
        self.assertEqual(mio_entry["relationship"], "indirect")

    def test_package_url_preserved(self):
        root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0")
        dep_pkg = _cargo_pkg("SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228")
        spdx = _make_spdx(
            packages=[root_pkg, dep_pkg],
            relationships=[
                {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"},
                {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-serde"},
            ],
        )
        snapshot = self._base_snapshot(spdx)
        manifest = next(iter(snapshot["manifests"].values()))
        resolved = manifest["resolved"]
        serde_entry = next(v for k, v in resolved.items() if "serde" in k)
        self.assertEqual(serde_entry["package_url"], "pkg:cargo/serde@1.0.228")

    def test_manifest_name_from_spdx_document_name(self):
        spdx = _make_spdx(packages=[], doc_name="my-sbom-component")
        snapshot = self._base_snapshot(spdx)
        self.assertIn("my-sbom-component", snapshot["manifests"])

    def test_empty_spdx_produces_empty_manifest(self):
        spdx = _make_spdx(packages=[])
        snapshot = self._base_snapshot(spdx)
        manifest = next(iter(snapshot["manifests"].values()))
        self.assertEqual(manifest["resolved"], {})

    def test_sha_and_ref_set_correctly(self):
        spdx = _make_spdx(packages=[])
        # Calls the converter directly (not via _base_snapshot) so that a
        # well-formed 40-char sha and a tag ref can be asserted round-trip.
        snapshot = convert_spdx_to_snapshot(
            spdx=spdx,
            sha="deadbeef" * 5,
            ref="refs/tags/v1.0.0",
            job_correlator="ci_sbom",
            job_id="99",
        )
        self.assertEqual(snapshot["sha"], "deadbeef" * 5)
        self.assertEqual(snapshot["ref"], "refs/tags/v1.0.0")

    def test_generic_purl_included(self):
        """pkg:generic/ PURLs (BCR modules) are accepted by GitHub Dependency Graph."""
        root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0")
        boost_pkg = _cargo_pkg("SPDXRef-boost", "boost.filesystem", "1.83.0", purl="pkg:generic/boost.filesystem@1.83.0")
        spdx = _make_spdx(
            packages=[root_pkg, boost_pkg],
            relationships=[
                {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"},
                {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-boost"},
            ],
        )
        snapshot = self._base_snapshot(spdx)
        manifest = next(iter(snapshot["manifests"].values()))
        resolved = manifest["resolved"]
        boost_entry = next((v for k, v in resolved.items() if "boost" in k), None)
        self.assertIsNotNone(boost_entry)
        self.assertEqual(boost_entry["package_url"], "pkg:generic/boost.filesystem@1.83.0")


if __name__ == "__main__":
    unittest.main()