Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ load("@score_tooling//:defs.bzl", "cli_helper", "copyright_checker")
load("//:docs.bzl", "docs")

package(default_visibility = ["//visibility:public"])
exports_files(["pyproject.toml"])

copyright_checker(
name = "copyright",
Expand Down
83 changes: 41 additions & 42 deletions docs.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def _missing_requirements(deps):
fail(msg)
fail("This case should be unreachable?!")

def docs(source_dir = "docs", data = [], deps = [], scan_code = []):
def docs(known_good = None, source_dir = "docs", data = [], deps = [], scan_code = []):
"""Creates all targets related to documentation.

By using this function, you'll get any and all updates for documentation targets in one place.
Expand Down Expand Up @@ -176,33 +176,43 @@ def docs(source_dir = "docs", data = [], deps = [], scan_code = []):
data_with_docs_sources = _rewrite_needs_json_to_docs_sources(data)
additional_combo_sourcelinks = _rewrite_needs_json_to_sourcelinks(data)
_merge_sourcelinks(name = "merged_sourcelinks", sourcelinks = [":sourcelinks_json"] + additional_combo_sourcelinks)
docs_data = data + [":sourcelinks_json"]
combo_data = data_with_docs_sources + [":merged_sourcelinks"]

docs_env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data),
"ACTION": "incremental",
"SCORE_SOURCELINKS": "$(location :sourcelinks_json)",
}
docs_sources_env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data_with_docs_sources),
"ACTION": "incremental",
"SCORE_SOURCELINKS": "$(location :merged_sourcelinks)",
}
if known_good:
docs_env["KNOWN_GOOD"] = "$(location "+ known_good + ")"
docs_sources_env["KNOWN_GOOD"] = "$(location "+ known_good + ")"
docs_data.append(known_good)
combo_data.append(known_good)

py_binary(
name = "docs",
tags = ["cli_help=Build documentation:\nbazel run //:docs"],
srcs = ["@score_docs_as_code//src:incremental.py"],
data = data + [":sourcelinks_json"],
data = docs_data,
deps = deps,
env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data),
"ACTION": "incremental",
"SCORE_SOURCELINKS": "$(location :sourcelinks_json)",
},
env = docs_env,
)

py_binary(
name = "docs_combo_experimental",
tags = ["cli_help=Build full documentation with all dependencies:\nbazel run //:docs_combo_experimental"],
srcs = ["@score_docs_as_code//src:incremental.py"],
data = data_with_docs_sources + [":merged_sourcelinks"],
data = combo_data,
deps = deps,
env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data_with_docs_sources),
"ACTION": "incremental",
"SCORE_SOURCELINKS": "$(location :merged_sourcelinks)",
},
env = docs_sources_env
)

py_binary(
Expand All @@ -222,42 +232,27 @@ def docs(source_dir = "docs", data = [], deps = [], scan_code = []):
name = "docs_check",
tags = ["cli_help=Verify documentation:\nbazel run //:docs_check"],
srcs = ["@score_docs_as_code//src:incremental.py"],
data = data + [":sourcelinks_json"],
data = docs_data,
deps = deps,
env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data),
"ACTION": "check",
"SCORE_SOURCELINKS": "$(location :sourcelinks_json)",
},
env = docs_env
)

py_binary(
name = "live_preview",
tags = ["cli_help=Live preview documentation in the browser:\nbazel run //:live_preview"],
srcs = ["@score_docs_as_code//src:incremental.py"],
data = data + [":sourcelinks_json"],
data = docs_data,
deps = deps,
env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data),
"ACTION": "live_preview",
"SCORE_SOURCELINKS": "$(location :sourcelinks_json)",
},
env = docs_env
)

py_binary(
name = "live_preview_combo_experimental",
tags = ["cli_help=Live preview full documentation with all dependencies in the browser:\nbazel run //:live_preview_combo_experimental"],
srcs = ["@score_docs_as_code//src:incremental.py"],
data = data_with_docs_sources + [":merged_sourcelinks"],
data = combo_data,
deps = deps,
env = {
"SOURCE_DIRECTORY": source_dir,
"DATA": str(data_with_docs_sources),
"ACTION": "live_preview",
"SCORE_SOURCELINKS": "$(location :merged_sourcelinks)",
},
env = docs_sources_env
)

score_virtualenv(
Expand Down Expand Up @@ -309,15 +304,19 @@ def _sourcelinks_json(name, srcs):
"""
output_file = name + ".json"

native.genrule(
name = name,
srcs = srcs,
outs = [output_file],
cmd = """
cmd = """
$(location @score_docs_as_code//scripts_bazel:generate_sourcelinks) \
--output $@ \
$(SRCS)
""",
"""

rule_srcs = srcs

native.genrule(
name = name,
srcs = rule_srcs,
outs = [output_file],
cmd = cmd,
tools = ["@score_docs_as_code//scripts_bazel:generate_sourcelinks"],
visibility = ["//visibility:public"],
)
44 changes: 44 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,47 @@ extend-exclude = [
"bazel-*",
".venv*/**",
]
[tool.pytest.ini_options]
log_cli = true
log_cli_level = "Debug"
log_cli_format = "[%(asctime)s.%(msecs)03d] [%(levelname)-3s] [%(name)s] %(message)s"
log_cli_date_format = "%Y-%m-%d %H:%M:%S"

log_format = "[%(asctime)s.%(msecs)03d] [%(levelname)-3s] [%(name)s] %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"

log_file_level = "Debug"
log_file_format = "[%(asctime)s.%(msecs)03d] [%(levelname)-3s] [%(name)s] %(message)s"
log_file_date_format = "%Y-%m-%d %H:%M:%S"

markers = [
"metadata",
"test_properties(dict): Add custom properties to test XML output",
]

norecursedirs = [
".*", # hidden folders like .git, .venv, .cache, etc.
"_build*", # common docs-as-code directory
"bazel-*", # Bazel output folders
]

junit_duration_report = "call"
junit_family = "xunit1"

filterwarnings = [
"ignore::pytest.PytestExperimentalApiWarning",
# Silence third-party deprecations from sphinx_needs targeting Python 3.14 removals.
# We'll drop these ignores once sphinx_needs releases a fix.
"ignore:.*deprecated.*Python 3\\.14.*:DeprecationWarning:sphinx_needs\\..*",
# Docutils is deprecating OptionParser in favor of argparse (0.21+).
# This one originates inside sphinx_needs.layout.
# We'll drop these ignores once sphinx/sphinx_needs releases a fix.
"ignore:^The frontend\\.OptionParser class will be replaced by a subclass of argparse\\.ArgumentParser in Docutils 0\\.21 or later\\.:DeprecationWarning:sphinx_needs\\.layout",
# This one bubbles up from stdlib optparse but is *explicitly* a Docutils message.
# We match the full message to avoid silencing unrelated optparse warnings.
# We'll drop these ignores once sphinx/sphinx_needs releases a fix.
"ignore:^The frontend\\.Option class will be removed in Docutils 0\\.21 or later\\.:DeprecationWarning:optparse",
]
pythonpath = [
"src/extensions/",
]
18 changes: 14 additions & 4 deletions scripts_bazel/generate_sourcelinks_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from src.extensions.score_source_code_linker.needlinks import (
store_source_code_links_json,
)
from src.helper_lib import get_runfiles_dir, parse_filename

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)
Expand All @@ -53,11 +54,20 @@ def main():
args = parser.parse_args()

all_need_references = []
for file_path in args.files:
abs_file_path = file_path.resolve()
assert abs_file_path.exists(), abs_file_path

all_files = args.files

runfiles_dir = get_runfiles_dir()
for raw_file_path in all_files:
assert raw_file_path.exists(), raw_file_path
prefix, module_name, file_path, file_name = parse_filename(
raw_file_path, runfiles_dir
)
references = _extract_references_from_file(
abs_file_path.parent, Path(abs_file_path.name)
prefix=prefix,
file_name=file_name,
file_path=Path(file_path),
module_name=module_name,
)
all_need_references.extend(references)

Expand Down
2 changes: 2 additions & 0 deletions scripts_bazel/tests/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ score_py_pytest(
"//scripts_bazel:generate_sourcelinks",
"//src/extensions/score_source_code_linker",
] + all_requirements,
pytest_config = "//:pyproject.toml",
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe we should also move this to a different PR?

Had to enable pyproject.toml because the pytest.ini from score_pytest made it impossible to run tests in ref-integration.

Though strictly speaking, this has nothing to do with this PR.

)

score_py_pytest(
Expand All @@ -29,4 +30,5 @@ score_py_pytest(
deps = [
"//scripts_bazel:merge_sourcelinks",
] + all_requirements,
pytest_config = "//:pyproject.toml",
)
1 change: 1 addition & 0 deletions src/extensions/score_metamodel/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -66,4 +66,5 @@ score_py_pytest(
],
) + ["tests/rst/conf.py"],
deps = [":score_metamodel"],
pytest_config = "//:pyproject.toml"
)
1 change: 1 addition & 0 deletions src/extensions/score_source_code_linker/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -75,4 +75,5 @@ score_py_pytest(
":score_source_code_linker",
"//src/extensions/score_metamodel",
],
pytest_config = "//:pyproject.toml",
)
31 changes: 25 additions & 6 deletions src/extensions/score_source_code_linker/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,10 @@
find_git_root,
find_ws_root,
)
from src.helper_lib.additional_functions import get_github_link
from src.helper_lib.additional_functions import (
get_github_link,
get_module_has_from_known_good_json,
)

LOGGER = get_logger(__name__)
# Uncomment this to enable more verbose logging
Expand Down Expand Up @@ -355,20 +358,35 @@ def inject_links_into_needs(app: Sphinx, env: BuildEnvironment) -> None:
source_code_links_by_need = load_source_code_links_combined_json(
get_cache_filename(app.outdir, "score_scl_grouped_cache.json")
)

if known_good_path := os.getenv("KNOWN_GOOD"):
module_hash_mapping = get_module_has_from_known_good_json(Path(known_good_path))
else:
module_hash_mapping = None
for source_code_links in source_code_links_by_need:
need = find_need(needs_copy, source_code_links.need)
if need is None:
# TODO: print github annotations as in https://github.com/eclipse-score/bazel_registry/blob/7423b9996a45dd0a9ec868e06a970330ee71cf4f/tools/verify_semver_compatibility_level.py#L126-L129
for n in source_code_links.links.CodeLinks:
full_file_path = (
f"{n.module}/{n.path}/{n.file}"
if n.module
else f"{n.path}/{n.file}"
)
LOGGER.warning(
f"{n.file}:{n.line}: Could not find {source_code_links.need} "
f"{full_file_path}:{n.line}: "
f"Could not find {source_code_links.need} "
"in documentation [CODE LINK]",
type="score_source_code_linker",
)
for n in source_code_links.links.TestLinks:
full_file_path = (
f"{n.module}/{n.path}/{n.file}"
if n.module
else f"{n.path}/{n.file}"
)
LOGGER.warning(
f"{n.file}:{n.line}: Could not find {source_code_links.need} "
f"{full_file_path}:{n.line}: "
f"Could not find {source_code_links.need} "
"in documentation [TEST LINK]",
type="score_source_code_linker",
)
Expand All @@ -377,11 +395,12 @@ def inject_links_into_needs(app: Sphinx, env: BuildEnvironment) -> None:
need_as_dict = cast(dict[str, object], need)

need_as_dict["source_code_link"] = ", ".join(
f"{get_github_link(n)}<>{n.file}:{n.line}"
f"{get_github_link(n, module_hash_mapping)}<>{n.file}:{n.line}"
for n in source_code_links.links.CodeLinks
)
need_as_dict["testlink"] = ", ".join(
f"{get_github_link(n)}<>{n.name}" for n in source_code_links.links.TestLinks
f"{get_github_link(n, module_hash_mapping)}<>{n.name}"
for n in source_code_links.links.TestLinks
)

# NOTE: Removing & adding the need is important to make sure
Expand Down
Loading
Loading