From faaf654b99583421c1d7f521129bb52d37a5361d Mon Sep 17 00:00:00 2001 From: Ian Pascoe Date: Sun, 1 Mar 2026 08:17:21 -0500 Subject: [PATCH 1/6] feat(cli): add bundle workflows and dependency explainability --- AGENTS.md | 1 + Cargo.lock | 2 + README.md | 21 +- crates/crosspack-cli/Cargo.toml | 2 + crates/crosspack-cli/src/bundle_flows.rs | 485 ++++++++ crates/crosspack-cli/src/command_flows.rs | 583 ++++++++- crates/crosspack-cli/src/core_flows.rs | 267 ++++- crates/crosspack-cli/src/dispatch.rs | 58 +- crates/crosspack-cli/src/main.rs | 71 +- crates/crosspack-cli/src/metadata.rs | 16 +- crates/crosspack-cli/src/tests.rs | 1303 +++++++++++++++++++-- crates/crosspack-core/src/lib.rs | 2 +- crates/crosspack-core/src/manifest.rs | 15 + crates/crosspack-core/src/tests.rs | 51 + docs/architecture.md | 9 +- docs/manifest-spec.md | 11 + 16 files changed, 2745 insertions(+), 152 deletions(-) create mode 100644 crates/crosspack-cli/src/bundle_flows.rs diff --git a/AGENTS.md b/AGENTS.md index 54171b2..9e1d45a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -74,3 +74,4 @@ scripts/validate-snapshot-flow.sh ## USER PREFERENCES - Write planning/design documents to `.agents/plans/` instead of `docs/plans/`. +- Avoid Homebrew-specific terminology (`tap`, `cask`) in Crosspack UX; prefer Crosspack-native naming. diff --git a/Cargo.lock b/Cargo.lock index 9a12f5a..647c892 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -226,7 +226,9 @@ dependencies = [ "indicatif", "reqwest", "semver", + "serde", "serde_json", + "toml", ] [[package]] diff --git a/README.md b/README.md index b0e6d27..c08826c 100644 --- a/README.md +++ b/README.md @@ -36,6 +36,8 @@ Anything described as v0.4/v0.5 in docs is roadmap design work and is **not** pa ## Current capabilities - Search and inspect package metadata from verified local snapshots. +- Explain installed dependency relationships with `depends`, `uses`, and `why`. +- Audit available upgrades with `outdated`. 
- Configure multiple registry sources with deterministic precedence. - Install packages with transitive dependency resolution and target selection. - Install package-declared shell completion files (bash/zsh/fish/powershell) into Crosspack-managed completion directories. @@ -43,6 +45,7 @@ Anything described as v0.4/v0.5 in docs is roadmap design work and is **not** pa - Enforce per-package version pins. - Upgrade single packages or all installed roots. - Uninstall with dependency-aware blocking and orphan pruning. +- Manage artifact cache lifecycle with `cache list`, `cache gc`, and `cache prune`. - Recover transaction state with `rollback`, `repair`, and `doctor`. ## Prerequisites @@ -175,11 +178,25 @@ cargo run -p crosspack-cli -- --registry-root /path/to/registry install ripgrep |---|---| | `search ` | Search package names. | | `info ` | Show versions and policy metadata for a package. | -| `install [--target ] [--dry-run] [--force-redownload] [--provider ]` | Resolve and install a package graph. `--dry-run` prints a deterministic transaction preview without mutating state. | -| `upgrade [name[@constraint]] [--dry-run] [--provider ]` | Upgrade one package or all installed root packages. `--dry-run` prints a deterministic transaction preview without mutating state. | +| `install [--target ] [--dry-run] [--explain] [--build-from-source] [--force-redownload] [--provider ]` | Resolve and install a package graph. `--dry-run` prints deterministic transaction preview lines; `--explain` adds deterministic policy explainability lines in dry-run mode only. `--build-from-source` is currently a guarded non-GA flag and fails closed. | +| `upgrade [name[@constraint]] [--dry-run] [--explain] [--provider ]` | Upgrade one package or all installed root packages. `--dry-run` prints deterministic transaction preview lines; `--explain` adds deterministic policy explainability lines in dry-run mode only. | | `pin ` | Pin a package version constraint. 
| +| `outdated` | Show installed packages with newer versions available in configured metadata snapshots. | +| `depends ` | Show recorded dependency names for an installed package. | +| `uses ` | Show installed packages that currently depend on a package. | +| `why ` | Explain why an installed package exists by showing a root dependency path when applicable. | +| `bundle export [--output ]` | Export a deterministic environment bundle from installed roots and pins. | +| `bundle apply [--file ] [--dry-run] [--explain] [--build-from-source] [--force-redownload] [--provider ]` | Apply a bundle as install roots. `--dry-run` preserves transaction preview contracts; `--explain` is additive in dry-run mode only. `--build-from-source` is currently a guarded non-GA flag and fails closed. | | `uninstall ` | Remove a package when not required by remaining roots and prune orphan dependencies. | | `list` | List installed packages. | +| `services list` | List managed service states for installed packages with Crosspack service-state records. | +| `services status ` | Show managed service state (`running`/`stopped`) for an installed package. | +| `services start ` | Set managed service state to `running` for an installed package. | +| `services stop ` | Set managed service state to `stopped` for an installed package. | +| `services restart ` | Set managed service state to `running` for an installed package. | +| `cache list` | List cached artifact files and sizes. | +| `cache gc` | Remove unreferenced artifact cache files while retaining receipt-referenced files. | +| `cache prune` | Remove all artifact cache files. | | `registry add --kind --priority --fingerprint <64-hex>` | Add a trusted source. | | `registry list` | List configured sources and snapshot state. | | `registry remove [--purge-cache]` | Remove a source and optionally purge cached snapshots. 
| diff --git a/crates/crosspack-cli/Cargo.toml b/crates/crosspack-cli/Cargo.toml index b5085f5..8c0fd3b 100644 --- a/crates/crosspack-cli/Cargo.toml +++ b/crates/crosspack-cli/Cargo.toml @@ -26,6 +26,8 @@ indicatif.workspace = true semver.workspace = true serde_json.workspace = true reqwest.workspace = true +serde.workspace = true +toml.workspace = true [dev-dependencies] ed25519-dalek.workspace = true diff --git a/crates/crosspack-cli/src/bundle_flows.rs b/crates/crosspack-cli/src/bundle_flows.rs new file mode 100644 index 0000000..36a3a79 --- /dev/null +++ b/crates/crosspack-cli/src/bundle_flows.rs @@ -0,0 +1,485 @@ +const BUNDLE_FORMAT_MARKER: &str = "crosspack.bundle"; +const BUNDLE_FORMAT_VERSION: u32 = 1; +const DEFAULT_BUNDLE_FILE: &str = "crosspack.bundle.toml"; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(deny_unknown_fields)] +struct BundleDocument { + format: String, + version: u32, + #[serde(default)] + roots: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + snapshot_context: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(deny_unknown_fields)] +struct BundleRoot { + name: String, + #[serde(skip_serializing_if = "Option::is_none")] + target: Option, + #[serde(skip_serializing_if = "Option::is_none")] + requirement: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(deny_unknown_fields)] +struct BundleSnapshotContext { + sources: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(deny_unknown_fields)] +struct BundleSnapshotSource { + name: String, + enabled: bool, + snapshot: String, +} + +#[derive(Debug, Clone)] +struct BundleApplyGroupPlan { + target: Option, + roots: Vec, + root_names: Vec, + resolved: Vec, +} + +#[derive(Debug, Clone)] +struct BundleApplyOptions<'a> { + file: Option<&'a Path>, + dry_run: bool, + explain: bool, + build_from_source: bool, + force_redownload: bool, + provider_values: &'a 
[String], +} + +fn default_bundle_file_path() -> PathBuf { + PathBuf::from(DEFAULT_BUNDLE_FILE) +} + +fn run_bundle_command( + layout: &PrefixLayout, + registry_root: Option<&Path>, + command: BundleCommands, +) -> Result<()> { + match command { + BundleCommands::Export { output } => run_bundle_export_command(layout, output.as_deref()), + BundleCommands::Apply { + file, + dry_run, + explain, + build_from_source, + force_redownload, + provider, + } => run_bundle_apply_command( + layout, + registry_root, + BundleApplyOptions { + file: file.as_deref(), + dry_run, + explain, + build_from_source, + force_redownload, + provider_values: &provider, + }, + ), + } +} + +fn run_bundle_export_command(layout: &PrefixLayout, output: Option<&Path>) -> Result<()> { + layout.ensure_base_dirs()?; + let bundle = build_export_bundle_document(layout)?; + let rendered = render_bundle_document(&bundle)?; + + match output { + Some(path) => { + if let Some(parent) = path + .parent() + .filter(|parent| !parent.as_os_str().is_empty()) + { + fs::create_dir_all(parent).with_context(|| { + format!( + "failed creating bundle output directory: {}", + parent.display() + ) + })?; + } + fs::write(path, rendered) + .with_context(|| format!("failed writing bundle file: {}", path.display()))?; + println!("bundle exported: {}", path.display()); + } + None => { + print!("{rendered}"); + } + } + + Ok(()) +} + +fn run_bundle_apply_command( + layout: &PrefixLayout, + registry_root: Option<&Path>, + options: BundleApplyOptions<'_>, +) -> Result<()> { + ensure_explain_requires_dry_run("bundle apply", options.dry_run, options.explain)?; + ensure_build_from_source_not_supported("bundle apply", options.build_from_source)?; + layout.ensure_base_dirs()?; + ensure_no_active_transaction_for(layout, "bundle apply")?; + let provider_overrides = parse_provider_overrides(options.provider_values)?; + + let bundle_path = options + .file + .map(Path::to_path_buf) + .unwrap_or_else(default_bundle_file_path); + let bundle = 
load_bundle_document_from_path(&bundle_path)?; + + let backend = select_metadata_backend(registry_root, layout)?; + let group_plans = build_bundle_apply_group_plans( + layout, + &backend, + &bundle, + &provider_overrides, + options.build_from_source, + )?; + let receipts = read_install_receipts(layout)?; + for plan in &group_plans { + for package in &plan.resolved { + validate_install_preflight_for_resolved(layout, package, &receipts)?; + } + } + + let mut planned_changes = Vec::new(); + for plan in &group_plans { + planned_changes.extend(build_planned_package_changes(&plan.resolved, &receipts)?); + } + + if options.dry_run { + let preview = build_transaction_preview("bundle-apply", &planned_changes); + let mut explainability = DependencyPolicyExplainability::default(); + if options.explain { + for plan in &group_plans { + merge_dependency_policy_explainability( + &mut explainability, + build_dependency_policy_explainability(&plan.resolved, &receipts, &plan.roots)?, + ); + } + } + for line in render_dry_run_output_lines( + &preview, + TransactionPreviewMode::DryRun, + options.explain.then_some(&explainability), + ) { + println!("{line}"); + } + return Ok(()); + } + + let output_style = current_output_style(); + let install_progress_mode = current_install_progress_mode(output_style); + let snapshot_id = match registry_root { + Some(_) => None, + None => Some(resolve_transaction_snapshot_id(layout, "bundle-apply")?), + }; + + execute_with_transaction(layout, "bundle-apply", snapshot_id.as_deref(), |tx| { + let interaction_policy = InstallInteractionPolicy::default(); + let mut journal_seq = 1_u64; + for plan in &group_plans { + append_transaction_journal_entry( + layout, + &tx.txid, + &TransactionJournalEntry { + seq: journal_seq, + step: format!("resolve_plan:{}", plan.target.as_deref().unwrap_or("host")), + state: "done".to_string(), + path: plan.target.clone(), + }, + )?; + journal_seq += 1; + + let planned_dependency_overrides = 
build_planned_dependency_overrides(&plan.resolved); + for package in &plan.resolved { + let snapshot_path = + capture_package_state_snapshot(layout, &tx.txid, &package.manifest.name)?; + append_transaction_journal_entry( + layout, + &tx.txid, + &TransactionJournalEntry { + seq: journal_seq, + step: format!("backup_package_state:{}", package.manifest.name), + state: "done".to_string(), + path: Some(snapshot_path.display().to_string()), + }, + )?; + journal_seq += 1; + + append_transaction_journal_entry( + layout, + &tx.txid, + &TransactionJournalEntry { + seq: journal_seq, + step: package_apply_step_name( + "install", + &package.manifest.name, + install_mode_for_archive_type(package.archive_type), + ), + state: "done".to_string(), + path: Some(package.manifest.name.clone()), + }, + )?; + journal_seq += 1; + + let dependencies = build_dependency_receipts(package, &plan.resolved); + let outcome = install_resolved( + layout, + package, + &dependencies, + &plan.root_names, + &planned_dependency_overrides, + InstallResolvedOptions { + snapshot_id: snapshot_id.as_deref(), + force_redownload: options.force_redownload, + interaction_policy, + install_progress_mode, + }, + )?; + print_install_outcome(&outcome, output_style); + } + } + + append_transaction_journal_entry( + layout, + &tx.txid, + &TransactionJournalEntry { + seq: journal_seq, + step: "apply_complete".to_string(), + state: "done".to_string(), + path: None, + }, + )?; + + Ok(()) + })?; + + if let Err(err) = sync_completion_assets_best_effort(layout, "bundle-apply") { + eprintln!("{err}"); + } + + Ok(()) +} + +fn build_export_bundle_document(layout: &PrefixLayout) -> Result { + let receipts = read_install_receipts(layout)?; + let pins = read_all_pins(layout)?; + + let mut roots = receipts + .into_iter() + .filter(|receipt| receipt.install_reason == InstallReason::Root) + .map(|receipt| BundleRoot { + requirement: pins + .get(&receipt.name) + .cloned() + .or_else(|| Some(format!("={}", receipt.version))), + name: 
receipt.name, + target: receipt.target, + }) + .collect::>(); + roots.sort_by(|left, right| { + left.name + .cmp(&right.name) + .then_with(|| left.target.cmp(&right.target)) + .then_with(|| left.requirement.cmp(&right.requirement)) + }); + + let snapshot_context = load_bundle_snapshot_context_best_effort(layout); + + Ok(BundleDocument { + format: BUNDLE_FORMAT_MARKER.to_string(), + version: BUNDLE_FORMAT_VERSION, + roots, + snapshot_context, + }) +} + +fn render_bundle_document(bundle: &BundleDocument) -> Result { + let mut rendered = toml::to_string_pretty(bundle).context("failed rendering bundle as TOML")?; + if !rendered.ends_with('\n') { + rendered.push('\n'); + } + Ok(rendered) +} + +fn load_bundle_snapshot_context_best_effort( + layout: &PrefixLayout, +) -> Option { + let source_state_root = registry_state_root(layout); + let store = RegistrySourceStore::new(&source_state_root); + let mut sources = match store.list_sources_with_snapshot_state() { + Ok(sources) => sources, + Err(_) => return None, + }; + if sources.is_empty() { + return None; + } + + sources.sort_by(|left, right| left.source.name.cmp(&right.source.name)); + let mapped = sources + .into_iter() + .map(|source| BundleSnapshotSource { + name: source.source.name, + enabled: source.source.enabled, + snapshot: bundle_snapshot_token(&source.snapshot), + }) + .collect::>(); + Some(BundleSnapshotContext { sources: mapped }) +} + +fn bundle_snapshot_token(snapshot: &RegistrySourceSnapshotState) -> String { + match snapshot { + RegistrySourceSnapshotState::Ready { snapshot_id } => format!("ready:{snapshot_id}"), + RegistrySourceSnapshotState::None => "none".to_string(), + RegistrySourceSnapshotState::Error { reason_code, .. 
} => format!("error:{reason_code}"), + } +} + +fn parse_bundle_document(raw: &str) -> Result { + let bundle: BundleDocument = toml::from_str(raw).context("failed parsing bundle TOML")?; + validate_bundle_document(&bundle)?; + Ok(bundle) +} + +fn load_bundle_document_from_path(path: &Path) -> Result { + let raw = match fs::read_to_string(path) { + Ok(raw) => raw, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + return Err(anyhow!( + "bundle file not found: {} (use --file or create {})", + path.display(), + DEFAULT_BUNDLE_FILE + )); + } + Err(err) => { + return Err(err) + .with_context(|| format!("failed reading bundle file: {}", path.display())); + } + }; + parse_bundle_document(&raw).with_context(|| format!("invalid bundle file: {}", path.display())) +} + +fn validate_bundle_document(bundle: &BundleDocument) -> Result<()> { + if bundle.format != BUNDLE_FORMAT_MARKER { + return Err(anyhow!( + "unsupported bundle format marker '{}': expected '{}'", + bundle.format, + BUNDLE_FORMAT_MARKER + )); + } + if bundle.version != BUNDLE_FORMAT_VERSION { + return Err(anyhow!( + "unsupported bundle format version '{}': expected {}", + bundle.version, + BUNDLE_FORMAT_VERSION + )); + } + if bundle.roots.is_empty() { + return Err(anyhow!("bundle must include at least one root package")); + } + + let mut seen = BTreeSet::new(); + for root in &bundle.roots { + if !is_policy_token(&root.name) { + return Err(anyhow!( + "invalid bundle root package '{}': expected package-name grammar", + root.name + )); + } + if let Some(requirement) = &root.requirement { + VersionReq::parse(requirement).with_context(|| { + format!( + "invalid bundle requirement for '{}' in bundle: {}", + root.name, requirement + ) + })?; + } + + if !seen.insert((root.name.clone(), root.target.clone())) { + return Err(anyhow!( + "duplicate bundle root entry for package '{}' target '{}'", + root.name, + root.target.as_deref().unwrap_or("host") + )); + } + } + + Ok(()) +} + +fn 
build_bundle_apply_group_plans( + layout: &PrefixLayout, + backend: &MetadataBackend, + bundle: &BundleDocument, + provider_overrides: &BTreeMap, + build_from_source: bool, +) -> Result> { + let mut grouped_roots = BTreeMap::, Vec>::new(); + for root in &bundle.roots { + let requirement = root.requirement.as_deref().unwrap_or("*"); + grouped_roots + .entry(root.target.clone()) + .or_default() + .push(RootInstallRequest { + name: root.name.clone(), + requirement: VersionReq::parse(requirement).with_context(|| { + format!( + "invalid bundle requirement for '{}' in bundle: {}", + root.name, requirement + ) + })?, + }); + } + + let mut plans = Vec::new(); + let mut resolved_dependency_tokens = HashSet::new(); + for (target, mut roots) in grouped_roots { + roots.sort_by(|left, right| left.name.cmp(&right.name)); + let root_names = roots + .iter() + .map(|root| root.name.clone()) + .collect::>(); + let (resolved, plan_tokens) = resolve_install_graph_with_tokens( + layout, + backend, + &roots, + target.as_deref(), + provider_overrides, + false, + build_from_source, + )?; + resolved_dependency_tokens.extend(plan_tokens); + plans.push(BundleApplyGroupPlan { + target, + roots, + root_names, + resolved, + }); + } + + validate_provider_overrides_used(provider_overrides, &resolved_dependency_tokens)?; + let overlap_check = plans + .iter() + .map(|plan| { + ( + plan.target.as_deref(), + plan.resolved + .iter() + .map(|package| package.manifest.name.clone()) + .collect::>(), + ) + }) + .collect::>(); + enforce_disjoint_multi_target_upgrade(&overlap_check)?; + plans.sort_by(|left, right| left.target.cmp(&right.target)); + Ok(plans) +} diff --git a/crates/crosspack-cli/src/command_flows.rs b/crates/crosspack-cli/src/command_flows.rs index cd04873..9d699ab 100644 --- a/crates/crosspack-cli/src/command_flows.rs +++ b/crates/crosspack-cli/src/command_flows.rs @@ -3,14 +3,570 @@ fn ensure_upgrade_command_ready(layout: &PrefixLayout) -> Result<()> { 
ensure_no_active_transaction_for(layout, "upgrade") } +fn run_outdated_command(layout: &PrefixLayout, registry_root: Option<&Path>) -> Result<()> { + let backend = select_metadata_backend(registry_root, layout)?; + let receipts = read_install_receipts(layout)?; + if receipts.is_empty() { + println!("No installed packages"); + return Ok(()); + } + + let mut rows = Vec::new(); + for receipt in receipts { + let installed_version = match Version::parse(&receipt.version) { + Ok(version) => version, + Err(_) => { + rows.push(format!( + "{}\t{}\tunknown\tinvalid-installed-version", + receipt.name, receipt.version + )); + continue; + } + }; + + let Some((source, manifests)) = backend.package_versions_with_source(&receipt.name)? else { + continue; + }; + let Some(latest) = manifests.first() else { + continue; + }; + + if latest.version > installed_version { + rows.push(format!( + "{}\t{}\t{}\t{}", + receipt.name, receipt.version, latest.version, source + )); + } + } + + rows.sort(); + if rows.is_empty() { + println!("All installed packages are up to date"); + return Ok(()); + } + + println!("name\tinstalled\tlatest\tsource"); + for row in rows { + println!("{row}"); + } + Ok(()) +} + +fn parse_receipt_dependency_name(entry: &str) -> Option<&str> { + entry.split_once('@').map(|(name, _)| name) +} + +fn run_depends_command(layout: &PrefixLayout, name: &str) -> Result<()> { + let receipts = read_install_receipts(layout)?; + let Some(target) = receipts.iter().find(|receipt| receipt.name == name) else { + println!("No installed package found: {name}"); + return Ok(()); + }; + + let mut deps = target + .dependencies + .iter() + .filter_map(|entry| parse_receipt_dependency_name(entry)) + .map(ToString::to_string) + .collect::>(); + deps.sort(); + deps.dedup(); + + if deps.is_empty() { + println!("{name} has no recorded dependencies"); + return Ok(()); + } + + println!("{name} dependency_count={}", deps.len()); + for dependency in deps { + println!("dependency {dependency}"); + } + 
Ok(()) +} + +fn run_uses_command(layout: &PrefixLayout, name: &str) -> Result<()> { + let receipts = read_install_receipts(layout)?; + let mut users = Vec::new(); + for receipt in receipts { + if receipt + .dependencies + .iter() + .filter_map(|entry| parse_receipt_dependency_name(entry)) + .any(|dependency_name| dependency_name == name) + { + users.push(receipt.name); + } + } + + users.sort(); + users.dedup(); + + if users.is_empty() { + println!("{name} is not required by any installed package"); + return Ok(()); + } + + println!("{name} reverse_dependency_count={}", users.len()); + for user in users { + println!("required_by {user}"); + } + Ok(()) +} + +fn run_why_command(layout: &PrefixLayout, name: &str) -> Result<()> { + let receipts = read_install_receipts(layout)?; + let receipt_map = receipts + .iter() + .map(|receipt| (receipt.name.clone(), receipt)) + .collect::>(); + let Some(target) = receipt_map.get(name) else { + println!("No installed package found: {name}"); + return Ok(()); + }; + + if target.install_reason == InstallReason::Root { + println!("{name} is installed as a root package"); + return Ok(()); + } + + let mut roots = receipts + .iter() + .filter(|receipt| receipt.install_reason == InstallReason::Root) + .map(|receipt| receipt.name.clone()) + .collect::>(); + roots.sort(); + + if let Some(path) = find_dependency_path_from_roots(name, &roots, &receipt_map) { + println!("dependency path: {}", path.join(" -> ")); + return Ok(()); + } + + println!("no root dependency path found for {name}"); + Ok(()) +} + +fn find_dependency_path_from_roots( + target: &str, + roots: &[String], + receipt_map: &HashMap, +) -> Option> { + let mut queue = std::collections::VecDeque::new(); + for root in roots { + queue.push_back(vec![root.clone()]); + } + + let mut visited = HashSet::new(); + while let Some(path) = queue.pop_front() { + let current = path.last()?.clone(); + if current == target { + return Some(path); + } + if !visited.insert(current.clone()) { + 
continue; + } + + let Some(receipt) = receipt_map.get(¤t) else { + continue; + }; + let mut dependencies = receipt + .dependencies + .iter() + .filter_map(|entry| parse_receipt_dependency_name(entry)) + .map(ToString::to_string) + .collect::>(); + dependencies.sort(); + dependencies.dedup(); + + for dependency in dependencies { + let mut next_path = path.clone(); + next_path.push(dependency); + queue.push_back(next_path); + } + } + + None +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum ManagedServiceState { + Stopped, + Running, +} + +impl ManagedServiceState { + fn as_str(self) -> &'static str { + match self { + Self::Stopped => "stopped", + Self::Running => "running", + } + } + + fn from_str(raw: &str) -> Option { + match raw { + "stopped" => Some(Self::Stopped), + "running" => Some(Self::Running), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct ManagedServiceRow { + name: String, + state: ManagedServiceState, +} + +fn managed_services_state_dir(layout: &PrefixLayout) -> PathBuf { + layout.state_dir().join("services") +} + +fn managed_service_state_path(layout: &PrefixLayout, name: &str) -> PathBuf { + managed_services_state_dir(layout).join(format!("{name}.service")) +} + +fn validate_service_name(name: &str) -> Result<()> { + if !is_policy_token(name) { + return Err(anyhow!( + "invalid service name '{name}': use package-token grammar" + )); + } + Ok(()) +} + +fn ensure_service_package_installed(layout: &PrefixLayout, name: &str) -> Result<()> { + validate_service_name(name)?; + let installed = read_install_receipts(layout)? + .iter() + .any(|receipt| receipt.name == name); + if !installed { + return Err(anyhow!( + "No installed package found: {name}. 
Install it first with `crosspack install {name}`" + )); + } + Ok(()) +} + +fn read_managed_service_state(layout: &PrefixLayout, name: &str) -> Result { + validate_service_name(name)?; + let path = managed_service_state_path(layout, name); + let raw = match std::fs::read_to_string(&path) { + Ok(raw) => raw, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + return Ok(ManagedServiceState::Stopped); + } + Err(err) => { + return Err(err) + .with_context(|| format!("failed reading service state file: {}", path.display())); + } + }; + + let mut parsed_state = None; + for line in raw.lines().map(str::trim).filter(|line| !line.is_empty()) { + let Some(value) = line.strip_prefix("state=") else { + return Err(anyhow!( + "invalid service state file format: {}", + path.display() + )); + }; + let Some(state) = ManagedServiceState::from_str(value) else { + return Err(anyhow!( + "invalid service state '{value}' in {}", + path.display() + )); + }; + if parsed_state.is_some() { + return Err(anyhow!( + "duplicate service state entries in {}", + path.display() + )); + } + parsed_state = Some(state); + } + + parsed_state.ok_or_else(|| anyhow!("missing service state in {}", path.display())) +} + +fn write_managed_service_state( + layout: &PrefixLayout, + name: &str, + state: ManagedServiceState, +) -> Result { + validate_service_name(name)?; + let state_dir = managed_services_state_dir(layout); + std::fs::create_dir_all(&state_dir).with_context(|| { + format!( + "failed creating service state directory: {}", + state_dir.display() + ) + })?; + + let path = managed_service_state_path(layout, name); + std::fs::write(&path, format!("state={}\n", state.as_str())) + .with_context(|| format!("failed writing service state file: {}", path.display()))?; + Ok(path) +} + +fn collect_managed_service_rows(layout: &PrefixLayout) -> Result> { + let installed = read_install_receipts(layout)? 
+ .into_iter() + .map(|receipt| receipt.name) + .collect::>(); + + let state_root = managed_services_state_dir(layout); + let entries = match std::fs::read_dir(&state_root) { + Ok(entries) => entries, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Vec::new()), + Err(err) => { + return Err(err).with_context(|| { + format!( + "failed reading services state directory: {}", + state_root.display() + ) + }) + } + }; + + let mut rows = Vec::new(); + for entry in entries { + let entry = entry.with_context(|| { + format!( + "failed iterating services state directory: {}", + state_root.display() + ) + })?; + let file_type = entry.file_type().with_context(|| { + format!( + "failed reading service state entry metadata: {}", + entry.path().display() + ) + })?; + if !file_type.is_file() { + continue; + } + + let file_name = entry.file_name(); + let Some(name) = file_name + .to_str() + .and_then(|value| value.strip_suffix(".service")) + else { + continue; + }; + + if !installed.contains(name) { + continue; + } + + rows.push(ManagedServiceRow { + name: name.to_string(), + state: read_managed_service_state(layout, name)?, + }); + } + + rows.sort_by(|left, right| left.name.cmp(&right.name)); + Ok(rows) +} + +fn run_service_status_command(layout: &PrefixLayout, name: &str) -> Result<()> { + ensure_service_package_installed(layout, name)?; + let state = read_managed_service_state(layout, name)?; + println!("service_state name={name} state={}", state.as_str()); + Ok(()) +} + +fn run_service_start_command(layout: &PrefixLayout, name: &str) -> Result<()> { + ensure_service_package_installed(layout, name)?; + write_managed_service_state(layout, name, ManagedServiceState::Running)?; + println!("service_state name={name} state=running action=start"); + Ok(()) +} + +fn run_service_stop_command(layout: &PrefixLayout, name: &str) -> Result<()> { + ensure_service_package_installed(layout, name)?; + write_managed_service_state(layout, name, ManagedServiceState::Stopped)?; 
+ println!("service_state name={name} state=stopped action=stop"); + Ok(()) +} + +fn run_service_restart_command(layout: &PrefixLayout, name: &str) -> Result<()> { + ensure_service_package_installed(layout, name)?; + write_managed_service_state(layout, name, ManagedServiceState::Running)?; + println!("service_state name={name} state=running action=restart"); + Ok(()) +} + +fn run_services_command(layout: &PrefixLayout, command: ServicesCommands) -> Result<()> { + layout.ensure_base_dirs()?; + match command { + ServicesCommands::List => { + let rows = collect_managed_service_rows(layout)?; + if rows.is_empty() { + println!("No managed services"); + } else { + for row in rows { + println!("{} {}", row.name, row.state.as_str()); + } + } + } + ServicesCommands::Status { name } => run_service_status_command(layout, &name)?, + ServicesCommands::Start { name } => run_service_start_command(layout, &name)?, + ServicesCommands::Stop { name } => run_service_stop_command(layout, &name)?, + ServicesCommands::Restart { name } => run_service_restart_command(layout, &name)?, + } + Ok(()) +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct CacheFileEntry { + path: PathBuf, + size: u64, +} + +fn run_cache_command(layout: &PrefixLayout, command: CacheCommands) -> Result<()> { + layout.ensure_base_dirs()?; + match command { + CacheCommands::List => run_cache_list_command(layout), + CacheCommands::Prune => run_cache_prune_command(layout), + CacheCommands::Gc => run_cache_gc_command(layout), + } +} + +fn run_cache_list_command(layout: &PrefixLayout) -> Result<()> { + let cache_root = layout.artifacts_cache_dir(); + let mut entries = collect_cache_files(&cache_root)?; + entries.sort_by(|left, right| left.path.cmp(&right.path)); + + if entries.is_empty() { + println!("cache is empty"); + return Ok(()); + } + + println!("path\tbytes"); + for entry in entries { + let relative = entry + .path + .strip_prefix(layout.prefix()) + .unwrap_or(&entry.path) + .display() + .to_string(); + 
println!("{}\t{}", relative, entry.size); + } + Ok(()) +} + +fn run_cache_prune_command(layout: &PrefixLayout) -> Result<()> { + let cache_root = layout.artifacts_cache_dir(); + let entries = collect_cache_files(&cache_root)?; + let removed_files = entries.len(); + let removed_bytes = entries.iter().map(|entry| entry.size).sum::(); + + if cache_root.exists() { + fs::remove_dir_all(&cache_root).with_context(|| { + format!("failed to remove cache directory: {}", cache_root.display()) + })?; + } + fs::create_dir_all(&cache_root).with_context(|| { + format!( + "failed to recreate cache directory: {}", + cache_root.display() + ) + })?; + + println!("cache prune removed_files={removed_files} removed_bytes={removed_bytes}"); + Ok(()) +} + +fn run_cache_gc_command(layout: &PrefixLayout) -> Result<()> { + let cache_root = layout.artifacts_cache_dir(); + let mut entries = collect_cache_files(&cache_root)?; + entries.sort_by(|left, right| left.path.cmp(&right.path)); + + let receipts = read_install_receipts(layout)?; + let referenced = receipts + .iter() + .filter_map(|receipt| receipt.cache_path.as_deref()) + .filter_map(|cache_path| safe_artifact_cache_path(layout, cache_path)) + .collect::>(); + + let mut removed_files = 0_u64; + let mut removed_bytes = 0_u64; + for entry in entries { + if referenced.contains(&entry.path) { + continue; + } + remove_file_if_exists(&entry.path) + .with_context(|| format!("failed to remove cache file: {}", entry.path.display()))?; + removed_files += 1; + removed_bytes += entry.size; + } + + let kept_files = referenced.iter().filter(|path| path.exists()).count(); + println!( + "cache gc removed_files={} removed_bytes={} kept_files={}", + removed_files, removed_bytes, kept_files + ); + Ok(()) +} + +fn safe_artifact_cache_path(layout: &PrefixLayout, cache_path: &str) -> Option { + let path = PathBuf::from(cache_path); + if !path.is_absolute() { + return None; + } + if path + .components() + .any(|component| matches!(component, 
Component::ParentDir))
+    {
+        return None;
+    }
+    if !path.starts_with(layout.artifacts_cache_dir()) {
+        return None;
+    }
+    Some(path)
+}
+
+fn collect_cache_files(cache_root: &Path) -> Result<Vec<CacheFileEntry>> {
+    if !cache_root.exists() {
+        return Ok(Vec::new());
+    }
+    let mut entries = Vec::new();
+    collect_cache_files_recursive(cache_root, &mut entries)?;
+    Ok(entries)
+}
+
+fn collect_cache_files_recursive(
+    cache_root: &Path,
+    entries: &mut Vec<CacheFileEntry>,
+) -> Result<()> {
+    for item in fs::read_dir(cache_root)
+        .with_context(|| format!("failed to read cache directory: {}", cache_root.display()))?
+    {
+        let item = item?;
+        let path = item.path();
+        let metadata = item.metadata()?;
+        if metadata.is_dir() {
+            collect_cache_files_recursive(&path, entries)?;
+            continue;
+        }
+        if metadata.is_file() {
+            entries.push(CacheFileEntry {
+                path,
+                size: metadata.len(),
+            });
+        }
+    }
+    Ok(())
+}
+
 fn run_upgrade_command(
     layout: &PrefixLayout,
     registry_root: Option<&Path>,
     spec: Option<String>,
     dry_run: bool,
+    explain: bool,
     provider_overrides: &BTreeMap<String, String>,
     interaction_policy: InstallInteractionPolicy,
 ) -> Result<()> {
+    ensure_explain_requires_dry_run("upgrade", dry_run, explain)?;
     let output_style = current_output_style();
     let renderer = TerminalRenderer::from_style(output_style);
     ensure_upgrade_command_ready(layout)?;
@@ -29,6 +585,7 @@ fn run_upgrade_command(
     if dry_run {
         let mut planned_changes = Vec::new();
+        let mut explainability = DependencyPolicyExplainability::default();
 
         match spec.as_deref() {
             Some(single) => {
@@ -49,12 +606,19 @@ fn run_upgrade_command(
                     &roots,
                     installed_receipt.target.as_deref(),
                     provider_overrides,
+                    false,
                 )?;
                 enforce_no_downgrades(&receipts, &resolved, "upgrade")?;
                 for package in &resolved {
                     validate_install_preflight_for_resolved(layout, package, &receipts)?;
                 }
                 planned_changes.extend(build_planned_package_changes(&resolved, &receipts)?);
+                if explain {
+                    merge_dependency_policy_explainability(
+                        &mut explainability,
+                        build_dependency_policy_explainability(&resolved,
&receipts, &roots)?,
+                    );
+                }
             }
             None => {
                 let plans = build_upgrade_plans(&receipts);
@@ -73,9 +637,20 @@ fn run_upgrade_command(
                         plan.target.as_deref(),
                         provider_overrides,
                         false,
+                        false,
                     )?;
                     enforce_no_downgrades(&receipts, &resolved, "upgrade")?;
                     resolved_dependency_tokens.extend(plan_tokens);
+                    if explain {
+                        merge_dependency_policy_explainability(
+                            &mut explainability,
+                            build_dependency_policy_explainability(
+                                &resolved,
+                                &receipts,
+                                &plan.roots,
+                            )?,
+                        );
+                    }
                     grouped_resolved.push(resolved);
                 }
@@ -106,7 +681,11 @@ fn run_upgrade_command(
         }
         let preview = build_transaction_preview("upgrade", &planned_changes);
-        for line in render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun) {
+        for line in render_dry_run_output_lines(
+            &preview,
+            TransactionPreviewMode::DryRun,
+            explain.then_some(&explainability),
+        ) {
             println!("{line}");
         }
         return Ok(());
@@ -141,6 +720,7 @@ fn run_upgrade_command(
                 &roots,
                 installed_receipt.target.as_deref(),
                 provider_overrides,
+                false,
             )?;
             let planned_dependency_overrides = build_planned_dependency_overrides(&resolved);
             enforce_no_downgrades(&receipts, &resolved, "upgrade")?;
@@ -270,6 +850,7 @@ fn run_upgrade_command(
                         plan.target.as_deref(),
                         provider_overrides,
                         false,
+                        false,
                     )?;
                     enforce_no_downgrades(&receipts, &resolved, "upgrade")?;
diff --git a/crates/crosspack-cli/src/core_flows.rs b/crates/crosspack-cli/src/core_flows.rs
index e81461a..1d0ad2a 100644
--- a/crates/crosspack-cli/src/core_flows.rs
+++ b/crates/crosspack-cli/src/core_flows.rs
@@ -111,6 +111,18 @@ fn format_info_lines(name: &str, versions: &[PackageManifest]) -> Vec<String> {
             .collect::<Vec<_>>();
         lines.push(format!(" Replaces: {}", replaces.join(", ")));
     }
+
+        if !manifest.provides.is_empty()
+            || !manifest.conflicts.is_empty()
+            || !manifest.replaces.is_empty()
+        {
+            lines.push(format!(
+                " Policy: provides={} conflicts={} replaces={}",
+                manifest.provides.len(),
+                manifest.conflicts.len(),
+                manifest.replaces.len()
+            ));
+        }
     }
 
     lines
@@ -270,6 +282,37
@@ struct TransactionPreview {
     risk_flags: Vec<String>,
 }
 
+#[derive(Debug, Clone, Default)]
+struct DependencyPolicyExplainability {
+    provider_substitutions: Vec<PolicyProviderSubstitution>,
+    replacement_removals: Vec<PolicyReplacementRemoval>,
+    conflict_constraints: Vec<PolicyConflictConstraint>,
+}
+
+#[derive(Debug, Clone)]
+struct PolicyProviderSubstitution {
+    capability: String,
+    selected_package: String,
+    selected_version: String,
+}
+
+#[derive(Debug, Clone)]
+struct PolicyReplacementRemoval {
+    selected_package: String,
+    selected_version: String,
+    removed_package: String,
+    removed_version: String,
+    replacement_requirement: String,
+}
+
+#[derive(Debug, Clone)]
+struct PolicyConflictConstraint {
+    selected_package: String,
+    selected_version: String,
+    conflict_package: String,
+    conflict_requirement: String,
+}
+
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 enum TransactionPreviewMode {
     DryRun,
@@ -516,6 +559,7 @@ fn resolve_install_graph(
     roots: &[RootInstallRequest],
     requested_target: Option<&str>,
     provider_overrides: &BTreeMap<String, String>,
+    build_from_source: bool,
 ) -> Result<Vec<ResolvedInstall>> {
     let (resolved, _) = resolve_install_graph_with_tokens(
         layout,
@@ -524,6 +568,7 @@ fn resolve_install_graph(
         requested_target,
         provider_overrides,
         true,
+        build_from_source,
     )?;
     Ok(resolved)
 }
@@ -535,6 +580,7 @@ fn resolve_install_graph_with_tokens(
     requested_target: Option<&str>,
     provider_overrides: &BTreeMap<String, String>,
     validate_overrides: bool,
+    build_from_source: bool,
 ) -> Result<(Vec<ResolvedInstall>, HashSet<String>)> {
     let mut pins = BTreeMap::new();
     for (name, raw_req) in read_all_pins(layout)? {
@@ -575,19 +621,8 @@ fn resolve_install_graph_with_tokens(
             .ok_or_else(|| anyhow!("resolver selected package missing from graph: {name}"))?
             .clone();
 
-        let artifact = manifest
-            .artifacts
-            .iter()
-            .find(|artifact| artifact.target == resolved_target)
-            .ok_or_else(|| {
-                anyhow!(
-                    "no artifact available for target {} in {} {}",
-                    resolved_target,
-                    manifest.name,
-                    manifest.version
-                )
-            })?
-            .clone();
+        let artifact =
+            select_artifact_for_target(&manifest, &resolved_target, build_from_source)?;
 
         let archive_type = artifact.archive_type()?;
 
         Ok(ResolvedInstall {
@@ -602,6 +637,60 @@ fn resolve_install_graph_with_tokens(
     Ok((resolved, resolved_dependency_tokens))
 }
 
+fn ensure_build_from_source_not_supported(_operation: &str, build_from_source: bool) -> Result<()> {
+    if build_from_source {
+        return Err(source_build_not_supported_error());
+    }
+    Ok(())
+}
+
+fn ensure_explain_requires_dry_run(operation: &str, dry_run: bool, explain: bool) -> Result<()> {
+    if explain && !dry_run {
+        return Err(anyhow!("--explain requires --dry-run for '{}'", operation));
+    }
+    Ok(())
+}
+
+fn source_build_not_supported_error() -> anyhow::Error {
+    anyhow!(
+        "source builds are not yet supported; remove --build-from-source and use a target with published binary artifacts"
+    )
+}
+
+fn select_artifact_for_target(
+    manifest: &PackageManifest,
+    resolved_target: &str,
+    build_from_source: bool,
+) -> Result<Artifact> {
+    if let Some(artifact) = manifest
+        .artifacts
+        .iter()
+        .find(|artifact| artifact.target == resolved_target)
+    {
+        return Ok(artifact.clone());
+    }
+
+    if manifest.source_build.is_some() {
+        if build_from_source {
+            return Err(source_build_not_supported_error());
+        }
+
+        return Err(anyhow!(
+            "source build required for {} {} on target {}: no binary artifact published; source builds are not yet supported",
+            manifest.name,
+            manifest.version,
+            resolved_target
+        ));
+    }
+
+    Err(anyhow!(
+        "no artifact available for target {} in {} {}",
+        resolved_target,
+        manifest.name,
+        manifest.version
+    ))
+}
+
 fn build_planned_package_changes(
     resolved: &[ResolvedInstall],
     receipts: &[InstallReceipt],
@@ -769,6 +858,158 @@ fn render_transaction_preview_lines(
     lines
 }
 
+fn build_dependency_policy_explainability(
+    resolved: &[ResolvedInstall],
+    receipts: &[InstallReceipt],
+    roots: &[RootInstallRequest],
+) -> Result<DependencyPolicyExplainability> {
+    let mut explainability =
DependencyPolicyExplainability::default();
+
+    let mut requested_tokens = roots
+        .iter()
+        .map(|root| root.name.clone())
+        .collect::<BTreeSet<_>>();
+    for package in resolved {
+        requested_tokens.extend(package.manifest.dependencies.keys().cloned());
+    }
+
+    for capability in requested_tokens {
+        let mut provider_candidates = resolved
+            .iter()
+            .filter(|package| {
+                package.manifest.name == capability
+                    || package
+                        .manifest
+                        .provides
+                        .iter()
+                        .any(|provided| provided == &capability)
+            })
+            .collect::<Vec<_>>();
+
+        if provider_candidates
+            .iter()
+            .any(|candidate| candidate.manifest.name == capability)
+        {
+            continue;
+        }
+
+        provider_candidates.sort_by(|left, right| {
+            left.manifest
+                .name
+                .cmp(&right.manifest.name)
+                .then_with(|| right.manifest.version.cmp(&left.manifest.version))
+        });
+
+        for selected in provider_candidates {
+            explainability
+                .provider_substitutions
+                .push(PolicyProviderSubstitution {
+                    capability: capability.clone(),
+                    selected_package: selected.manifest.name.clone(),
+                    selected_version: selected.manifest.version.to_string(),
+                });
+        }
+    }
+
+    let mut ordered_resolved = resolved.iter().collect::<Vec<_>>();
+    ordered_resolved.sort_by(|left, right| left.manifest.name.cmp(&right.manifest.name));
+
+    for package in &ordered_resolved {
+        let replacement_receipts = collect_replacement_receipts(&package.manifest, receipts)?;
+        for replacement in replacement_receipts {
+            if let Some(requirement) = package.manifest.replaces.get(&replacement.name) {
+                explainability
+                    .replacement_removals
+                    .push(PolicyReplacementRemoval {
+                        selected_package: package.manifest.name.clone(),
+                        selected_version: package.manifest.version.to_string(),
+                        removed_package: replacement.name,
+                        removed_version: replacement.version,
+                        replacement_requirement: requirement.to_string(),
+                    });
+            }
+        }
+
+        for (conflict_package, conflict_requirement) in &package.manifest.conflicts {
+            explainability
+                .conflict_constraints
+                .push(PolicyConflictConstraint {
+                    selected_package:
package.manifest.name.clone(),
+                    selected_version: package.manifest.version.to_string(),
+                    conflict_package: conflict_package.clone(),
+                    conflict_requirement: conflict_requirement.to_string(),
+                });
+        }
+    }
+
+    Ok(explainability)
+}
+
+fn merge_dependency_policy_explainability(
+    destination: &mut DependencyPolicyExplainability,
+    mut source: DependencyPolicyExplainability,
+) {
+    destination
+        .provider_substitutions
+        .append(&mut source.provider_substitutions);
+    destination
+        .replacement_removals
+        .append(&mut source.replacement_removals);
+    destination
+        .conflict_constraints
+        .append(&mut source.conflict_constraints);
+}
+
+fn render_dependency_policy_explainability_lines(
+    explainability: &DependencyPolicyExplainability,
+) -> Vec<String> {
+    let mut lines = Vec::new();
+
+    for substitution in &explainability.provider_substitutions {
+        lines.push(format!(
+            "explain_provider capability={} selected={}@{}",
+            substitution.capability, substitution.selected_package, substitution.selected_version
+        ));
+    }
+
+    for replacement in &explainability.replacement_removals {
+        lines.push(format!(
+            "explain_replacement selected={}@{} removes={}@{} declared={}",
+            replacement.selected_package,
+            replacement.selected_version,
+            replacement.removed_package,
+            replacement.removed_version,
+            replacement.replacement_requirement
+        ));
+    }
+
+    for conflict in &explainability.conflict_constraints {
+        lines.push(format!(
+            "explain_conflict selected={}@{} conflicts={}({})",
+            conflict.selected_package,
+            conflict.selected_version,
+            conflict.conflict_package,
+            conflict.conflict_requirement
+        ));
+    }
+
+    lines
+}
+
+fn render_dry_run_output_lines(
+    preview: &TransactionPreview,
+    mode: TransactionPreviewMode,
+    explainability: Option<&DependencyPolicyExplainability>,
+) -> Vec<String> {
+    let mut lines = render_transaction_preview_lines(preview, mode);
+    if let Some(explainability) = explainability {
+        lines.extend(render_dependency_policy_explainability_lines(
+            explainability,
+        ));
+    }
+
lines +} + fn validate_install_preflight_for_resolved( layout: &PrefixLayout, resolved: &ResolvedInstall, diff --git a/crates/crosspack-cli/src/dispatch.rs b/crates/crosspack-cli/src/dispatch.rs index c7f019f..9fd14f3 100644 --- a/crates/crosspack-cli/src/dispatch.rs +++ b/crates/crosspack-cli/src/dispatch.rs @@ -27,12 +27,16 @@ fn run_cli(cli: Cli) -> Result<()> { spec, target, dry_run, + explain, + build_from_source, force_redownload, provider, escalation, } => { let (name, requirement) = parse_spec(&spec)?; let provider_overrides = parse_provider_overrides(&provider)?; + ensure_explain_requires_dry_run("install", dry_run, explain)?; + ensure_build_from_source_not_supported("install", build_from_source)?; let escalation_policy = resolve_escalation_policy(escalation); let interaction_policy = install_interaction_policy(escalation_policy); let output_style = current_output_style(); @@ -56,6 +60,7 @@ fn run_cli(cli: Cli) -> Result<()> { &roots, target.as_deref(), &provider_overrides, + build_from_source, )?; let receipts = read_install_receipts(&layout)?; for package in &resolved { @@ -63,9 +68,18 @@ fn run_cli(cli: Cli) -> Result<()> { } let planned_changes = build_planned_package_changes(&resolved, &receipts)?; let preview = build_transaction_preview("install", &planned_changes); - for line in - render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun) - { + let explainability = if explain { + Some(build_dependency_policy_explainability( + &resolved, &receipts, &roots, + )?) 
+ } else { + None + }; + for line in render_dry_run_output_lines( + &preview, + TransactionPreviewMode::DryRun, + explainability.as_ref(), + ) { println!("{line}"); } return Ok(()); @@ -84,6 +98,7 @@ fn run_cli(cli: Cli) -> Result<()> { &roots, target.as_deref(), &provider_overrides, + build_from_source, )?; append_transaction_journal_entry( @@ -168,6 +183,7 @@ fn run_cli(cli: Cli) -> Result<()> { Commands::Upgrade { spec, dry_run, + explain, provider, escalation, } => { @@ -181,6 +197,7 @@ fn run_cli(cli: Cli) -> Result<()> { cli.registry_root.as_deref(), spec, dry_run, + explain, &provider_overrides, interaction_policy, )?; @@ -224,6 +241,41 @@ fn run_cli(cli: Cli) -> Result<()> { println!("pinned {name} to {requirement}"); println!("pin: {}", pin_path.display()); } + Commands::Outdated => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_outdated_command(&layout, cli.registry_root.as_deref())?; + } + Commands::Depends { name } => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_depends_command(&layout, &name)?; + } + Commands::Uses { name } => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_uses_command(&layout, &name)?; + } + Commands::Why { name } => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_why_command(&layout, &name)?; + } + Commands::Services { command } => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_services_command(&layout, command)?; + } + Commands::Cache { command } => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_cache_command(&layout, command)?; + } + Commands::Bundle { command } => { + let prefix = default_user_prefix()?; + let layout = PrefixLayout::new(prefix); + run_bundle_command(&layout, cli.registry_root.as_deref(), command)?; + } Commands::Registry { command } => { let prefix = 
default_user_prefix()?;
             let layout = PrefixLayout::new(prefix);
diff --git a/crates/crosspack-cli/src/main.rs b/crates/crosspack-cli/src/main.rs
index 81bd54f..4bfe46c 100644
--- a/crates/crosspack-cli/src/main.rs
+++ b/crates/crosspack-cli/src/main.rs
@@ -2,7 +2,7 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
 use std::ffi::OsString;
 use std::fs::{self, OpenOptions};
 use std::io::{IsTerminal, Read, Write};
-use std::path::{Path, PathBuf};
+use std::path::{Component, Path, PathBuf};
 use std::process::Command;
 #[cfg(unix)]
 use std::process::Stdio;
@@ -39,6 +39,7 @@ use crosspack_registry::{
 use crosspack_resolver::{resolve_dependency_graph, RootRequirement};
 use crosspack_security::verify_sha256_file;
 use semver::{Version, VersionReq};
+use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
 #[derive(Parser, Debug)]
@@ -454,6 +455,10 @@ enum Commands {
         #[arg(long)]
         dry_run: bool,
         #[arg(long)]
+        explain: bool,
+        #[arg(long)]
+        build_from_source: bool,
+        #[arg(long)]
         force_redownload: bool,
         #[arg(long = "provider", value_name = "capability=package")]
         provider: Vec<String>,
         #[command(flatten)]
@@ -464,6 +469,8 @@ enum Commands {
         spec: Option<String>,
         #[arg(long)]
         dry_run: bool,
+        #[arg(long)]
+        explain: bool,
         #[arg(long = "provider", value_name = "capability=package")]
         provider: Vec<String>,
         #[command(flatten)]
@@ -487,6 +494,28 @@ enum Commands {
     Pin {
         spec: String,
     },
+    Outdated,
+    Depends {
+        name: String,
+    },
+    Uses {
+        name: String,
+    },
+    Why {
+        name: String,
+    },
+    Services {
+        #[command(subcommand)]
+        command: ServicesCommands,
+    },
+    Cache {
+        #[command(subcommand)]
+        command: CacheCommands,
+    },
+    Bundle {
+        #[command(subcommand)]
+        command: BundleCommands,
+    },
     Registry {
         #[command(subcommand)]
         command: RegistryCommands,
@@ -534,6 +563,44 @@ enum RegistryCommands {
     },
 }
 
+#[derive(Subcommand, Debug)]
+enum CacheCommands {
+    List,
+    Prune,
+    Gc,
+}
+
+#[derive(Subcommand, Debug)]
+enum ServicesCommands {
+    List,
+    Status { name: String },
+    Start { name: String },
+    Stop { name:
String },
+    Restart { name: String },
+}
+
+#[derive(Subcommand, Debug)]
+enum BundleCommands {
+    Export {
+        #[arg(long)]
+        output: Option<PathBuf>,
+    },
+    Apply {
+        #[arg(long)]
+        file: Option<PathBuf>,
+        #[arg(long)]
+        dry_run: bool,
+        #[arg(long)]
+        explain: bool,
+        #[arg(long)]
+        build_from_source: bool,
+        #[arg(long)]
+        force_redownload: bool,
+        #[arg(long = "provider", value_name = "capability=package")]
+        provider: Vec<String>,
+    },
+}
+
 #[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
 enum CliRegistryKind {
     Git,
@@ -604,4 +671,6 @@
 include!("command_flows.rs");
 include!("core_flows.rs");
+include!("bundle_flows.rs");
+
 include!("tests.rs");
diff --git a/crates/crosspack-cli/src/metadata.rs b/crates/crosspack-cli/src/metadata.rs
index 086c668..b2e28dc 100644
--- a/crates/crosspack-cli/src/metadata.rs
+++ b/crates/crosspack-cli/src/metadata.rs
@@ -105,6 +105,12 @@ fn classify_search_match(name: &str, query: &str) -> Option {
 }
 
 fn best_available_short_description(manifest: &PackageManifest) -> Option<String> {
+    if let Some(description) = &manifest.description {
+        let trimmed = description.trim();
+        if !trimmed.is_empty() {
+            return Some(sanitize_metadata_cell(trimmed));
+        }
+    }
     if !manifest.provides.is_empty() {
         return Some(format!("provides: {}", manifest.provides.join(", ")));
     }
@@ -117,6 +123,15 @@ fn best_available_short_description(manifest: &PackageManifest) -> Option<String
+fn sanitize_metadata_cell(raw: &str) -> String {
+    raw.chars()
+        .map(|ch| match ch {
+            '\t' | '\n' | '\r' => ' ',
+            other => other,
+        })
+        .collect::<String>()
+}
+
 fn format_search_results(results: &[SearchResult], query: &str) -> Vec<String> {
     if results.is_empty() {
         return vec![format!(
@@ -320,4 +335,3 @@ fn update_failure_reason_code(error: Option<&str>) -> String {
     "unknown".to_string()
 }
-
diff --git a/crates/crosspack-cli/src/tests.rs b/crates/crosspack-cli/src/tests.rs
index ae71980..fbe952d 100644
--- a/crates/crosspack-cli/src/tests.rs
+++ b/crates/crosspack-cli/src/tests.rs
@@ -147,6 +147,7 @@ mod tests {
             None,
             None,
             false,
+            false,
             &BTreeMap::new(),
InstallInteractionPolicy::default(), ) @@ -3738,6 +3739,49 @@ ripgrep-legacy = "*" } } + #[test] + fn cli_parses_install_with_dry_run_explain_flag() { + let cli = + Cli::try_parse_from(["crosspack", "install", "ripgrep", "--dry-run", "--explain"]) + .expect("command must parse"); + + match cli.command { + Commands::Install { + dry_run, explain, .. + } => { + assert!(dry_run); + assert!(explain); + } + other => panic!("unexpected command: {other:?}"), + } + } + + #[test] + fn install_explain_without_dry_run_is_rejected() { + let cli = Cli::try_parse_from(["crosspack", "install", "ripgrep", "--explain"]) + .expect("command must parse"); + let err = run_cli(cli).expect_err("--explain must require --dry-run"); + assert_eq!( + err.to_string(), + "--explain requires --dry-run for 'install'" + ); + } + + #[test] + fn cli_parses_install_with_build_from_source_flag() { + let cli = Cli::try_parse_from(["crosspack", "install", "ripgrep", "--build-from-source"]) + .expect("command must parse"); + + match cli.command { + Commands::Install { + build_from_source, .. + } => { + assert!(build_from_source); + } + other => panic!("unexpected command: {other:?}"), + } + } + #[test] fn cli_rejects_install_with_conflicting_escalation_flags() { let err = Cli::try_parse_from([ @@ -3809,6 +3853,118 @@ ripgrep-legacy = "*" } } + #[test] + fn cli_parses_upgrade_with_dry_run_explain_flag() { + let cli = + Cli::try_parse_from(["crosspack", "upgrade", "ripgrep", "--dry-run", "--explain"]) + .expect("command must parse"); + + match cli.command { + Commands::Upgrade { + dry_run, explain, .. 
+ } => { + assert!(dry_run); + assert!(explain); + } + other => panic!("unexpected command: {other:?}"), + } + } + + #[test] + fn cli_parses_bundle_export_with_optional_output_flag() { + let cli = Cli::try_parse_from([ + "crosspack", + "bundle", + "export", + "--output", + "state/export.toml", + ]) + .expect("command must parse"); + + match cli.command { + Commands::Bundle { + command: BundleCommands::Export { output }, + } => { + assert_eq!(output, Some(PathBuf::from("state/export.toml"))); + } + other => panic!("unexpected command: {other:?}"), + } + } + + #[test] + fn cli_parses_bundle_apply_with_flags() { + let cli = Cli::try_parse_from([ + "crosspack", + "bundle", + "apply", + "--file", + "state/bundle.toml", + "--dry-run", + "--force-redownload", + "--provider", + "c-compiler=clang", + "--provider", + "rust-toolchain=rustup", + ]) + .expect("command must parse"); + + match cli.command { + Commands::Bundle { + command: + BundleCommands::Apply { + file, + dry_run, + force_redownload, + provider, + .. + }, + } => { + assert_eq!(file, Some(PathBuf::from("state/bundle.toml"))); + assert!(dry_run); + assert!(force_redownload); + assert_eq!(provider, vec!["c-compiler=clang", "rust-toolchain=rustup"]); + } + other => panic!("unexpected command: {other:?}"), + } + } + + #[test] + fn cli_parses_bundle_apply_with_dry_run_explain_flag() { + let cli = Cli::try_parse_from(["crosspack", "bundle", "apply", "--dry-run", "--explain"]) + .expect("command must parse"); + + match cli.command { + Commands::Bundle { + command: + BundleCommands::Apply { + dry_run, explain, .. 
+ }, + } => { + assert!(dry_run); + assert!(explain); + } + other => panic!("unexpected command: {other:?}"), + } + } + + #[test] + fn cli_parses_bundle_apply_with_build_from_source_flag() { + let cli = Cli::try_parse_from(["crosspack", "bundle", "apply", "--build-from-source"]) + .expect("command must parse"); + + match cli.command { + Commands::Bundle { + command: + BundleCommands::Apply { + build_from_source, .. + }, + } => { + assert!(build_from_source); + } + other => panic!("unexpected command: {other:?}"), + } + } + #[test] fn cli_parses_uninstall_with_escalation_flags() { let cli = Cli::try_parse_from([ @@ -3957,156 +4113,914 @@ ripgrep-legacy = "*" } #[test] - fn build_self_update_install_args_includes_registry_root_and_flags() { - let registry_root = PathBuf::from("/tmp/registry"); - let args = build_self_update_install_args( - Some(registry_root.as_path()), - true, - true, - EscalationArgs { - non_interactive: true, - allow_escalation: true, - no_escalation: false, - }, - ); - let rendered = args - .iter() - .map(|value| value.to_string_lossy().to_string()) - .collect::>(); - assert_eq!( - rendered, - vec![ - "--registry-root", - "/tmp/registry", - "install", - "crosspack", - "--dry-run", - "--force-redownload", - "--non-interactive", - "--allow-escalation", - ] - ); + fn cli_parses_outdated_subcommand() { + let cli = Cli::try_parse_from(["crosspack", "outdated"]).expect("command must parse"); + assert!(matches!(cli.command, Commands::Outdated)); } #[test] - fn build_self_update_install_args_omits_optional_values() { - let args = build_self_update_install_args(None, false, false, EscalationArgs::default()); - let rendered = args - .iter() - .map(|value| value.to_string_lossy().to_string()) - .collect::>(); - assert_eq!(rendered, vec!["install", "crosspack"]); + fn cli_parses_depends_subcommand() { + let cli = + Cli::try_parse_from(["crosspack", "depends", "ripgrep"]).expect("command must parse"); + match cli.command { + Commands::Depends { name } => 
assert_eq!(name, "ripgrep"), + other => panic!("unexpected command: {other:?}"), + } } #[test] - fn cli_supports_global_version_flag() { - let err = Cli::try_parse_from(["crosspack", "--version"]) - .expect_err("version flag should exit with version output"); - assert_eq!(err.kind(), ErrorKind::DisplayVersion); + fn cli_parses_uses_subcommand() { + let cli = Cli::try_parse_from(["crosspack", "uses", "pcre2"]).expect("command must parse"); + match cli.command { + Commands::Uses { name } => assert_eq!(name, "pcre2"), + other => panic!("unexpected command: {other:?}"), + } } #[test] - fn render_transaction_preview_lines_is_deterministic_and_script_friendly() { - let preview = build_transaction_preview( - "upgrade", - &[ - PlannedPackageChange { - name: "tool".to_string(), - target: "x86_64-unknown-linux-gnu".to_string(), - new_version: "2.0.0".to_string(), - old_version: Some("1.0.0".to_string()), - replacement_removals: vec![PlannedRemoval { - name: "old-tool".to_string(), - version: "0.9.0".to_string(), - }], - }, - PlannedPackageChange { - name: "dep".to_string(), - target: "x86_64-unknown-linux-gnu".to_string(), - new_version: "1.1.0".to_string(), - old_version: None, - replacement_removals: Vec::new(), - }, - ], - ); - - let lines = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); - assert_eq!( - lines[0], - "transaction_preview operation=upgrade mode=dry-run" - ); - assert_eq!( - lines[1], - "transaction_summary adds=1 removals=1 replacements=1 transitions=1" - ); - assert_eq!( - lines[2], - "risk_flags=adds,multi-package-transaction,removals,replacements,version-transitions" - ); - assert_eq!( - lines[3], - "change_add name=dep version=1.1.0 target=x86_64-unknown-linux-gnu" - ); - assert_eq!( - lines[4], - "change_remove name=old-tool version=0.9.0 reason=replacement" - ); - assert_eq!(lines[5], "change_replace from=old-tool@0.9.0 to=tool@2.0.0"); - assert_eq!(lines[6], "change_transition name=tool from=1.0.0 to=2.0.0"); + fn 
cli_parses_why_subcommand() { + let cli = Cli::try_parse_from(["crosspack", "why", "pcre2"]).expect("command parses"); + match cli.command { + Commands::Why { name } => assert_eq!(name, "pcre2"), + other => panic!("unexpected command: {other:?}"), + } } #[test] - fn transaction_preview_dry_run_output_is_stable_for_same_plan() { - let preview = build_transaction_preview( - "install", - &[PlannedPackageChange { - name: "tool".to_string(), - target: "x86_64-unknown-linux-gnu".to_string(), - new_version: "1.2.3".to_string(), - old_version: Some("1.2.2".to_string()), - replacement_removals: Vec::new(), - }], - ); - let first = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); - let second = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + fn cli_parses_cache_subcommands() { + let list = Cli::try_parse_from(["crosspack", "cache", "list"]).expect("list parses"); + match list.command { + Commands::Cache { + command: CacheCommands::List, + } => {} + other => panic!("unexpected command: {other:?}"), + } - assert_eq!(first, second); - assert_eq!( - first[0], - "transaction_preview operation=install mode=dry-run" - ); + let prune = Cli::try_parse_from(["crosspack", "cache", "prune"]).expect("prune parses"); + match prune.command { + Commands::Cache { + command: CacheCommands::Prune, + } => {} + other => panic!("unexpected command: {other:?}"), + } + + let gc = Cli::try_parse_from(["crosspack", "cache", "gc"]).expect("gc parses"); + match gc.command { + Commands::Cache { + command: CacheCommands::Gc, + } => {} + other => panic!("unexpected command: {other:?}"), + } } #[test] - fn transaction_preview_omits_multi_package_flag_when_no_mutations() { - let preview = build_transaction_preview( - "upgrade", - &[ - PlannedPackageChange { - name: "a".to_string(), - target: "x86_64-unknown-linux-gnu".to_string(), - new_version: "1.0.0".to_string(), - old_version: Some("1.0.0".to_string()), - replacement_removals: Vec::new(), - }, - 
PlannedPackageChange { - name: "b".to_string(), - target: "x86_64-unknown-linux-gnu".to_string(), - new_version: "2.0.0".to_string(), - old_version: Some("2.0.0".to_string()), - replacement_removals: Vec::new(), - }, - ], - ); + fn cli_parses_services_subcommands() { + let list = Cli::try_parse_from(["crosspack", "services", "list"]).expect("list parses"); + match list.command { + Commands::Services { + command: ServicesCommands::List, + } => {} + other => panic!("unexpected command: {other:?}"), + } - let lines = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); - assert_eq!( - lines[1], - "transaction_summary adds=0 removals=0 replacements=0 transitions=0" + let status = Cli::try_parse_from(["crosspack", "services", "status", "demo"]) + .expect("status parses"); + match status.command { + Commands::Services { + command: ServicesCommands::Status { name }, + } => assert_eq!(name, "demo"), + other => panic!("unexpected command: {other:?}"), + } + + let start = + Cli::try_parse_from(["crosspack", "services", "start", "demo"]).expect("start parses"); + match start.command { + Commands::Services { + command: ServicesCommands::Start { name }, + } => assert_eq!(name, "demo"), + other => panic!("unexpected command: {other:?}"), + } + + let stop = + Cli::try_parse_from(["crosspack", "services", "stop", "demo"]).expect("stop parses"); + match stop.command { + Commands::Services { + command: ServicesCommands::Stop { name }, + } => assert_eq!(name, "demo"), + other => panic!("unexpected command: {other:?}"), + } + + let restart = Cli::try_parse_from(["crosspack", "services", "restart", "demo"]) + .expect("restart parses"); + match restart.command { + Commands::Services { + command: ServicesCommands::Restart { name }, + } => assert_eq!(name, "demo"), + other => panic!("unexpected command: {other:?}"), + } + } + + #[test] + fn find_dependency_path_from_roots_returns_shortest_root_path() { + let root_a = InstallReceipt { + name: "root-a".to_string(), + 
version: "1.0.0".to_string(), + dependencies: vec!["shared@1.0.0".to_string()], + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }; + let shared = InstallReceipt { + name: "shared".to_string(), + version: "1.0.0".to_string(), + dependencies: vec!["leaf@1.0.0".to_string()], + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Dependency, + install_status: "installed".to_string(), + installed_at_unix: 1, + }; + let leaf = InstallReceipt { + name: "leaf".to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Dependency, + install_status: "installed".to_string(), + installed_at_unix: 1, + }; + + let receipt_map = HashMap::from([ + (root_a.name.clone(), &root_a), + (shared.name.clone(), &shared), + (leaf.name.clone(), &leaf), + ]); + let roots = vec!["root-a".to_string()]; + + let path = find_dependency_path_from_roots("leaf", &roots, &receipt_map) + .expect("dependency path should exist"); + assert_eq!(path, vec!["root-a", "shared", "leaf"]); + } + + #[test] + fn safe_artifact_cache_path_rejects_parent_traversal() { + let layout = test_layout(); + let invalid = format!("{}/../escape.bin", layout.artifacts_cache_dir().display()); + 
assert_eq!(safe_artifact_cache_path(&layout, &invalid), None); + } + + #[test] + fn safe_artifact_cache_path_accepts_absolute_artifacts_path() { + let layout = test_layout(); + let valid = layout + .artifacts_cache_dir() + .join("ripgrep/14.1.0/x86_64-unknown-linux-gnu/artifact.tar.zst"); + let resolved = safe_artifact_cache_path(&layout, &valid.display().to_string()) + .expect("path should be accepted"); + assert_eq!(resolved, valid); + } + + #[test] + fn managed_service_state_transitions_are_deterministic() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + write_install_receipt( + &layout, + &InstallReceipt { + name: "demo".to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }, + ) + .expect("must write receipt"); + + assert_eq!( + read_managed_service_state(&layout, "demo").expect("must read default state"), + ManagedServiceState::Stopped + ); + + run_service_start_command(&layout, "demo").expect("start must succeed"); + assert_eq!( + read_managed_service_state(&layout, "demo").expect("must read running state"), + ManagedServiceState::Running + ); + assert_eq!( + std::fs::read_to_string(managed_service_state_path(&layout, "demo")) + .expect("must read running state file"), + "state=running\n" + ); + + run_service_stop_command(&layout, "demo").expect("stop must succeed"); + assert_eq!( + read_managed_service_state(&layout, "demo").expect("must read stopped state"), + ManagedServiceState::Stopped + ); + assert_eq!( + std::fs::read_to_string(managed_service_state_path(&layout, "demo")) + .expect("must read stopped state file"), + "state=stopped\n" + ); + + 
run_service_restart_command(&layout, "demo").expect("restart must succeed"); + assert_eq!( + read_managed_service_state(&layout, "demo").expect("must read restarted state"), + ManagedServiceState::Running + ); + assert_eq!( + std::fs::read_to_string(managed_service_state_path(&layout, "demo")) + .expect("must read restarted state file"), + "state=running\n" + ); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn service_commands_require_installed_receipt_presence() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let err = run_service_start_command(&layout, "missing") + .expect_err("service start should require installed package receipt"); + let message = err.to_string(); + assert!(message.contains("No installed package found: missing")); + assert!(message.contains("crosspack install missing")); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn managed_services_list_rows_are_sorted_and_filtered_to_installed_packages() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + for name in ["bravo", "alpha", "charlie"] { + write_install_receipt( + &layout, + &InstallReceipt { + name: name.to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }, + ) + .expect("must write receipt"); + } + + write_managed_service_state(&layout, "charlie", ManagedServiceState::Running) + .expect("must write running service state"); + write_managed_service_state(&layout, "alpha", ManagedServiceState::Stopped) + .expect("must write stopped service state"); + write_managed_service_state(&layout, 
"ghost", ManagedServiceState::Running) + .expect("must write non-installed service state"); + + let rows = + collect_managed_service_rows(&layout).expect("must collect managed service rows"); + let rendered = rows + .iter() + .map(|row| format!("{} {}", row.name, row.state.as_str())) + .collect::>(); + assert_eq!(rendered, vec!["alpha stopped", "charlie running"]); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn service_commands_accept_plus_in_package_name() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + write_install_receipt( + &layout, + &InstallReceipt { + name: "cpp+tool".to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }, + ) + .expect("must write receipt"); + + run_service_start_command(&layout, "cpp+tool").expect("start must succeed"); + assert_eq!( + read_managed_service_state(&layout, "cpp+tool").expect("must read running state"), + ManagedServiceState::Running + ); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn read_managed_service_state_rejects_duplicate_state_entries() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let path = managed_service_state_path(&layout, "demo"); + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent).expect("must create service state dir"); + } + std::fs::write(&path, "state=running\nstate=stopped\n") + .expect("must write duplicate service state file"); + + let err = read_managed_service_state(&layout, "demo") + .expect_err("duplicate state lines should fail"); + 
assert!(err.to_string().contains("duplicate service state entries")); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn build_self_update_install_args_includes_registry_root_and_flags() { + let registry_root = PathBuf::from("/tmp/registry"); + let args = build_self_update_install_args( + Some(registry_root.as_path()), + true, + true, + EscalationArgs { + non_interactive: true, + allow_escalation: true, + no_escalation: false, + }, + ); + let rendered = args + .iter() + .map(|value| value.to_string_lossy().to_string()) + .collect::>(); + assert_eq!( + rendered, + vec![ + "--registry-root", + "/tmp/registry", + "install", + "crosspack", + "--dry-run", + "--force-redownload", + "--non-interactive", + "--allow-escalation", + ] + ); + } + + #[test] + fn build_self_update_install_args_omits_optional_values() { + let args = build_self_update_install_args(None, false, false, EscalationArgs::default()); + let rendered = args + .iter() + .map(|value| value.to_string_lossy().to_string()) + .collect::>(); + assert_eq!(rendered, vec!["install", "crosspack"]); + } + + #[test] + fn cli_supports_global_version_flag() { + let err = Cli::try_parse_from(["crosspack", "--version"]) + .expect_err("version flag should exit with version output"); + assert_eq!(err.kind(), ErrorKind::DisplayVersion); + } + + #[test] + fn render_transaction_preview_lines_is_deterministic_and_script_friendly() { + let preview = build_transaction_preview( + "upgrade", + &[ + PlannedPackageChange { + name: "tool".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "2.0.0".to_string(), + old_version: Some("1.0.0".to_string()), + replacement_removals: vec![PlannedRemoval { + name: "old-tool".to_string(), + version: "0.9.0".to_string(), + }], + }, + PlannedPackageChange { + name: "dep".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "1.1.0".to_string(), + old_version: None, + replacement_removals: Vec::new(), + }, + ], + ); + + let 
lines = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + assert_eq!( + lines[0], + "transaction_preview operation=upgrade mode=dry-run" + ); + assert_eq!( + lines[1], + "transaction_summary adds=1 removals=1 replacements=1 transitions=1" + ); + assert_eq!( + lines[2], + "risk_flags=adds,multi-package-transaction,removals,replacements,version-transitions" + ); + assert_eq!( + lines[3], + "change_add name=dep version=1.1.0 target=x86_64-unknown-linux-gnu" + ); + assert_eq!( + lines[4], + "change_remove name=old-tool version=0.9.0 reason=replacement" + ); + assert_eq!(lines[5], "change_replace from=old-tool@0.9.0 to=tool@2.0.0"); + assert_eq!(lines[6], "change_transition name=tool from=1.0.0 to=2.0.0"); + } + + #[test] + fn transaction_preview_dry_run_output_is_stable_for_same_plan() { + let preview = build_transaction_preview( + "install", + &[PlannedPackageChange { + name: "tool".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "1.2.3".to_string(), + old_version: Some("1.2.2".to_string()), + replacement_removals: Vec::new(), + }], + ); + let first = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + let second = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + + assert_eq!(first, second); + assert_eq!( + first[0], + "transaction_preview operation=install mode=dry-run" + ); + } + + #[test] + fn dry_run_output_without_explain_matches_existing_contract_lines() { + let preview = build_transaction_preview( + "upgrade", + &[PlannedPackageChange { + name: "tool".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "2.0.0".to_string(), + old_version: Some("1.0.0".to_string()), + replacement_removals: Vec::new(), + }], + ); + + let contract_lines = + render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + let without_explain = + render_dry_run_output_lines(&preview, TransactionPreviewMode::DryRun, None); + + 
assert_eq!(without_explain, contract_lines); + } + + #[test] + fn explainability_lines_are_deterministic_for_provider_replacement_and_conflicts() { + let tool_manifest = PackageManifest::from_toml_str( + r#" +name = "tool" +version = "1.0.0" + +[dependencies] +c-compiler = "*" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/tool-1.0.0.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + let provider_manifest = PackageManifest::from_toml_str( + r#" +name = "clang" +version = "18.0.0" +provides = ["c-compiler"] + +[conflicts] +gcc = "*" +legacy-cc = "<2.0.0" + +[replaces] +old-cc = "<2.0.0" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/clang-18.0.0.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + + let resolved = vec![ + ResolvedInstall { + artifact: provider_manifest.artifacts[0].clone(), + manifest: provider_manifest, + resolved_target: "x86_64-unknown-linux-gnu".to_string(), + archive_type: ArchiveType::TarZst, + }, + ResolvedInstall { + artifact: tool_manifest.artifacts[0].clone(), + manifest: tool_manifest, + resolved_target: "x86_64-unknown-linux-gnu".to_string(), + archive_type: ArchiveType::TarZst, + }, + ]; + let receipts = vec![InstallReceipt { + name: "old-cc".to_string(), + version: "1.5.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }]; + let roots = vec![RootInstallRequest { + name: "tool".to_string(), + requirement: VersionReq::STAR, + }]; + + let explainability = build_dependency_policy_explainability(&resolved, &receipts, &roots) + .expect("must build explainability"); + let lines = 
render_dependency_policy_explainability_lines(&explainability); + + assert_eq!( + lines, + vec![ + "explain_provider capability=c-compiler selected=clang@18.0.0".to_string(), + "explain_replacement selected=clang@18.0.0 removes=old-cc@1.5.0 declared=<2.0.0" + .to_string(), + "explain_conflict selected=clang@18.0.0 conflicts=gcc(*)".to_string(), + "explain_conflict selected=clang@18.0.0 conflicts=legacy-cc(<2.0.0)".to_string(), + ] + ); + } + + #[test] + fn explainability_includes_multiple_provider_substitutions_for_same_capability() { + let app_manifest = PackageManifest::from_toml_str( + r#" +name = "app" +version = "1.0.0" + +[dependencies] +c-compiler = "*" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/app-1.0.0.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + let clang_manifest = PackageManifest::from_toml_str( + r#" +name = "clang" +version = "18.0.0" +provides = ["c-compiler"] + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/clang-18.0.0.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + let zigcc_manifest = PackageManifest::from_toml_str( + r#" +name = "zigcc" +version = "0.12.0" +provides = ["c-compiler"] + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/zigcc-0.12.0.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + + let resolved = vec![ + ResolvedInstall { + artifact: app_manifest.artifacts[0].clone(), + manifest: app_manifest, + resolved_target: "x86_64-unknown-linux-gnu".to_string(), + archive_type: ArchiveType::TarZst, + }, + ResolvedInstall { + artifact: zigcc_manifest.artifacts[0].clone(), + manifest: zigcc_manifest, + resolved_target: "x86_64-unknown-linux-gnu".to_string(), + archive_type: ArchiveType::TarZst, + }, + ResolvedInstall { + artifact: clang_manifest.artifacts[0].clone(), + manifest: clang_manifest, + resolved_target: "x86_64-unknown-linux-gnu".to_string(), + 
archive_type: ArchiveType::TarZst, + }, + ]; + + let roots = vec![RootInstallRequest { + name: "app".to_string(), + requirement: VersionReq::STAR, + }]; + let explainability = build_dependency_policy_explainability(&resolved, &[], &roots) + .expect("must build explainability"); + let lines = render_dependency_policy_explainability_lines(&explainability); + assert!(lines + .contains(&"explain_provider capability=c-compiler selected=clang@18.0.0".to_string())); + assert!(lines + .contains(&"explain_provider capability=c-compiler selected=zigcc@0.12.0".to_string())); + } + + #[test] + fn explain_requires_dry_run_error_is_actionable() { + let err = ensure_explain_requires_dry_run("install", false, true) + .expect_err("--explain without --dry-run should fail"); + assert_eq!( + err.to_string(), + "--explain requires --dry-run for 'install'" + ); + } + + #[test] + fn transaction_preview_omits_multi_package_flag_when_no_mutations() { + let preview = build_transaction_preview( + "upgrade", + &[ + PlannedPackageChange { + name: "a".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "1.0.0".to_string(), + old_version: Some("1.0.0".to_string()), + replacement_removals: Vec::new(), + }, + PlannedPackageChange { + name: "b".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "2.0.0".to_string(), + old_version: Some("2.0.0".to_string()), + replacement_removals: Vec::new(), + }, + ], + ); + + let lines = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + assert_eq!( + lines[1], + "transaction_summary adds=0 removals=0 replacements=0 transitions=0" ); assert_eq!(lines[2], "risk_flags=none"); } + #[test] + fn bundle_export_document_orders_roots_and_pins_deterministically() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + write_install_receipt( + &layout, + &InstallReceipt { + name: "zeta".to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + 
target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 2, + }, + ) + .expect("must write zeta receipt"); + write_install_receipt( + &layout, + &InstallReceipt { + name: "alpha".to_string(), + version: "3.0.0".to_string(), + dependencies: Vec::new(), + target: Some("aarch64-apple-darwin".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 3, + }, + ) + .expect("must write alpha receipt"); + write_install_receipt( + &layout, + &InstallReceipt { + name: "dep".to_string(), + version: "2.5.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Dependency, + install_status: "installed".to_string(), + installed_at_unix: 1, + }, + ) + .expect("must write dependency receipt"); + write_pin(&layout, "zeta", "^1").expect("must write zeta pin"); + + let bundle = build_export_bundle_document(&layout).expect("must build bundle"); + assert_eq!(bundle.roots.len(), 2); + assert_eq!(bundle.roots[0].name, "alpha"); + assert_eq!( + bundle.roots[0].target.as_deref(), + Some("aarch64-apple-darwin") + ); + assert_eq!(bundle.roots[0].requirement.as_deref(), Some("=3.0.0")); + assert_eq!(bundle.roots[1].name, "zeta"); + assert_eq!( + bundle.roots[1].target.as_deref(), + 
Some("x86_64-unknown-linux-gnu") + ); + assert_eq!(bundle.roots[1].requirement.as_deref(), Some("^1")); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn parse_bundle_document_rejects_unknown_fields() { + let raw = r#" +format = "crosspack.bundle" +version = 1 +unexpected = "value" + +[[roots]] +name = "ripgrep" +requirement = "^14" +"#; + + let err = parse_bundle_document(raw).expect_err("unknown fields should be rejected"); + let rendered = err + .chain() + .map(ToString::to_string) + .collect::>() + .join(" | "); + assert!( + rendered.contains("unknown field") && rendered.contains("unexpected"), + "unexpected parse error: {rendered}" + ); + } + + #[test] + fn bundle_apply_group_plans_reject_cross_target_overlap() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + configure_ready_source(&layout, "official"); + write_signed_test_manifest(&layout, "official", "ripgrep", "14.1.0", None, None, &[]); + + let backend = select_metadata_backend(None, &layout).expect("backend must load"); + let bundle = BundleDocument { + format: BUNDLE_FORMAT_MARKER.to_string(), + version: BUNDLE_FORMAT_VERSION, + roots: vec![ + BundleRoot { + name: "ripgrep".to_string(), + target: None, + requirement: Some("^14".to_string()), + }, + BundleRoot { + name: "ripgrep".to_string(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + requirement: Some("^14".to_string()), + }, + ], + snapshot_context: None, + }; + + let err = + build_bundle_apply_group_plans(&layout, &backend, &bundle, &BTreeMap::new(), false) + .expect_err("overlap across target groups must fail"); + assert!( + err.to_string() + .contains("cannot safely process package 'ripgrep'"), + "unexpected error: {err}" + ); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn bundle_apply_dry_run_preview_includes_stable_transaction_keys() { + let preview = build_transaction_preview( + "bundle-apply", + &[PlannedPackageChange { + name: 
"tool".to_string(), + target: "x86_64-unknown-linux-gnu".to_string(), + new_version: "1.2.3".to_string(), + old_version: None, + replacement_removals: Vec::new(), + }], + ); + + let first = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + let second = render_transaction_preview_lines(&preview, TransactionPreviewMode::DryRun); + + assert_eq!(first, second); + assert!(first + .iter() + .any(|line| line.starts_with("transaction_preview "))); + assert!(first + .iter() + .any(|line| line.starts_with("transaction_summary "))); + assert!(first.iter().any(|line| line.starts_with("risk_flags="))); + assert!(first.iter().any(|line| line.starts_with("change_add "))); + } + + #[test] + fn bundle_apply_with_missing_file_returns_actionable_error() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let missing = layout.prefix().join("missing-bundle.toml"); + let err = load_bundle_document_from_path(&missing) + .expect_err("missing file should return actionable error"); + let rendered = err.to_string(); + assert!(rendered.contains("bundle file not found")); + assert!(rendered.contains(missing.to_string_lossy().as_ref())); + assert!(rendered.contains("--file ")); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + #[test] fn resolve_init_shell_prefers_requested_shell_over_env_detection() { let resolved = resolve_init_shell(Some(CliCompletionShell::Fish), Some("/bin/zsh"), false); @@ -4426,6 +5340,7 @@ old-cc = "<2.0.0" assert_eq!(lines[2], " Provides: c-compiler, cc"); assert_eq!(lines[3], " Conflicts: legacy-cc(*)"); assert_eq!(lines[4], " Replaces: old-cc(<2.0.0)"); + assert_eq!(lines[5], " Policy: provides=2 conflicts=1 replaces=1"); } #[test] @@ -4996,6 +5911,48 @@ old-cc = "<2.0.0" let _ = std::fs::remove_dir_all(layout.prefix()); } + #[test] + fn best_available_short_description_prefers_manifest_description() { + let manifest = PackageManifest::from_toml_str( + r#" +name = "ripgrep" +version = 
"14.1.0" +description = "Fast line-oriented search tool" +license = "MIT" +homepage = "https://ripgrep.example.test" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/ripgrep.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + + let summary = best_available_short_description(&manifest); + assert_eq!(summary.as_deref(), Some("Fast line-oriented search tool")); + } + + #[test] + fn best_available_short_description_sanitizes_tab_and_newline() { + let manifest = PackageManifest::from_toml_str( + r#" +name = "ripgrep" +version = "14.1.0" +description = "Fast\tline\nsearch" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/ripgrep.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest should parse"); + + let summary = best_available_short_description(&manifest); + assert_eq!(summary.as_deref(), Some("Fast line search")); + } + #[test] fn format_search_results_reports_empty_match_with_actionable_guidance() { let lines = format_search_results(&[], "rip"); @@ -5954,6 +6911,94 @@ old-cc = "<2.0.0" assert!(!layout.gui_native_state_path("demo").exists()); } + #[test] + fn build_from_source_flag_returns_not_supported_error_text() { + let err = ensure_build_from_source_not_supported("install", true) + .expect_err("build-from-source should fail closed until implemented"); + assert_eq!( + err.to_string(), + "source builds are not yet supported; remove --build-from-source and use a target with published binary artifacts" + ); + } + + #[test] + fn install_build_from_source_is_rejected_at_command_entrypoint() { + let cli = Cli::try_parse_from(["crosspack", "install", "demo", "--build-from-source"]) + .expect("command must parse"); + let err = run_cli(cli).expect_err("build-from-source must be rejected before install"); + assert!(err + .to_string() + .contains("source builds are not yet supported")); + } + + #[test] + fn bundle_apply_build_from_source_is_rejected_at_command_entrypoint() { + let cli = 
Cli::try_parse_from(["crosspack", "bundle", "apply", "--build-from-source"]) + .expect("command must parse"); + let err = run_cli(cli).expect_err("build-from-source must be rejected before bundle apply"); + assert!(err + .to_string() + .contains("source builds are not yet supported")); + } + + #[test] + fn source_build_metadata_requires_build_from_source_flag_when_binary_artifact_missing() { + let manifest = PackageManifest::from_toml_str( + r#" +name = "demo" +version = "1.0.0" + +[[artifacts]] +target = "aarch64-apple-darwin" +url = "https://example.test/demo-1.0.0-aarch64.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/demo-1.0.0-src.tar.gz" +build_system = "cargo" +build_commands = ["cargo", "build", "--release"] +install_commands = ["cargo", "install", "--path", "."] +"#, + ) + .expect("manifest should parse"); + + let err = select_artifact_for_target(&manifest, "x86_64-unknown-linux-gnu", false) + .expect_err("source-build gate should require explicit opt-in"); + assert_eq!( + err.to_string(), + "source build required for demo 1.0.0 on target x86_64-unknown-linux-gnu: no binary artifact published; source builds are not yet supported" + ); + } + + #[test] + fn source_build_metadata_with_flag_returns_not_supported_guardrail_error() { + let manifest = PackageManifest::from_toml_str( + r#" +name = "demo" +version = "1.0.0" + +[[artifacts]] +target = "aarch64-apple-darwin" +url = "https://example.test/demo-1.0.0-aarch64.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/demo-1.0.0-src.tar.gz" +build_system = "cargo" +build_commands = ["cargo", "build", "--release"] +install_commands = ["cargo", "install", "--path", "."] +"#, + ) + .expect("manifest should parse"); + + let err = select_artifact_for_target(&manifest, "x86_64-unknown-linux-gnu", true) + .expect_err("source build path should fail closed while unimplemented"); + assert_eq!( + err.to_string(), + "source builds are not yet supported; remove 
--build-from-source and use a target with published binary artifacts" + ); + } + fn resolved_install(name: &str, version: &str) -> ResolvedInstall { let manifest = PackageManifest::from_toml_str(&format!( r#" diff --git a/crates/crosspack-core/src/lib.rs b/crates/crosspack-core/src/lib.rs index 2cff4e4..d3cb3ba 100644 --- a/crates/crosspack-core/src/lib.rs +++ b/crates/crosspack-core/src/lib.rs @@ -6,7 +6,7 @@ mod manifest; pub use archive::ArchiveType; pub use artifact::{Artifact, ArtifactBinary, ArtifactCompletion, ArtifactCompletionShell}; pub use gui::{ArtifactGuiApp, ArtifactGuiFileAssociation, ArtifactGuiProtocol}; -pub use manifest::PackageManifest; +pub use manifest::{PackageManifest, SourceBuildMetadata}; #[cfg(test)] mod tests; diff --git a/crates/crosspack-core/src/manifest.rs b/crates/crosspack-core/src/manifest.rs index f27d02e..14c9dd9 100644 --- a/crates/crosspack-core/src/manifest.rs +++ b/crates/crosspack-core/src/manifest.rs @@ -10,6 +10,7 @@ use crate::artifact::Artifact; pub struct PackageManifest { pub name: String, pub version: Version, + pub description: Option, pub license: Option, pub homepage: Option, #[serde(default)] @@ -22,6 +23,20 @@ pub struct PackageManifest { pub dependencies: BTreeMap, #[serde(default)] pub artifacts: Vec, + #[serde(default)] + pub source_build: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(deny_unknown_fields)] +pub struct SourceBuildMetadata { + #[serde(alias = "source_url")] + pub url: String, + pub build_system: String, + #[serde(default)] + pub build_commands: Vec, + #[serde(default)] + pub install_commands: Vec, } impl PackageManifest { diff --git a/crates/crosspack-core/src/tests.rs b/crates/crosspack-core/src/tests.rs index 71436e0..7a5bce2 100644 --- a/crates/crosspack-core/src/tests.rs +++ b/crates/crosspack-core/src/tests.rs @@ -23,6 +23,7 @@ fn parse_manifest() { let content = r#" name = "ripgrep" version = "14.1.0" +description = "Fast line-oriented search tool" 
license = "MIT" provides = ["ripgrep", "rg"] @@ -52,6 +53,10 @@ path = "completions/rg.bash" let parsed = PackageManifest::from_toml_str(content).expect("manifest should parse"); assert_eq!(parsed.name, "ripgrep"); assert_eq!(parsed.version.to_string(), "14.1.0"); + assert_eq!( + parsed.description.as_deref(), + Some("Fast line-oriented search tool") + ); assert_eq!(parsed.provides, vec!["ripgrep", "rg"]); assert_eq!( parsed.conflicts.get("grep"), @@ -77,6 +82,52 @@ path = "completions/rg.bash" ); } +#[test] +fn parse_manifest_without_description_defaults_to_none() { + let content = r#" +name = "jq" +version = "1.7.1" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/jq-1.7.1.tar.gz" +sha256 = "abc123" +"#; + + let parsed = PackageManifest::from_toml_str(content).expect("manifest should parse"); + assert_eq!(parsed.description, None); + assert_eq!(parsed.source_build, None); +} + +#[test] +fn parse_manifest_with_source_build_section() { + let content = r#" +name = "demo" +version = "1.2.3" + +[source_build] +url = "https://example.test/demo-1.2.3.tar.gz" +build_system = "cargo" +build_commands = ["cargo", "build", "--release"] +install_commands = ["cargo", "install", "--path", "."] +"#; + + let parsed = PackageManifest::from_toml_str(content).expect("manifest should parse"); + let source_build = parsed + .source_build + .expect("source_build metadata should be present"); + assert_eq!(source_build.url, "https://example.test/demo-1.2.3.tar.gz"); + assert_eq!(source_build.build_system, "cargo"); + assert_eq!( + source_build.build_commands, + vec!["cargo", "build", "--release"] + ); + assert_eq!( + source_build.install_commands, + vec!["cargo", "install", "--path", "."] + ); +} + #[test] fn parse_manifest_with_multiple_artifact_completions() { let content = r#" diff --git a/docs/architecture.md b/docs/architecture.md index 21334cf..68cf525 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -39,7 +39,7 @@ Default user 
prefixes: - `search` and `info` query the local registry index. - `search ` returns deterministic rows with `name`, short description fallback, latest version, and source; match order is exact name, then prefix, then keyword. -- Manifest metadata currently has no dedicated description field, so `search` short description falls back to manifest `provides`, then `license`, then `homepage` (or `-` when none are present). +- `search` short description prefers manifest `description`, then falls back to `provides`, `license`, and `homepage` (or `-` when none are present). - Metadata command backend selection is: - if `--registry-root` is set, read directly from that registry root (legacy single-root mode), - otherwise read from configured snapshots under `/state/registries/cache/`. @@ -79,10 +79,16 @@ Default user prefixes: - Output determinism contract remains fixed for machine-oriented lines (`transaction_preview`, `transaction_summary`, `risk_flags`, ordered `change_*`, and `update summary: updated= up-to-date= failed=`). - Interactive rendering is additive only and must continue to route through centralized renderer/formatter/progress helpers so plain-mode contracts remain unchanged. - `pin` stores per-package version constraints in `/state/pins/.pin`. +- `outdated` compares installed receipt versions with latest available metadata versions and reports upgrade candidates. +- `depends `, `uses `, and `why ` provide deterministic dependency introspection from installed receipts. +- `bundle export` writes deterministic root+pin environment bundles; `bundle apply` replays bundle roots through standard resolve/install flows. +- `services list|status|start|stop|restart` manages deterministic Crosspack service-state files under `/state/services/` for installed packages. - `upgrade` upgrades one package (`upgrade `) or all installed root packages (`upgrade`) while honoring pins. 
- `upgrade --dry-run` performs full planning and emits the same deterministic transaction preview format without mutating install state. +- `install`, `upgrade`, and `bundle apply` support `--explain` in dry-run mode only; explainability lines are additive and deterministic (`explain_provider`, `explain_replacement`, `explain_conflict`). - Global `upgrade` runs one solve per target group derived from root receipts and rejects cross-target package-name overlap; current install state is package-name keyed. - `install` and `upgrade` persist `install_mode` in receipts (`managed` or `native`, derived from artifact-kind defaults). +- `--build-from-source` is parsed for `install` and `bundle apply` but currently guarded fail-closed until source-build execution is shipped. - `install` and `upgrade` persist `install_reason` in receipts (`root` for explicit installs, `dependency` for transitive installs), while preserving existing root intent on upgrades. - `install` and `upgrade` persist `exposed_completions` receipt entries for package-declared completion files exposed under `/share/completions/packages//`. - `install` and `upgrade` persist GUI asset ownership in optional `/state/installed/.gui` sidecars for deterministic stale cleanup and uninstall removal. @@ -90,6 +96,7 @@ Default user prefixes: - Native GUI registration is best-effort: on macOS, adapters attempt system-scope registration first and fall back to user-scope; on other platforms, registration remains user-scope only. Adapter failures produce warning lines and do not fail otherwise successful installs/upgrades/uninstalls. - `uninstall` is dependency-aware: it blocks removal when remaining roots still require the package, reports blocking roots, removes requested packages, and auto-prunes orphan dependencies. - `uninstall` prunes unreferenced artifact cache files for removed packages. +- `cache list`, `cache gc`, and `cache prune` provide explicit artifact cache lifecycle controls. 
- Transaction recovery commands are shipped and operational: - `rollback [txid]` replays rollback for eligible failed/incomplete transactions. - `repair` clears stale transaction markers and reconciles interrupted state. diff --git a/docs/manifest-spec.md b/docs/manifest-spec.md index b82eb4f..f7d416d 100644 --- a/docs/manifest-spec.md +++ b/docs/manifest-spec.md @@ -10,9 +10,20 @@ Each package version is represented by a TOML manifest stored in the registry in ## Optional Fields +- `description` - `license` - `homepage` - `dependencies`: map of package name to semver constraint. +- `source_build` (non-GA guardrail metadata): optional source-build metadata block used for explicit source-build policy signaling. + +### Source Build Metadata (`source_build`) + +`source_build` is parsed and validated, but end-to-end source-build execution is currently guarded (non-GA). + +- `url`: source archive or source tree URL. +- `build_system`: build-system token (`cargo`, `cmake`, etc.). +- `build_commands` (optional): deterministic command array used for build steps. +- `install_commands` (optional): deterministic command array used for install steps. 
## Artifact Fields From 3c50b6fd56a5b98b966f0e8f9fc10e4560f909d6 Mon Sep 17 00:00:00 2001 From: Ian Pascoe Date: Sun, 1 Mar 2026 12:34:12 -0500 Subject: [PATCH 2/6] feat: add community sources, source builds, and declared services --- crates/crosspack-cli/src/bundle_flows.rs | 6 +- crates/crosspack-cli/src/command_flows.rs | 275 +++- crates/crosspack-cli/src/core_flows.rs | 333 ++++- crates/crosspack-cli/src/dispatch.rs | 7 +- crates/crosspack-cli/src/main.rs | 25 +- crates/crosspack-cli/src/tests.rs | 1269 +++++++++++++++-- crates/crosspack-core/src/lib.rs | 2 +- crates/crosspack-core/src/manifest.rs | 55 + crates/crosspack-core/src/tests.rs | 95 ++ crates/crosspack-installer/src/artifact.rs | 138 ++ crates/crosspack-installer/src/layout.rs | 4 + crates/crosspack-installer/src/lib.rs | 17 +- crates/crosspack-installer/src/native.rs | 145 +- crates/crosspack-installer/src/receipts.rs | 165 ++- crates/crosspack-installer/src/tests.rs | 145 +- crates/crosspack-installer/src/types.rs | 15 + crates/crosspack-installer/src/uninstall.rs | 3 +- crates/crosspack-registry/src/lib.rs | 8 +- .../crosspack-registry/src/registry_index.rs | 10 +- crates/crosspack-registry/src/source_state.rs | 51 + crates/crosspack-registry/src/source_store.rs | 9 +- crates/crosspack-registry/src/source_sync.rs | 162 ++- crates/crosspack-registry/src/source_types.rs | 7 + crates/crosspack-registry/src/tests.rs | 455 ++++++ docs/architecture.md | 6 +- docs/install-flow.md | 45 +- docs/manifest-spec.md | 35 +- docs/registry-spec.md | 10 + docs/source-management-spec.md | 13 +- 29 files changed, 3259 insertions(+), 251 deletions(-) diff --git a/crates/crosspack-cli/src/bundle_flows.rs b/crates/crosspack-cli/src/bundle_flows.rs index 36a3a79..7cbb855 100644 --- a/crates/crosspack-cli/src/bundle_flows.rs +++ b/crates/crosspack-cli/src/bundle_flows.rs @@ -124,7 +124,6 @@ fn run_bundle_apply_command( options: BundleApplyOptions<'_>, ) -> Result<()> { ensure_explain_requires_dry_run("bundle apply", 
options.dry_run, options.explain)?; - ensure_build_from_source_not_supported("bundle apply", options.build_from_source)?; layout.ensure_base_dirs()?; ensure_no_active_transaction_for(layout, "bundle apply")?; let provider_overrides = parse_provider_overrides(options.provider_values)?; @@ -232,6 +231,10 @@ fn run_bundle_apply_command( journal_seq += 1; let dependencies = build_dependency_receipts(package, &plan.resolved); + let mut source_build_journal = SourceBuildJournal { + txid: &tx.txid, + seq: &mut journal_seq, + }; let outcome = install_resolved( layout, package, @@ -244,6 +247,7 @@ fn run_bundle_apply_command( interaction_policy, install_progress_mode, }, + Some(&mut source_build_journal), )?; print_install_outcome(&outcome, output_style); } diff --git a/crates/crosspack-cli/src/command_flows.rs b/crates/crosspack-cli/src/command_flows.rs index 9d699ab..4a82107 100644 --- a/crates/crosspack-cli/src/command_flows.rs +++ b/crates/crosspack-cli/src/command_flows.rs @@ -216,6 +216,12 @@ struct ManagedServiceRow { state: ManagedServiceState, } +#[derive(Debug, Clone, PartialEq, Eq)] +struct DeclaredServiceRecord { + package: String, + service: ServiceDeclaration, +} + fn managed_services_state_dir(layout: &PrefixLayout) -> PathBuf { layout.state_dir().join("services") } @@ -233,17 +239,56 @@ fn validate_service_name(name: &str) -> Result<()> { Ok(()) } -fn ensure_service_package_installed(layout: &PrefixLayout, name: &str) -> Result<()> { +fn declared_service_for_name(layout: &PrefixLayout, name: &str) -> Result { validate_service_name(name)?; - let installed = read_install_receipts(layout)? - .iter() - .any(|receipt| receipt.name == name); - if !installed { + let declared = collect_declared_services(layout)?; + let Some(service) = declared.get(name).cloned() else { return Err(anyhow!( - "No installed package found: {name}. Install it first with `crosspack install {name}`" + "No declared service found: {name}. 
Install or upgrade a package that declares this service in its manifest (for example: `crosspack install {name}`)" )); + }; + Ok(service) +} + +fn collect_declared_services( + layout: &PrefixLayout, +) -> Result> { + let receipts = read_install_receipts(layout)?; + let declared_by_package = read_all_declared_services_states(layout)?; + + let mut services = HashMap::new(); + for receipt in &receipts { + let Some(package_services) = declared_by_package.get(&receipt.name) else { + continue; + }; + for service in package_services { + validate_service_name(&service.name)?; + let existing = services.insert( + service.name.clone(), + DeclaredServiceRecord { + package: receipt.name.clone(), + service: service.clone(), + }, + ); + if let Some(previous) = existing { + return Err(anyhow!( + "duplicate declared service '{name}' across packages '{left}' and '{right}'", + name = service.name, + left = previous.package, + right = receipt.name + )); + } + } } - Ok(()) + + Ok(services) +} + +fn declared_service_native_id(service: &ServiceDeclaration) -> String { + service + .native_id + .clone() + .unwrap_or_else(|| service.name.clone()) } fn read_managed_service_state(layout: &PrefixLayout, name: &str) -> Result { @@ -307,57 +352,11 @@ fn write_managed_service_state( } fn collect_managed_service_rows(layout: &PrefixLayout) -> Result> { - let installed = read_install_receipts(layout)? 
- .into_iter() - .map(|receipt| receipt.name) - .collect::>(); - - let state_root = managed_services_state_dir(layout); - let entries = match std::fs::read_dir(&state_root) { - Ok(entries) => entries, - Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Vec::new()), - Err(err) => { - return Err(err).with_context(|| { - format!( - "failed reading services state directory: {}", - state_root.display() - ) - }) - } - }; - + let declared = collect_declared_services(layout)?; let mut rows = Vec::new(); - for entry in entries { - let entry = entry.with_context(|| { - format!( - "failed iterating services state directory: {}", - state_root.display() - ) - })?; - let file_type = entry.file_type().with_context(|| { - format!( - "failed reading service state entry metadata: {}", - entry.path().display() - ) - })?; - if !file_type.is_file() { - continue; - } - - let file_name = entry.file_name(); - let Some(name) = file_name - .to_str() - .and_then(|value| value.strip_suffix(".service")) - else { - continue; - }; - - if !installed.contains(name) { - continue; - } - + for name in declared.keys() { rows.push(ManagedServiceRow { - name: name.to_string(), + name: name.clone(), state: read_managed_service_state(layout, name)?, }); } @@ -367,33 +366,103 @@ fn collect_managed_service_rows(layout: &PrefixLayout) -> Result Result<()> { - ensure_service_package_installed(layout, name)?; + let declared = declared_service_for_name(layout, name)?; + let native_outcome = run_native_service_action( + NativeServiceAction::Status, + &declared.service.name, + &declared_service_native_id(&declared.service), + ); let state = read_managed_service_state(layout, name)?; - println!("service_state name={name} state={}", state.as_str()); + println!( + "{}", + render_service_state_line(name, state, None, &native_outcome) + ); Ok(()) } fn run_service_start_command(layout: &PrefixLayout, name: &str) -> Result<()> { - ensure_service_package_installed(layout, name)?; - 
write_managed_service_state(layout, name, ManagedServiceState::Running)?; - println!("service_state name={name} state=running action=start"); + let declared = declared_service_for_name(layout, name)?; + let native_outcome = run_native_service_action( + NativeServiceAction::Start, + &declared.service.name, + &declared_service_native_id(&declared.service), + ); + let next_state = if native_outcome.applied { + ManagedServiceState::Running + } else { + read_managed_service_state(layout, name)? + }; + if native_outcome.applied { + write_managed_service_state(layout, name, next_state)?; + } + println!( + "{}", + render_service_state_line(name, next_state, Some("start"), &native_outcome) + ); Ok(()) } fn run_service_stop_command(layout: &PrefixLayout, name: &str) -> Result<()> { - ensure_service_package_installed(layout, name)?; - write_managed_service_state(layout, name, ManagedServiceState::Stopped)?; - println!("service_state name={name} state=stopped action=stop"); + let declared = declared_service_for_name(layout, name)?; + let native_outcome = run_native_service_action( + NativeServiceAction::Stop, + &declared.service.name, + &declared_service_native_id(&declared.service), + ); + let next_state = if native_outcome.applied { + ManagedServiceState::Stopped + } else { + read_managed_service_state(layout, name)? 
+ }; + if native_outcome.applied { + write_managed_service_state(layout, name, next_state)?; + } + println!( + "{}", + render_service_state_line(name, next_state, Some("stop"), &native_outcome) + ); Ok(()) } fn run_service_restart_command(layout: &PrefixLayout, name: &str) -> Result<()> { - ensure_service_package_installed(layout, name)?; - write_managed_service_state(layout, name, ManagedServiceState::Running)?; - println!("service_state name={name} state=running action=restart"); + let declared = declared_service_for_name(layout, name)?; + let native_outcome = run_native_service_action( + NativeServiceAction::Restart, + &declared.service.name, + &declared_service_native_id(&declared.service), + ); + let next_state = if native_outcome.applied { + ManagedServiceState::Running + } else { + read_managed_service_state(layout, name)? + }; + if native_outcome.applied { + write_managed_service_state(layout, name, next_state)?; + } + println!( + "{}", + render_service_state_line(name, next_state, Some("restart"), &native_outcome) + ); Ok(()) } +fn render_service_state_line( + name: &str, + state: ManagedServiceState, + action: Option<&str>, + native_outcome: &NativeServiceOutcome, +) -> String { + let mut line = format!("service_state name={name} state={}", state.as_str()); + if let Some(action) = action { + line.push_str(&format!(" action={action}")); + } + line.push_str(&format!( + " adapter={} applied={} reason={}", + native_outcome.adapter, native_outcome.applied, native_outcome.reason_code + )); + line +} + fn run_services_command(layout: &PrefixLayout, command: ServicesCommands) -> Result<()> { layout.ensure_base_dirs()?; match command { @@ -794,6 +863,10 @@ fn run_upgrade_command( journal_seq += 1; let dependencies = build_dependency_receipts(package, &resolved); + let mut source_build_journal = SourceBuildJournal { + txid: &tx.txid, + seq: &mut journal_seq, + }; let outcome = install_resolved( layout, package, @@ -806,6 +879,7 @@ fn run_upgrade_command( 
interaction_policy, install_progress_mode: current_install_progress_mode(output_style), }, + Some(&mut source_build_journal), )?; if let Some(old) = receipts.iter().find(|r| r.name == package.manifest.name) { println!( @@ -961,6 +1035,10 @@ fn run_upgrade_command( journal_seq += 1; let dependencies = build_dependency_receipts(package, resolved); + let mut source_build_journal = SourceBuildJournal { + txid: &tx.txid, + seq: &mut journal_seq, + }; let outcome = install_resolved( layout, package, @@ -973,6 +1051,7 @@ fn run_upgrade_command( interaction_policy, install_progress_mode: current_install_progress_mode(output_style), }, + Some(&mut source_build_journal), )?; if let Some(old) = receipts.iter().find(|r| r.name == package.manifest.name) { @@ -1226,6 +1305,10 @@ fn snapshot_native_sidecar_path(snapshot_root: &Path) -> PathBuf { snapshot_root.join("native").join("sidecar.state") } +fn snapshot_declared_services_sidecar_path(snapshot_root: &Path) -> PathBuf { + snapshot_root.join("services").join("declared.services") +} + fn read_snapshot_manifest(snapshot_root: &Path) -> Result { let path = snapshot_manifest_path(snapshot_root); let raw = match std::fs::read_to_string(&path) { @@ -1238,6 +1321,7 @@ fn read_snapshot_manifest(snapshot_root: &Path) -> Result { @@ -1253,6 +1337,7 @@ fn read_snapshot_manifest(snapshot_root: &Path) -> Result Result Vec { for manifest in manifests { lines.push(format!("- {}", manifest.version)); + if let Some(description) = manifest.description.as_deref() { + let trimmed = description.trim(); + if !trimmed.is_empty() { + lines.push(format!( + " Description: {}", + sanitize_metadata_cell(trimmed) + )); + } + } + if !manifest.provides.is_empty() { lines.push(format!(" Provides: {}", manifest.provides.join(", "))); } @@ -215,6 +225,17 @@ struct ResolvedInstall { artifact: Artifact, resolved_target: String, archive_type: ArchiveType, + source_build: Option, +} + +#[derive(Debug, Clone)] +struct SourceBuildPlan { + url: String, + 
archive_sha256: String, + build_system: String, + build_commands: Vec, + install_commands: Vec, + archive_type: ArchiveType, } #[derive(Debug, Clone)] @@ -355,6 +376,7 @@ struct PackageSnapshotManifest { completions: Vec, gui_assets: Vec, native_sidecar_exists: bool, + declared_services_sidecar_exists: bool, } fn begin_transaction( @@ -621,15 +643,19 @@ fn resolve_install_graph_with_tokens( .ok_or_else(|| anyhow!("resolver selected package missing from graph: {name}"))? .clone(); - let artifact = - select_artifact_for_target(&manifest, &resolved_target, build_from_source)?; - let archive_type = artifact.archive_type()?; + let (artifact, source_build) = + select_install_plan_for_target(&manifest, &resolved_target, build_from_source)?; + let archive_type = source_build + .as_ref() + .map(|plan| plan.archive_type) + .unwrap_or(artifact.archive_type()?); Ok(ResolvedInstall { manifest, artifact, resolved_target: resolved_target.clone(), archive_type, + source_build, }) }) .collect::>>()?; @@ -637,13 +663,6 @@ fn resolve_install_graph_with_tokens( Ok((resolved, resolved_dependency_tokens)) } -fn ensure_build_from_source_not_supported(_operation: &str, build_from_source: bool) -> Result<()> { - if build_from_source { - return Err(source_build_not_supported_error()); - } - Ok(()) -} - fn ensure_explain_requires_dry_run(operation: &str, dry_run: bool, explain: bool) -> Result<()> { if explain && !dry_run { return Err(anyhow!("--explain requires --dry-run for '{}'", operation)); @@ -651,32 +670,47 @@ fn ensure_explain_requires_dry_run(operation: &str, dry_run: bool, explain: bool Ok(()) } -fn source_build_not_supported_error() -> anyhow::Error { - anyhow!( - "source builds are not yet supported; remove --build-from-source and use a target with published binary artifacts" - ) -} - +#[cfg(test)] fn select_artifact_for_target( manifest: &PackageManifest, resolved_target: &str, build_from_source: bool, ) -> Result { + let (artifact, _) = + 
select_install_plan_for_target(manifest, resolved_target, build_from_source)?; + Ok(artifact) +} + +fn select_install_plan_for_target( + manifest: &PackageManifest, + resolved_target: &str, + build_from_source: bool, +) -> Result<(Artifact, Option)> { + if build_from_source { + let source = manifest.source_build.as_ref().ok_or_else(|| { + anyhow!( + "source build requested for {} {} on target {} but manifest has no source_build metadata", + manifest.name, + manifest.version, + resolved_target + ) + })?; + let artifact = select_source_build_artifact_template(manifest, resolved_target)?; + let plan = validate_source_build_plan(manifest, resolved_target, source)?; + return Ok((artifact, Some(plan))); + } + if let Some(artifact) = manifest .artifacts .iter() .find(|artifact| artifact.target == resolved_target) { - return Ok(artifact.clone()); + return Ok((artifact.clone(), None)); } if manifest.source_build.is_some() { - if build_from_source { - return Err(source_build_not_supported_error()); - } - return Err(anyhow!( - "source build required for {} {} on target {}: no binary artifact published; source builds are not yet supported", + "source build required for {} {} on target {}: no binary artifact published; rerun with --build-from-source", manifest.name, manifest.version, resolved_target @@ -691,6 +725,132 @@ fn select_artifact_for_target( )) } +fn select_source_build_artifact_template( + manifest: &PackageManifest, + resolved_target: &str, +) -> Result { + if let Some(artifact) = manifest + .artifacts + .iter() + .find(|artifact| artifact.target == resolved_target) + { + return Ok(artifact.clone()); + } + + if manifest.artifacts.len() == 1 { + return Ok(manifest.artifacts[0].clone()); + } + + Err(anyhow!( + "source build for {} {} on target {} requires deterministic artifact metadata (expected exactly one artifact template when target-specific artifact is absent)", + manifest.name, + manifest.version, + resolved_target + )) +} + +fn validate_source_build_plan( + 
manifest: &PackageManifest, + resolved_target: &str, + source: &crosspack_core::SourceBuildMetadata, +) -> Result { + let url = source.url.trim(); + if url.is_empty() { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: url must not be empty", + manifest.name, + manifest.version, + resolved_target + )); + } + let build_system = source.build_system.trim(); + let archive_sha256 = source.archive_sha256.trim(); + if archive_sha256.is_empty() { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: archive_sha256 must not be empty", + manifest.name, + manifest.version, + resolved_target + )); + } + if !is_valid_sha256_hex(archive_sha256) { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: archive_sha256 must be a 64-character hexadecimal SHA-256 digest", + manifest.name, + manifest.version, + resolved_target + )); + } + if build_system.is_empty() { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: build_system must not be empty", + manifest.name, + manifest.version, + resolved_target + )); + } + if source.build_commands.is_empty() { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: build_commands must not be empty", + manifest.name, + manifest.version, + resolved_target + )); + } + if source.install_commands.is_empty() { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: install_commands must not be empty", + manifest.name, + manifest.version, + resolved_target + )); + } + if source + .build_commands + .iter() + .chain(source.install_commands.iter()) + .any(|token| token.trim().is_empty()) + { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: command tokens must not be empty", + manifest.name, + manifest.version, + resolved_target + )); + } + + let archive_type = ArchiveType::infer_from_url(url).ok_or_else(|| { + anyhow!( + "invalid source_build metadata for {} {} on target {}: 
url '{}' must include a supported archive extension", + manifest.name, + manifest.version, + resolved_target, + url + ) + })?; + if !matches!( + archive_type, + ArchiveType::Zip | ArchiveType::TarGz | ArchiveType::TarZst + ) { + return Err(anyhow!( + "invalid source_build metadata for {} {} on target {}: archive type '{}' is not supported for source builds", + manifest.name, + manifest.version, + resolved_target, + archive_type.as_str() + )); + } + + Ok(SourceBuildPlan { + url: url.to_string(), + archive_sha256: archive_sha256.to_string(), + build_system: build_system.to_string(), + build_commands: source.build_commands.clone(), + install_commands: source.install_commands.clone(), + archive_type, + }) +} + fn build_planned_package_changes( resolved: &[ResolvedInstall], receipts: &[InstallReceipt], @@ -1138,7 +1298,6 @@ fn sync_native_gui_registration_state_best_effort( Ok((current_records, warnings)) } -#[derive(Clone, Copy)] struct InstallResolvedOptions<'a> { snapshot_id: Option<&'a str>, force_redownload: bool, @@ -1146,6 +1305,11 @@ struct InstallResolvedOptions<'a> { install_progress_mode: InstallProgressMode, } +struct SourceBuildJournal<'a> { + txid: &'a str, + seq: &'a mut u64, +} + fn install_resolved( layout: &PrefixLayout, resolved: &ResolvedInstall, @@ -1153,6 +1317,7 @@ fn install_resolved( root_names: &[String], planned_dependency_overrides: &HashMap>, options: InstallResolvedOptions<'_>, + mut source_build_journal: Option<&mut SourceBuildJournal<'_>>, ) -> Result { const INSTALL_PROGRESS_STEPS: usize = 7; let mut progress = InstallProgressRenderer::new( @@ -1172,17 +1337,22 @@ fn install_resolved( let declared_completions = collect_declared_completions(&resolved.artifact)?; let declared_gui_apps = collect_declared_gui_apps(&resolved.artifact)?; + let download_url = if let Some(source_build) = resolved.source_build.as_ref() { + source_build.url.as_str() + } else { + resolved.artifact.url.as_str() + }; let cache_path = resolved_artifact_cache_path( 
layout, &resolved.manifest.name, &resolved.manifest.version.to_string(), &resolved.resolved_target, resolved.archive_type, - &resolved.artifact.url, + download_url, )?; progress.update("download", 2, Some((0, None))); let download_status = download_artifact_with_progress( - &resolved.artifact.url, + download_url, &cache_path, options.force_redownload, |downloaded_bytes, total_bytes| { @@ -1190,28 +1360,78 @@ fn install_resolved( }, )?; + if let (Some(_source_build), Some(journal)) = ( + resolved.source_build.as_ref(), + source_build_journal.as_deref_mut(), + ) { + append_source_build_journal_entry( + layout, + journal, + format!("source_fetch:{}", resolved.manifest.name), + Some(cache_path.display().to_string()), + )?; + } + progress.update("verify", 3, None); - let checksum_ok = verify_sha256_file(&cache_path, &resolved.artifact.sha256)?; + let (expected_sha256, checksum_kind) = + if let Some(source_build) = resolved.source_build.as_ref() { + (source_build.archive_sha256.as_str(), "source archive") + } else { + (resolved.artifact.sha256.as_str(), "artifact") + }; + let checksum_ok = verify_sha256_file(&cache_path, expected_sha256)?; if !checksum_ok { let _ = remove_file_if_exists(&cache_path); return Err(anyhow!( - "sha256 mismatch for {} (expected {})", + "{checksum_kind} sha256 mismatch for {} (expected {})", cache_path.display(), - resolved.artifact.sha256 + expected_sha256 )); } progress.update("install", 4, None); - let install_options = build_artifact_install_options(resolved, options.interaction_policy); - let selected_install_mode = install_options.install_mode; - let install_root = install_from_artifact( - layout, - &resolved.manifest.name, - &resolved.manifest.version.to_string(), - &cache_path, - resolved.archive_type, - install_options, - )?; + let (install_root, selected_install_mode) = if let Some(source_build) = + resolved.source_build.as_ref() + { + let install_root = install_from_source_archive( + layout, + &resolved.manifest.name, + 
&resolved.manifest.version.to_string(), + &cache_path, + source_build.archive_type, + &source_build.build_commands, + &source_build.install_commands, + )?; + if let Some(journal) = source_build_journal { + append_source_build_journal_entry( + layout, + journal, + format!( + "source_build_system:{}:{}", + resolved.manifest.name, source_build.build_system + ), + None, + )?; + append_source_build_journal_entry( + layout, + journal, + format!("source_install:{}", resolved.manifest.name), + Some(install_root.display().to_string()), + )?; + } + (install_root, InstallMode::Managed) + } else { + let install_options = build_artifact_install_options(resolved, options.interaction_policy); + let install_root = install_from_artifact( + layout, + &resolved.manifest.name, + &resolved.manifest.version.to_string(), + &cache_path, + resolved.archive_type, + install_options, + )?; + (install_root, install_options.install_mode) + }; if let Err(err) = apply_replacement_handoff(layout, &replacement_receipts, planned_dependency_overrides) @@ -1288,8 +1508,14 @@ fn install_resolved( version: resolved.manifest.version.to_string(), dependencies: dependency_receipts.to_vec(), target: Some(resolved.resolved_target.clone()), - artifact_url: Some(resolved.artifact.url.clone()), - artifact_sha256: Some(resolved.artifact.sha256.clone()), + artifact_url: Some(download_url.to_string()), + artifact_sha256: Some( + resolved + .source_build + .as_ref() + .map(|plan| plan.archive_sha256.clone()) + .unwrap_or_else(|| resolved.artifact.sha256.clone()), + ), cache_path: Some(cache_path.display().to_string()), exposed_bins: exposed_bins.clone(), exposed_completions: exposed_completions.clone(), @@ -1304,6 +1530,7 @@ fn install_resolved( install_status: "installed".to_string(), installed_at_unix: current_unix_timestamp()?, }; + write_declared_services_state(layout, &resolved.manifest.name, &resolved.manifest.services)?; let receipt_path = write_install_receipt(layout, &receipt)?; progress.update("complete", 
7, None); progress.finish(); @@ -1313,7 +1540,7 @@ fn install_resolved( version: resolved.manifest.version.to_string(), resolved_target: resolved.resolved_target.clone(), archive_type: resolved.archive_type, - artifact_url: resolved.artifact.url.clone(), + artifact_url: download_url.to_string(), cache_path, download_status, install_root, @@ -1332,6 +1559,30 @@ fn install_resolved( }) } +fn append_source_build_journal_entry( + layout: &PrefixLayout, + journal: &mut SourceBuildJournal<'_>, + step: String, + path: Option, +) -> Result<()> { + append_transaction_journal_entry( + layout, + journal.txid, + &TransactionJournalEntry { + seq: *journal.seq, + step, + state: "done".to_string(), + path, + }, + )?; + *journal.seq += 1; + Ok(()) +} + +fn is_valid_sha256_hex(value: &str) -> bool { + value.len() == 64 && value.as_bytes().iter().all(u8::is_ascii_hexdigit) +} + fn resolved_artifact_cache_path( layout: &PrefixLayout, package_name: &str, diff --git a/crates/crosspack-cli/src/dispatch.rs b/crates/crosspack-cli/src/dispatch.rs index 9fd14f3..43e0dd2 100644 --- a/crates/crosspack-cli/src/dispatch.rs +++ b/crates/crosspack-cli/src/dispatch.rs @@ -36,7 +36,6 @@ fn run_cli(cli: Cli) -> Result<()> { let (name, requirement) = parse_spec(&spec)?; let provider_overrides = parse_provider_overrides(&provider)?; ensure_explain_requires_dry_run("install", dry_run, explain)?; - ensure_build_from_source_not_supported("install", build_from_source)?; let escalation_policy = resolve_escalation_policy(escalation); let interaction_policy = install_interaction_policy(escalation_policy); let output_style = current_output_style(); @@ -147,6 +146,10 @@ fn run_cli(cli: Cli) -> Result<()> { journal_seq += 1; let dependencies = build_dependency_receipts(package, &resolved); + let mut source_build_journal = SourceBuildJournal { + txid: &tx.txid, + seq: &mut journal_seq, + }; let outcome = install_resolved( &layout, package, @@ -159,6 +162,7 @@ fn run_cli(cli: Cli) -> Result<()> { 
interaction_policy, install_progress_mode, }, + Some(&mut source_build_journal), )?; print_install_outcome(&outcome, output_style); } @@ -301,6 +305,7 @@ fn run_cli(cli: Cli) -> Result<()> { fingerprint_sha256: fingerprint, enabled: true, priority, + community: None, })?; for line in output_lines { println!("{line}"); diff --git a/crates/crosspack-cli/src/main.rs b/crates/crosspack-cli/src/main.rs index 4bfe46c..4ba22a1 100644 --- a/crates/crosspack-cli/src/main.rs +++ b/crates/crosspack-cli/src/main.rs @@ -13,23 +13,28 @@ use clap::{Args, CommandFactory, Parser, Subcommand, ValueEnum}; use clap_complete::Shell; use crosspack_core::{ ArchiveType, Artifact, ArtifactCompletionShell, ArtifactGuiApp, PackageManifest, + ServiceDeclaration, }; +#[cfg(test)] +use crosspack_installer::read_declared_services_state; use crosspack_installer::{ append_transaction_journal_entry, bin_path, clear_active_transaction, current_unix_timestamp, default_user_prefix, expose_binary, expose_completion, expose_gui_app, exposed_completion_path, - gui_asset_path, install_from_artifact, projected_exposed_completion_path, projected_gui_assets, - read_active_transaction, read_all_gui_exposure_states, read_all_pins, read_gui_exposure_state, - read_gui_native_state, read_install_receipts, read_transaction_metadata, - register_native_gui_app_best_effort, remove_exposed_binary, remove_exposed_completion, - remove_exposed_gui_asset, remove_file_if_exists, remove_native_gui_registration_best_effort, + gui_asset_path, install_from_artifact, install_from_source_archive, + projected_exposed_completion_path, projected_gui_assets, read_active_transaction, + read_all_declared_services_states, read_all_gui_exposure_states, read_all_pins, + read_gui_exposure_state, read_gui_native_state, read_install_receipts, + read_transaction_metadata, register_native_gui_app_best_effort, remove_exposed_binary, + remove_exposed_completion, remove_exposed_gui_asset, remove_file_if_exists, + 
remove_native_gui_registration_best_effort, run_native_service_action, run_package_native_uninstall_actions, set_active_transaction, uninstall_blocked_by_roots_with_dependency_overrides_and_ignored_roots, uninstall_package, uninstall_package_with_dependency_overrides_and_ignored_roots, update_transaction_status, - write_gui_exposure_state, write_gui_native_state, write_install_receipt, write_pin, - write_transaction_metadata, ArtifactInstallOptions, GuiExposureAsset, - GuiNativeRegistrationRecord, InstallInteractionPolicy, InstallMode, InstallReason, - InstallReceipt, PrefixLayout, TransactionJournalEntry, TransactionMetadata, UninstallResult, - UninstallStatus, + write_declared_services_state, write_gui_exposure_state, write_gui_native_state, + write_install_receipt, write_pin, write_transaction_metadata, ArtifactInstallOptions, + GuiExposureAsset, GuiNativeRegistrationRecord, InstallInteractionPolicy, InstallMode, + InstallReason, InstallReceipt, NativeServiceAction, NativeServiceOutcome, PrefixLayout, + TransactionJournalEntry, TransactionMetadata, UninstallResult, UninstallStatus, }; use crosspack_registry::{ ConfiguredRegistryIndex, RegistryIndex, RegistrySourceKind, RegistrySourceRecord, diff --git a/crates/crosspack-cli/src/tests.rs b/crates/crosspack-cli/src/tests.rs index fbe952d..27b5573 100644 --- a/crates/crosspack-cli/src/tests.rs +++ b/crates/crosspack-cli/src/tests.rs @@ -4,7 +4,10 @@ mod tests { use clap::error::ErrorKind; use crosspack_registry::RegistrySourceWithSnapshotStatus; use ed25519_dalek::{Signer, SigningKey}; - use std::sync::atomic::{AtomicU64, Ordering}; + use std::sync::{ + atomic::{AtomicU64, Ordering}, + Mutex, OnceLock, + }; const EMPTY_SHA256: &str = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; @@ -481,6 +484,15 @@ mod tests { }; write_gui_native_state(&layout, package_name, std::slice::from_ref(&native_record)) .expect("must write native sidecar state"); + write_declared_services_state( + &layout, + 
package_name, + &[ServiceDeclaration { + name: "demo".to_string(), + native_id: Some("demo@main".to_string()), + }], + ) + .expect("must write declared services sidecar state"); write_install_receipt( &layout, @@ -514,6 +526,7 @@ mod tests { assert_eq!(manifest.completions, vec![completion_rel_path.clone()]); assert_eq!(manifest.gui_assets, vec![gui_asset.clone()]); assert!(manifest.native_sidecar_exists); + assert!(manifest.declared_services_sidecar_exists); assert!(snapshot_bin_path(&snapshot_root, "demo").exists()); assert!( @@ -528,6 +541,10 @@ mod tests { snapshot_native_sidecar_path(&snapshot_root).exists(), "native sidecar state file should be captured" ); + assert!( + snapshot_declared_services_sidecar_path(&snapshot_root).exists(), + "declared services sidecar should be captured" + ); let _ = std::fs::remove_dir_all(layout.prefix()); } @@ -577,6 +594,12 @@ mod tests { }; write_install_receipt(&layout, &previous_receipt).expect("must write previous receipt"); std::fs::write(bin_path(&layout, "demo"), "old-bin").expect("must write old binary"); + let old_declared_services = vec![ServiceDeclaration { + name: "demo".to_string(), + native_id: Some("demo@old".to_string()), + }]; + write_declared_services_state(&layout, package_name, &old_declared_services) + .expect("must write old declared services sidecar"); let snapshot_root = layout .transaction_staging_path(txid) @@ -587,6 +610,8 @@ mod tests { std::fs::create_dir_all(snapshot_root.join("receipt")) .expect("must create snapshot receipts"); std::fs::create_dir_all(snapshot_root.join("bins")).expect("must create snapshot bins"); + std::fs::create_dir_all(snapshot_root.join("services")) + .expect("must create snapshot services dir"); std::fs::copy( layout .pkgs_dir() @@ -608,9 +633,14 @@ mod tests { snapshot_root.join("bins").join("demo"), ) .expect("must copy bin fixture into snapshot"); + std::fs::copy( + layout.declared_services_state_path(package_name), + 
snapshot_declared_services_sidecar_path(&snapshot_root), + ) + .expect("must copy declared services fixture into snapshot"); std::fs::write( snapshot_root.join("manifest.txt"), - "package_exists=1\nreceipt_exists=1\nbin=demo\n", + "package_exists=1\nreceipt_exists=1\nbin=demo\ndeclared_services_sidecar_exists=1\n", ) .expect("must write snapshot manifest"); @@ -659,6 +689,15 @@ mod tests { }; write_install_receipt(&layout, &new_receipt).expect("must write new receipt"); std::fs::write(bin_path(&layout, "demo"), "new-bin").expect("must write new binary"); + write_declared_services_state( + &layout, + package_name, + &[ServiceDeclaration { + name: "demo".to_string(), + native_id: Some("demo@new".to_string()), + }], + ) + .expect("must write new declared services sidecar"); append_transaction_journal_entry( &layout, @@ -713,6 +752,89 @@ mod tests { std::fs::read_to_string(bin_path(&layout, "demo")).expect("must read restored binary"), "old-bin" ); + let restored_declared_services = read_declared_services_state(&layout, package_name) + .expect("must read restored declared services sidecar"); + assert_eq!(restored_declared_services, old_declared_services); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[test] + fn rollback_cleans_declared_services_sidecar_when_snapshot_has_no_sidecar() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let txid = "tx-clean-services-sidecar"; + let package_name = "demo"; + write_transaction_metadata( + &layout, + &TransactionMetadata { + version: 1, + txid: txid.to_string(), + operation: "upgrade".to_string(), + status: "failed".to_string(), + started_at_unix: 1_771_001_307, + snapshot_id: None, + }, + ) + .expect("must write metadata"); + set_active_transaction(&layout, txid).expect("must set active marker"); + + let snapshot_root = layout + .transaction_staging_path(txid) + .join("rollback") + .join(package_name); + std::fs::create_dir_all(snapshot_root.join("package")) + 
.expect("must create snapshot package dir"); + std::fs::create_dir_all(snapshot_root.join("receipt")) + .expect("must create snapshot receipt dir"); + std::fs::create_dir_all(snapshot_root.join("bins")).expect("must create snapshot bins dir"); + std::fs::write( + snapshot_root.join("manifest.txt"), + "package_exists=0\nreceipt_exists=0\ndeclared_services_sidecar_exists=0\n", + ) + .expect("must write snapshot manifest"); + + write_declared_services_state( + &layout, + package_name, + &[ServiceDeclaration { + name: "demo".to_string(), + native_id: Some("demo@interrupted".to_string()), + }], + ) + .expect("must seed interrupted declared services sidecar"); + + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry { + seq: 1, + step: format!("backup_package_state:{package_name}"), + state: "done".to_string(), + path: Some(snapshot_root.display().to_string()), + }, + ) + .expect("must append backup step"); + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry { + seq: 2, + step: format!("upgrade_package:{package_name}"), + state: "done".to_string(), + path: Some(package_name.to_string()), + }, + ) + .expect("must append mutating step"); + + run_rollback_command(&layout, Some(txid.to_string())) + .expect("rollback should remove declared services sidecar when absent in snapshot"); + + assert!( + !layout.declared_services_state_path(package_name).exists(), + "declared services sidecar should be removed" + ); let _ = std::fs::remove_dir_all(layout.prefix()); } @@ -849,6 +971,128 @@ mod tests { let _ = std::fs::remove_dir_all(layout.prefix()); } + #[test] + fn rollback_ignores_source_build_journal_steps_and_restores_snapshot_state() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let txid = "tx-source-build-rollback"; + let package_name = "demo"; + let metadata = TransactionMetadata { + version: 1, + txid: txid.to_string(), + operation: "install".to_string(), + status: 
"failed".to_string(), + started_at_unix: 1_771_001_300, + snapshot_id: None, + }; + write_transaction_metadata(&layout, &metadata).expect("must write metadata"); + set_active_transaction(&layout, txid).expect("must set active marker"); + + let snapshot_root = layout + .transaction_staging_path(txid) + .join("rollback") + .join(package_name); + std::fs::create_dir_all(snapshot_root.join("package")) + .expect("must create snapshot package dir"); + std::fs::create_dir_all(snapshot_root.join("receipt")) + .expect("must create snapshot receipt dir"); + std::fs::create_dir_all(snapshot_root.join("bins")).expect("must create snapshot bins dir"); + std::fs::write( + snapshot_root.join("manifest.txt"), + "package_exists=0\nreceipt_exists=0\n", + ) + .expect("must write snapshot manifest"); + + std::fs::create_dir_all(layout.pkgs_dir().join(package_name).join("2.0.0")) + .expect("must create interrupted package state"); + std::fs::write( + layout + .pkgs_dir() + .join(package_name) + .join("2.0.0") + .join("partial.txt"), + "interrupted", + ) + .expect("must write interrupted package marker"); + + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry { + seq: 1, + step: format!("backup_package_state:{package_name}"), + state: "done".to_string(), + path: Some(snapshot_root.display().to_string()), + }, + ) + .expect("must append backup step"); + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry { + seq: 2, + step: format!("source_fetch:{package_name}"), + state: "done".to_string(), + path: Some("/tmp/source-archive.tar.gz".to_string()), + }, + ) + .expect("must append source fetch step"); + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry { + seq: 3, + step: format!("source_build_system:{package_name}:cargo"), + state: "done".to_string(), + path: None, + }, + ) + .expect("must append source build system step"); + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry 
{ + seq: 4, + step: format!("source_install:{package_name}"), + state: "done".to_string(), + path: Some(layout.pkgs_dir().join(package_name).display().to_string()), + }, + ) + .expect("must append source install step"); + append_transaction_journal_entry( + &layout, + txid, + &TransactionJournalEntry { + seq: 5, + step: format!("install_package:{package_name}"), + state: "done".to_string(), + path: Some(package_name.to_string()), + }, + ) + .expect("must append mutating install step"); + + run_rollback_command(&layout, Some(txid.to_string())) + .expect("rollback should succeed with source-build journal steps"); + + let updated = read_transaction_metadata(&layout, txid) + .expect("must read rollback metadata") + .expect("rollback metadata should exist"); + assert_eq!(updated.status, "rolled_back"); + assert!( + read_active_transaction(&layout) + .expect("must read active marker") + .is_none(), + "rollback should clear active marker" + ); + assert!( + !layout.pkgs_dir().join(package_name).exists(), + "rollback should remove interrupted source-build package state" + ); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + #[test] fn rollback_native_cleanup_uses_sidecar_when_receipt_missing() { let layout = test_layout(); @@ -2384,6 +2628,7 @@ path = "rg" manifest, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: ArchiveType::TarGz, + source_build: None, }; let err = validate_install_preflight_for_resolved(&layout, &resolved, &[]) @@ -4302,7 +4547,7 @@ ripgrep-legacy = "*" } #[test] - fn managed_service_state_transitions_are_deterministic() { + fn managed_service_state_transitions_are_deterministic_when_native_not_applied() { let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); @@ -4326,6 +4571,15 @@ ripgrep-legacy = "*" }, ) .expect("must write receipt"); + write_declared_services_state( + &layout, + "demo", + &[crosspack_core::ServiceDeclaration { + name: "demo".to_string(), + native_id: 
Some("crosspack-missing-service-for-tests".to_string()), + }], + ) + .expect("must write declared services state"); assert_eq!( read_managed_service_state(&layout, "demo").expect("must read default state"), @@ -4334,13 +4588,13 @@ ripgrep-legacy = "*" run_service_start_command(&layout, "demo").expect("start must succeed"); assert_eq!( - read_managed_service_state(&layout, "demo").expect("must read running state"), - ManagedServiceState::Running + read_managed_service_state(&layout, "demo") + .expect("must preserve stopped state when native action fails"), + ManagedServiceState::Stopped ); - assert_eq!( - std::fs::read_to_string(managed_service_state_path(&layout, "demo")) - .expect("must read running state file"), - "state=running\n" + assert!( + !managed_service_state_path(&layout, "demo").exists(), + "state file should not be created when native start is not applied" ); run_service_stop_command(&layout, "demo").expect("stop must succeed"); @@ -4348,36 +4602,97 @@ ripgrep-legacy = "*" read_managed_service_state(&layout, "demo").expect("must read stopped state"), ManagedServiceState::Stopped ); - assert_eq!( - std::fs::read_to_string(managed_service_state_path(&layout, "demo")) - .expect("must read stopped state file"), - "state=stopped\n" + assert!( + !managed_service_state_path(&layout, "demo").exists(), + "state file should remain absent when native stop is not applied" ); run_service_restart_command(&layout, "demo").expect("restart must succeed"); assert_eq!( - read_managed_service_state(&layout, "demo").expect("must read restarted state"), - ManagedServiceState::Running + read_managed_service_state(&layout, "demo") + .expect("must preserve stopped state on restart failure"), + ManagedServiceState::Stopped ); - assert_eq!( - std::fs::read_to_string(managed_service_state_path(&layout, "demo")) - .expect("must read restarted state file"), - "state=running\n" + assert!( + !managed_service_state_path(&layout, "demo").exists(), + "state file should remain absent 
when native restart is not applied" ); let _ = std::fs::remove_dir_all(layout.prefix()); } #[test] - fn service_commands_require_installed_receipt_presence() { + fn service_output_contract_lines_include_adapter_applied_and_reason_keys() { + let outcome = NativeServiceOutcome { + adapter: "systemd".to_string(), + applied: false, + reason_code: "adapter-tool-missing".to_string(), + }; + + assert_eq!( + render_service_state_line("demo", ManagedServiceState::Stopped, None, &outcome), + "service_state name=demo state=stopped adapter=systemd applied=false reason=adapter-tool-missing" + ); + assert_eq!( + render_service_state_line( + "demo", + ManagedServiceState::Stopped, + Some("start"), + &outcome + ), + "service_state name=demo state=stopped action=start adapter=systemd applied=false reason=adapter-tool-missing" + ); + assert_eq!( + render_service_state_line( + "demo", + ManagedServiceState::Stopped, + Some("stop"), + &outcome + ), + "service_state name=demo state=stopped action=stop adapter=systemd applied=false reason=adapter-tool-missing" + ); + assert_eq!( + render_service_state_line( + "demo", + ManagedServiceState::Stopped, + Some("restart"), + &outcome + ), + "service_state name=demo state=stopped action=restart adapter=systemd applied=false reason=adapter-tool-missing" + ); + } + + #[test] + fn service_commands_require_declared_service_presence() { let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); + write_install_receipt( + &layout, + &InstallReceipt { + name: "missing".to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }, + ) + .expect("must write receipt"); + 
let err = run_service_start_command(&layout, "missing") - .expect_err("service start should require installed package receipt"); + .expect_err("service start should require declared service metadata"); let message = err.to_string(); - assert!(message.contains("No installed package found: missing")); - assert!(message.contains("crosspack install missing")); + assert!(message.contains("No declared service found: missing")); + assert!(message.contains("crosspack install")); let _ = std::fs::remove_dir_all(layout.prefix()); } @@ -4409,6 +4724,24 @@ ripgrep-legacy = "*" ) .expect("must write receipt"); } + write_declared_services_state( + &layout, + "alpha", + &[crosspack_core::ServiceDeclaration { + name: "alpha".to_string(), + native_id: None, + }], + ) + .expect("must write declared services"); + write_declared_services_state( + &layout, + "charlie", + &[crosspack_core::ServiceDeclaration { + name: "charlie".to_string(), + native_id: Some("charlie-daemon".to_string()), + }], + ) + .expect("must write declared services"); write_managed_service_state(&layout, "charlie", ManagedServiceState::Running) .expect("must write running service state"); @@ -4453,11 +4786,21 @@ ripgrep-legacy = "*" }, ) .expect("must write receipt"); + write_declared_services_state( + &layout, + "cpp+tool", + &[crosspack_core::ServiceDeclaration { + name: "cpp+tool".to_string(), + native_id: None, + }], + ) + .expect("must write declared services state"); run_service_start_command(&layout, "cpp+tool").expect("start must succeed"); assert_eq!( - read_managed_service_state(&layout, "cpp+tool").expect("must read running state"), - ManagedServiceState::Running + read_managed_service_state(&layout, "cpp+tool") + .expect("must keep stopped state when start is not applied"), + ManagedServiceState::Stopped ); let _ = std::fs::remove_dir_all(layout.prefix()); @@ -4668,12 +5011,14 @@ sha256 = "abc" manifest: provider_manifest, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: 
ArchiveType::TarZst, + source_build: None, }, ResolvedInstall { artifact: tool_manifest.artifacts[0].clone(), manifest: tool_manifest, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: ArchiveType::TarZst, + source_build: None, }, ]; let receipts = vec![InstallReceipt { @@ -4763,18 +5108,21 @@ sha256 = "abc" manifest: app_manifest, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: ArchiveType::TarZst, + source_build: None, }, ResolvedInstall { artifact: zigcc_manifest.artifacts[0].clone(), manifest: zigcc_manifest, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: ArchiveType::TarZst, + source_build: None, }, ResolvedInstall { artifact: clang_manifest.artifacts[0].clone(), manifest: clang_manifest, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: ArchiveType::TarZst, + source_build: None, }, ]; @@ -5318,11 +5666,34 @@ sha256 = "bar" } #[test] - fn format_info_lines_includes_policy_sections_when_present() { + fn format_info_lines_includes_sanitized_description_when_present() { + let manifest = PackageManifest::from_toml_str( + r#" +name = "compiler" +version = "2.1.0" + description = "Fast\tline\nsearch\rtool" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/compiler.tar.zst" +sha256 = "abc" +"#, + ) + .expect("manifest must parse"); + + let lines = format_info_lines("compiler", &[manifest]); + assert_eq!(lines[0], "Package: compiler"); + assert_eq!(lines[1], "- 2.1.0"); + assert_eq!(lines[2], " Description: Fast line search tool"); + } + + #[test] + fn format_info_lines_preserves_policy_order_with_and_without_description() { let manifest = PackageManifest::from_toml_str( r#" name = "compiler" version = "2.1.0" +description = "Portable toolchain" provides = ["c-compiler", "cc"] [conflicts] @@ -5337,11 +5708,59 @@ old-cc = "<2.0.0" let lines = format_info_lines("compiler", &[manifest]); assert_eq!(lines[0], "Package: compiler"); assert_eq!(lines[1], 
"- 2.1.0"); - assert_eq!(lines[2], " Provides: c-compiler, cc"); - assert_eq!(lines[3], " Conflicts: legacy-cc(*)"); - assert_eq!(lines[4], " Replaces: old-cc(<2.0.0)"); - assert_eq!(lines[5], " Policy: provides=2 conflicts=1 replaces=1"); - } + assert_eq!(lines[2], " Description: Portable toolchain"); + assert_eq!(lines[3], " Provides: c-compiler, cc"); + assert_eq!(lines[4], " Conflicts: legacy-cc(*)"); + assert_eq!(lines[5], " Replaces: old-cc(<2.0.0)"); + assert_eq!(lines[6], " Policy: provides=2 conflicts=1 replaces=1"); + + let manifest_without_description = PackageManifest::from_toml_str( + r#" +name = "compiler" +version = "2.1.0" +provides = ["c-compiler", "cc"] + +[conflicts] +legacy-cc = "*" + +[replaces] +old-cc = "<2.0.0" +"#, + ) + .expect("manifest must parse"); + + let lines_without_description = + format_info_lines("compiler", &[manifest_without_description]); + assert_eq!(lines_without_description[0], "Package: compiler"); + assert_eq!(lines_without_description[1], "- 2.1.0"); + assert_eq!(lines_without_description[2], " Provides: c-compiler, cc"); + assert_eq!(lines_without_description[3], " Conflicts: legacy-cc(*)"); + assert_eq!(lines_without_description[4], " Replaces: old-cc(<2.0.0)"); + assert_eq!( + lines_without_description[5], + " Policy: provides=2 conflicts=1 replaces=1" + ); + } + + #[test] + fn format_info_lines_omits_description_when_only_whitespace() { + let manifest = PackageManifest::from_toml_str( + r#" +name = "compiler" +version = "2.1.0" +description = " \n\t" +"#, + ) + .expect("manifest must parse"); + + let lines = format_info_lines("compiler", &[manifest]); + assert_eq!(lines[0], "Package: compiler"); + assert_eq!(lines[1], "- 2.1.0"); + assert!( + !lines.iter().any(|line| line.starts_with(" Description:")), + "whitespace-only descriptions must not be rendered" + ); + } #[test] fn cli_parses_registry_add_command() { @@ -5509,6 +5928,7 @@ old-cc = "<2.0.0" .to_string(), enabled: true, priority: 10, + community: None, }, 
snapshot: RegistrySourceSnapshotState::Ready { snapshot_id: "git:0123456789abcdef".to_string(), @@ -5524,6 +5944,7 @@ old-cc = "<2.0.0" .to_string(), enabled: true, priority: 1, + community: None, }, snapshot: RegistrySourceSnapshotState::None, }, @@ -5594,6 +6015,7 @@ old-cc = "<2.0.0" "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef".to_string(), enabled: true, priority: 1, + community: None, }) .expect("must add alpha source"); store @@ -5605,6 +6027,7 @@ old-cc = "<2.0.0" "fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210".to_string(), enabled: false, priority: 2, + community: None, }) .expect("must add beta source"); @@ -5646,6 +6069,7 @@ old-cc = "<2.0.0" "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef".to_string(), enabled: true, priority: 1, + community: None, }) .expect("must add alpha source"); store @@ -5657,6 +6081,7 @@ old-cc = "<2.0.0" "fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210".to_string(), enabled: true, priority: 2, + community: None, }) .expect("must add beta source"); @@ -5710,6 +6135,7 @@ old-cc = "<2.0.0" "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef".to_string(), enabled: true, priority: 1, + community: None, }) .expect("must add alpha source"); store @@ -5721,6 +6147,7 @@ old-cc = "<2.0.0" "fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210".to_string(), enabled: true, priority: 2, + community: None, }) .expect("must add beta source"); @@ -5768,6 +6195,7 @@ old-cc = "<2.0.0" "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef".to_string(), enabled: true, priority: 1, + community: None, }) .expect("must add alpha source"); @@ -5797,6 +6225,7 @@ old-cc = "<2.0.0" "f0cf90f634c31f8f43f56f3576d2f23f9f66d4b041e92f788bcbdbdbf4dcd89f".to_string(), enabled: true, priority: 1, + community: None, }) .expect("must add ok source"); store @@ -5808,6 +6237,7 @@ old-cc = "<2.0.0" 
"f0cf90f634c31f8f43f56f3576d2f23f9f66d4b041e92f788bcbdbdbf4dcd89f".to_string(), enabled: true, priority: 2, + community: None, }) .expect("must add bad source"); @@ -6346,6 +6776,9 @@ sha256 = "abc" #[test] fn download_artifact_reports_progress_with_known_total() { + let _env_lock = download_backend_env_lock() + .lock() + .expect("download backend env lock must be available"); let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); @@ -6377,6 +6810,9 @@ sha256 = "abc" #[test] fn download_artifact_reports_progress_without_total_for_streamed_response() { + let _env_lock = download_backend_env_lock() + .lock() + .expect("download backend env lock must be available"); let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); @@ -6434,16 +6870,16 @@ sha256 = "abc" #[test] fn download_artifact_cache_hit_ignores_invalid_backend_env() { + let _env_lock = download_backend_env_lock() + .lock() + .expect("download backend env lock must be available"); let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); let cache_path = layout.prefix().join("download-cache-hit.bin"); std::fs::write(&cache_path, b"cached").expect("must write cache fixture"); - let previous = std::env::var("CROSSPACK_DOWNLOAD_BACKEND").ok(); - unsafe { - std::env::set_var("CROSSPACK_DOWNLOAD_BACKEND", "not-a-backend"); - } + let _backend_guard = DownloadBackendEnvGuard::set("not-a-backend"); let status = download_artifact_with_progress( "https://example.test/cached.bin", @@ -6453,17 +6889,15 @@ sha256 = "abc" ) .expect("cache hit should short-circuit before backend validation"); - match previous { - Some(value) => unsafe { std::env::set_var("CROSSPACK_DOWNLOAD_BACKEND", value) }, - None => unsafe { std::env::remove_var("CROSSPACK_DOWNLOAD_BACKEND") }, - } - assert_eq!(status, "cache-hit"); let _ = std::fs::remove_dir_all(layout.prefix()); } #[test] fn download_artifact_retries_in_process_download_before_succeeding() { + let _env_lock 
= download_backend_env_lock() + .lock() + .expect("download backend env lock must be available"); let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); @@ -6664,6 +7098,7 @@ sha256 = "abc" interaction_policy: InstallInteractionPolicy::default(), install_progress_mode: InstallProgressMode::Disabled, }, + None, ) .expect_err("unsupported EXE host should fail deterministically"); @@ -6699,6 +7134,7 @@ sha256 = "abc" interaction_policy: InstallInteractionPolicy::default(), install_progress_mode: InstallProgressMode::Disabled, }, + None, ) .expect_err("unsupported PKG host should fail deterministically"); @@ -6911,36 +7347,6 @@ sha256 = "abc" assert!(!layout.gui_native_state_path("demo").exists()); } - #[test] - fn build_from_source_flag_returns_not_supported_error_text() { - let err = ensure_build_from_source_not_supported("install", true) - .expect_err("build-from-source should fail closed until implemented"); - assert_eq!( - err.to_string(), - "source builds are not yet supported; remove --build-from-source and use a target with published binary artifacts" - ); - } - - #[test] - fn install_build_from_source_is_rejected_at_command_entrypoint() { - let cli = Cli::try_parse_from(["crosspack", "install", "demo", "--build-from-source"]) - .expect("command must parse"); - let err = run_cli(cli).expect_err("build-from-source must be rejected before install"); - assert!(err - .to_string() - .contains("source builds are not yet supported")); - } - - #[test] - fn bundle_apply_build_from_source_is_rejected_at_command_entrypoint() { - let cli = Cli::try_parse_from(["crosspack", "bundle", "apply", "--build-from-source"]) - .expect("command must parse"); - let err = run_cli(cli).expect_err("build-from-source must be rejected before bundle apply"); - assert!(err - .to_string() - .contains("source builds are not yet supported")); - } - #[test] fn source_build_metadata_requires_build_from_source_flag_when_binary_artifact_missing() { let manifest = 
PackageManifest::from_toml_str( @@ -6955,6 +7361,7 @@ sha256 = "abc123" [source_build] url = "https://example.test/demo-1.0.0-src.tar.gz" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" build_system = "cargo" build_commands = ["cargo", "build", "--release"] install_commands = ["cargo", "install", "--path", "."] @@ -6966,12 +7373,12 @@ install_commands = ["cargo", "install", "--path", "."] .expect_err("source-build gate should require explicit opt-in"); assert_eq!( err.to_string(), - "source build required for demo 1.0.0 on target x86_64-unknown-linux-gnu: no binary artifact published; source builds are not yet supported" + "source build required for demo 1.0.0 on target x86_64-unknown-linux-gnu: no binary artifact published; rerun with --build-from-source" ); } #[test] - fn source_build_metadata_with_flag_returns_not_supported_guardrail_error() { + fn source_build_metadata_with_flag_uses_source_build_path() { let manifest = PackageManifest::from_toml_str( r#" name = "demo" @@ -6984,6 +7391,7 @@ sha256 = "abc123" [source_build] url = "https://example.test/demo-1.0.0-src.tar.gz" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" build_system = "cargo" build_commands = ["cargo", "build", "--release"] install_commands = ["cargo", "install", "--path", "."] @@ -6991,12 +7399,487 @@ install_commands = ["cargo", "install", "--path", "."] ) .expect("manifest should parse"); - let err = select_artifact_for_target(&manifest, "x86_64-unknown-linux-gnu", true) - .expect_err("source build path should fail closed while unimplemented"); + let (selected, source_build) = + select_install_plan_for_target(&manifest, "x86_64-unknown-linux-gnu", true) + .expect("source-build opt-in should use supported source-build path"); assert_eq!( - err.to_string(), - "source builds are not yet supported; remove --build-from-source and use a target with published binary artifacts" + selected.target, "aarch64-apple-darwin", + 
"fallback artifact metadata should be selected deterministically" + ); + let source_build = source_build.expect("source-build plan should be present"); + assert_eq!( + source_build.url, + "https://example.test/demo-1.0.0-src.tar.gz" + ); + assert_eq!( + source_build.archive_sha256, + "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" + ); + assert_eq!(source_build.archive_type, ArchiveType::TarGz); + assert_eq!(source_build.build_system, "cargo"); + } + + #[test] + fn source_build_metadata_rejects_unknown_fields_fail_closed() { + let err = PackageManifest::from_toml_str( + r#" +name = "demo" +version = "1.0.0" + +[[artifacts]] +target = "x86_64-unknown-linux-gnu" +url = "https://example.test/demo-1.0.0.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/demo-1.0.0-src.zip" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" +build_system = "shell" +build_commands = ["sh", "-c", "true"] +install_commands = ["sh", "-c", "true"] +unexpected = "value" +"#, + ) + .expect_err("unknown source_build fields must be rejected"); + let rendered = err + .chain() + .map(ToString::to_string) + .collect::>() + .join(" | "); + assert!( + rendered.contains("unknown field") && rendered.contains("unexpected"), + "unexpected parse error: {rendered}" + ); + } + + #[test] + fn source_build_metadata_rejects_invalid_archive_sha256_fail_closed() { + let target = host_target_triple().to_string(); + let manifest = PackageManifest::from_toml_str(&format!( + r#" +name = "demo" +version = "1.0.0" + +[[artifacts]] +target = "{target}" +url = "https://example.test/demo-1.0.0.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/demo-1.0.0-src.tar.gz" +archive_sha256 = "xyz" +build_system = "shell" +build_commands = ["sh", "-c", "true"] +install_commands = ["sh", "-c", "true"] +"# + )) + .expect("manifest should parse before source-build plan validation"); + + let err = 
select_install_plan_for_target(&manifest, &target, true) + .expect_err("invalid source archive checksum metadata must fail closed"); + assert!( + err.to_string() + .contains("archive_sha256 must be a 64-character hexadecimal SHA-256 digest"), + "unexpected error: {err}" + ); + } + + #[test] + fn source_build_metadata_rejects_empty_command_tokens_fail_closed() { + let target = host_target_triple().to_string(); + let manifest = PackageManifest::from_toml_str(&format!( + r#" +name = "demo" +version = "1.0.0" + +[[artifacts]] +target = "{target}" +url = "https://example.test/demo-1.0.0.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/demo-1.0.0-src.zip" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" +build_system = "shell" +build_commands = ["", "-c", "true"] +install_commands = ["sh", "-c", "true"] +"# + )) + .expect("manifest should parse before source-build plan validation"); + + let err = select_install_plan_for_target(&manifest, &target, true) + .expect_err("empty source-build command tokens must fail closed"); + assert!( + err.to_string().contains("command tokens must not be empty"), + "unexpected error: {err}" + ); + } + + #[test] + fn source_build_metadata_rejects_unsupported_archive_type_fail_closed() { + let target = host_target_triple().to_string(); + let manifest = PackageManifest::from_toml_str(&format!( + r#" +name = "demo" +version = "1.0.0" + +[[artifacts]] +target = "{target}" +url = "https://example.test/demo-1.0.0.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/demo-1.0.0-src.pkg" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" +build_system = "shell" +build_commands = ["sh", "-c", "true"] +install_commands = ["sh", "-c", "true"] +"# + )) + .expect("manifest should parse before source-build plan validation"); + + let err = select_install_plan_for_target(&manifest, &target, true) + .expect_err("unsupported source-build 
archive types must fail closed"); + assert!( + err.to_string() + .contains("archive type 'pkg' is not supported for source builds"), + "unexpected error: {err}" + ); + } + + #[cfg(unix)] + #[test] + fn install_build_from_source_cli_flow_records_source_build_journal_steps() { + let home_root = build_test_layout_path(current_unix_nanos()); + std::fs::create_dir_all(&home_root).expect("must create test HOME root"); + + with_test_home_layout(&home_root, |layout| { + layout.ensure_base_dirs().expect("must create base dirs"); + configure_ready_source(layout, "official"); + let target = host_target_triple().to_string(); + let source_archive_sha256 = + seed_source_build_tar_gz_cache(layout, "demo", "1.0.0", &target); + write_signed_source_build_manifest( + layout, + "official", + "demo", + "1.0.0", + &target, + &source_archive_sha256, + ); + + let cli = Cli::try_parse_from(["crosspack", "install", "demo", "--build-from-source"]) + .expect("install command must parse"); + run_cli(cli).expect("install --build-from-source should succeed"); + + let receipts = read_install_receipts(layout).expect("must read receipts"); + assert_eq!(receipts.len(), 1, "exactly one package should be installed"); + let receipt = &receipts[0]; + assert_eq!(receipt.name, "demo"); + assert_eq!(receipt.version, "1.0.0"); + assert_eq!( + receipt.artifact_url.as_deref(), + Some("https://example.test/demo-1.0.0-src.tar.gz") + ); + assert_eq!( + receipt.artifact_sha256.as_deref(), + Some(source_archive_sha256.as_str()), + "source-build installs must persist source archive checksum provenance" + ); + + let txid = single_transaction_txid(layout); + let metadata = read_transaction_metadata(layout, &txid) + .expect("must read transaction metadata") + .expect("metadata should exist"); + assert_eq!(metadata.operation, "install"); + assert_eq!(metadata.status, "committed"); + + let records = + read_transaction_journal_records(layout, &txid).expect("must read journal records"); + let fetch_index = records + 
.iter() + .position(|entry| entry.step == "source_fetch:demo") + .expect("source fetch step must be journaled"); + let build_system_index = records + .iter() + .position(|entry| entry.step == "source_build_system:demo:shell") + .expect("source build system step must be journaled"); + let install_index = records + .iter() + .position(|entry| entry.step == "source_install:demo") + .expect("source install step must be journaled"); + assert!( + fetch_index < build_system_index && build_system_index < install_index, + "source-build journal step order must remain deterministic" + ); + }); + + let _ = std::fs::remove_dir_all(home_root); + } + + #[cfg(unix)] + #[test] + fn bundle_apply_build_from_source_executes_install_and_records_source_steps() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + configure_ready_source(&layout, "official"); + let target = host_target_triple().to_string(); + let source_archive_sha256 = + seed_source_build_tar_gz_cache(&layout, "bundle-demo", "1.0.0", &target); + write_signed_source_build_manifest( + &layout, + "official", + "bundle-demo", + "1.0.0", + &target, + &source_archive_sha256, + ); + + let bundle_path = layout.prefix().join("bundle-source-build.toml"); + std::fs::write( + &bundle_path, + r#" +format = "crosspack.bundle" +version = 1 + +[[roots]] +name = "bundle-demo" +"#, + ) + .expect("must write bundle apply fixture"); + + let provider_values: Vec = Vec::new(); + run_bundle_apply_command( + &layout, + None, + BundleApplyOptions { + file: Some(bundle_path.as_path()), + dry_run: false, + explain: false, + build_from_source: true, + force_redownload: false, + provider_values: &provider_values, + }, + ) + .expect("bundle apply --build-from-source should execute install path"); + + let receipts = read_install_receipts(&layout).expect("must read receipts"); + assert!( + receipts.iter().any(|receipt| receipt.name == "bundle-demo"), + "bundle apply should install bundle root package" + ); + + let 
txid = single_transaction_txid(&layout); + let metadata = read_transaction_metadata(&layout, &txid) + .expect("must read transaction metadata") + .expect("metadata should exist"); + assert_eq!(metadata.operation, "bundle-apply"); + assert_eq!(metadata.status, "committed"); + + let records = read_transaction_journal_records(&layout, &txid) + .expect("must read bundle apply journal records"); + assert!( + records + .iter() + .any(|entry| entry.step == "source_fetch:bundle-demo"), + "bundle apply source-build flow must record source_fetch step" + ); + assert!( + records + .iter() + .any(|entry| entry.step == "source_build_system:bundle-demo:shell"), + "bundle apply source-build flow must record build-system step" + ); + assert!( + records + .iter() + .any(|entry| entry.step == "source_install:bundle-demo"), + "bundle apply source-build flow must record source_install step" + ); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + + #[cfg(unix)] + #[test] + fn install_build_from_source_fails_closed_on_source_archive_checksum_mismatch() { + let home_root = build_test_layout_path(current_unix_nanos()); + std::fs::create_dir_all(&home_root).expect("must create test HOME root"); + + with_test_home_layout(&home_root, |layout| { + layout.ensure_base_dirs().expect("must create base dirs"); + configure_ready_source(layout, "official"); + let target = host_target_triple().to_string(); + let _source_archive_sha256 = + seed_source_build_tar_gz_cache(layout, "checksum-demo", "1.0.0", &target); + write_signed_source_build_manifest( + layout, + "official", + "checksum-demo", + "1.0.0", + &target, + EMPTY_SHA256, + ); + + let cli = Cli::try_parse_from([ + "crosspack", + "install", + "checksum-demo", + "--build-from-source", + ]) + .expect("install command must parse"); + let err = run_cli(cli).expect_err("checksum mismatch must fail closed"); + assert!( + err.to_string().contains("source archive sha256 mismatch"), + "unexpected error: {err}" + ); + + let receipts = 
read_install_receipts(layout).expect("must read receipts after failure"); + assert!( + receipts + .iter() + .all(|receipt| receipt.name != "checksum-demo"), + "checksum mismatch must not persist an install receipt" + ); + }); + + let _ = std::fs::remove_dir_all(home_root); + } + + #[cfg(unix)] + #[test] + fn failed_source_build_does_not_mark_build_system_journal_step_done() { + let home_root = build_test_layout_path(current_unix_nanos()); + std::fs::create_dir_all(&home_root).expect("must create test HOME root"); + + with_test_home_layout(&home_root, |layout| { + layout.ensure_base_dirs().expect("must create base dirs"); + configure_ready_source(layout, "official"); + let target = host_target_triple().to_string(); + let source_archive_sha256 = + seed_source_build_tar_gz_cache(layout, "journal-demo", "1.0.0", &target); + write_signed_source_build_manifest_with_commands( + layout, + "official", + "journal-demo", + "1.0.0", + &target, + &source_archive_sha256, + SourceBuildScripts { + build: "exit 7".to_string(), + install: "true".to_string(), + }, + ); + + let cli = Cli::try_parse_from([ + "crosspack", + "install", + "journal-demo", + "--build-from-source", + ]) + .expect("install command must parse"); + let err = run_cli(cli).expect_err("build failure must fail install"); + assert!( + err.to_string() + .contains("source build build command failed"), + "unexpected error: {err}" + ); + + let txid = single_transaction_txid(layout); + let records = read_transaction_journal_records(layout, &txid) + .expect("must read source-build failure journal records"); + assert!( + records + .iter() + .any(|entry| entry.step == "source_fetch:journal-demo"), + "source fetch should still be journaled after successful download" + ); + assert!( + records + .iter() + .all(|entry| entry.step != "source_build_system:journal-demo:shell"), + "source build system step must not be journaled as done before successful build" + ); + assert!( + records + .iter() + .all(|entry| entry.step != 
"source_install:journal-demo"), + "source install step must not be journaled when build fails" + ); + }); + + let _ = std::fs::remove_dir_all(home_root); + } + + #[test] + fn install_build_from_source_dry_run_keeps_machine_contract_keys_and_order() { + let target = host_target_triple().to_string(); + let manifest = PackageManifest::from_toml_str(&format!( + r#" +name = "dryrun-demo" +version = "1.0.0" + +[[artifacts]] +target = "{target}" +url = "https://example.test/dryrun-demo-1.0.0.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/dryrun-demo-1.0.0-src.zip" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" +build_system = "shell" +build_commands = ["sh", "-c", "true"] +install_commands = ["sh", "-c", "true"] +"# + )) + .expect("manifest should parse"); + + let (binary_artifact, _) = select_install_plan_for_target(&manifest, &target, false) + .expect("binary path must resolve"); + let binary_planned = build_planned_package_changes( + &[ResolvedInstall { + manifest: manifest.clone(), + artifact: binary_artifact.clone(), + resolved_target: target.clone(), + archive_type: binary_artifact + .archive_type() + .expect("artifact archive type must parse"), + source_build: None, + }], + &[], + ) + .expect("binary planned changes must build"); + let binary_lines = render_dry_run_output_lines( + &build_transaction_preview("install", &binary_planned), + TransactionPreviewMode::DryRun, + None, + ); + + let (source_artifact, source_build) = + select_install_plan_for_target(&manifest, &target, true) + .expect("source-build path must resolve"); + let source_planned = build_planned_package_changes( + &[ResolvedInstall { + manifest, + artifact: source_artifact, + resolved_target: target, + archive_type: ArchiveType::Zip, + source_build, + }], + &[], + ) + .expect("source-build planned changes must build"); + let source_lines = render_dry_run_output_lines( + &build_transaction_preview("install", &source_planned), + 
TransactionPreviewMode::DryRun, + None, ); + + assert_eq!(source_lines, binary_lines); + assert!(source_lines[0].starts_with("transaction_preview ")); + assert!(source_lines[1].starts_with("transaction_summary ")); + assert!(source_lines[2].starts_with("risk_flags=")); + assert!(source_lines[3].starts_with("change_add ")); } fn resolved_install(name: &str, version: &str) -> ResolvedInstall { @@ -7018,6 +7901,7 @@ sha256 = "abc" artifact, resolved_target: "x86_64-unknown-linux-gnu".to_string(), archive_type: ArchiveType::TarZst, + source_build: None, } } @@ -7151,6 +8035,107 @@ sha256 = "abc" static TEST_LAYOUT_COUNTER: AtomicU64 = AtomicU64::new(0); + fn home_env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + fn download_backend_env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + struct DownloadBackendEnvGuard { + previous: Option, + } + + impl DownloadBackendEnvGuard { + fn set(value: &str) -> Self { + let previous = std::env::var("CROSSPACK_DOWNLOAD_BACKEND").ok(); + unsafe { + std::env::set_var("CROSSPACK_DOWNLOAD_BACKEND", value); + } + Self { previous } + } + } + + impl Drop for DownloadBackendEnvGuard { + fn drop(&mut self) { + match self.previous.as_deref() { + Some(value) => unsafe { + std::env::set_var("CROSSPACK_DOWNLOAD_BACKEND", value); + }, + None => unsafe { + std::env::remove_var("CROSSPACK_DOWNLOAD_BACKEND"); + }, + } + } + } + + struct HomeEnvGuard { + previous: Option, + } + + impl HomeEnvGuard { + fn set(home: &Path) -> Self { + let previous = std::env::var("HOME").ok(); + unsafe { + std::env::set_var("HOME", home); + } + Self { previous } + } + } + + impl Drop for HomeEnvGuard { + fn drop(&mut self) { + match self.previous.as_deref() { + Some(value) => unsafe { + std::env::set_var("HOME", value); + }, + None => unsafe { + std::env::remove_var("HOME"); + }, + } + } + } + + fn current_unix_nanos() -> 
u128 { + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .expect("system time") + .as_nanos() + } + + fn with_test_home_layout(home_root: &Path, run: impl FnOnce(&PrefixLayout) -> T) -> T { + let _home_lock = home_env_lock() + .lock() + .expect("HOME env lock should not be poisoned"); + let _home_guard = HomeEnvGuard::set(home_root); + let layout = PrefixLayout::new(home_root.join(".crosspack")); + run(&layout) + } + + fn single_transaction_txid(layout: &PrefixLayout) -> String { + let mut txids = std::fs::read_dir(layout.transactions_dir()) + .expect("must read transactions dir") + .filter_map(|entry| entry.ok().map(|entry| entry.path())) + .filter(|path| path.extension().and_then(|ext| ext.to_str()) == Some("json")) + .filter_map(|path| { + path.file_stem() + .and_then(|stem| stem.to_str()) + .map(ToOwned::to_owned) + }) + .collect::>(); + txids.sort(); + assert_eq!( + txids.len(), + 1, + "expected exactly one transaction metadata file, found {:?}", + txids + ); + txids.remove(0) + } + fn build_test_layout_path(nanos: u128) -> PathBuf { let mut path = std::env::temp_dir(); let sequence = TEST_LAYOUT_COUNTER.fetch_add(1, Ordering::Relaxed); @@ -7183,10 +8168,7 @@ sha256 = "abc" fn test_registry_source_dir(name: &str, with_registry_pub: bool) -> PathBuf { let mut path = std::env::temp_dir(); - let nanos = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .expect("system time") - .as_nanos(); + let nanos = current_unix_nanos(); path.push(format!("crosspack-cli-test-registry-{name}-{nanos}")); std::fs::create_dir_all(path.join("index")).expect("must create index dir"); if with_registry_pub { @@ -7248,6 +8230,133 @@ sha256 = "abc" .expect("must write signature"); } + fn write_signed_source_build_manifest( + layout: &PrefixLayout, + source_name: &str, + package_name: &str, + version: &str, + target: &str, + source_archive_sha256: &str, + ) { + write_signed_source_build_manifest_with_commands( + layout, + source_name, + 
package_name, + version, + target, + source_archive_sha256, + SourceBuildScripts { + build: "mkdir -p $CROSSPACK_STAGE_DIR/bin".to_string(), + install: format!( + "echo '#!/bin/sh' > $CROSSPACK_STAGE_DIR/bin/{package_name}; echo 'exit 0' >> $CROSSPACK_STAGE_DIR/bin/{package_name}; chmod +x $CROSSPACK_STAGE_DIR/bin/{package_name}" + ), + }, + ); + } + + struct SourceBuildScripts { + build: String, + install: String, + } + + fn write_signed_source_build_manifest_with_commands( + layout: &PrefixLayout, + source_name: &str, + package_name: &str, + version: &str, + target: &str, + source_archive_sha256: &str, + scripts: SourceBuildScripts, + ) { + let manifest = format!( + r#" +name = "{package_name}" +version = "{version}" + +[[artifacts]] +target = "{target}" +url = "https://example.test/{package_name}-{version}.tar.zst" +sha256 = "abc123" +[[artifacts.binaries]] +name = "{package_name}" +path = "bin/{package_name}" + +[source_build] +url = "https://example.test/{package_name}-{version}-src.tar.gz" +archive_sha256 = "{source_archive_sha256}" +build_system = "shell" +build_commands = ["sh", "-c", "{build_script}"] +install_commands = ["sh", "-c", "{install_script}"] +"#, + build_script = scripts.build, + install_script = scripts.install + ); + + let cache_root = registry_state_root(layout).join("cache").join(source_name); + let package_dir = cache_root.join("index").join(package_name); + std::fs::create_dir_all(&package_dir).expect("must create package directory"); + + let signing_key = test_signing_key(); + std::fs::write( + cache_root.join("registry.pub"), + public_key_hex(&signing_key), + ) + .expect("must write registry key"); + + let manifest_path = package_dir.join(format!("{version}.toml")); + std::fs::write(&manifest_path, manifest.as_bytes()).expect("must write manifest"); + + let signature = signing_key.sign(manifest.as_bytes()); + std::fs::write( + manifest_path.with_extension("toml.sig"), + hex::encode(signature.to_bytes()), + ) + .expect("must write 
signature"); + } + + fn seed_source_build_tar_gz_cache( + layout: &PrefixLayout, + package_name: &str, + version: &str, + target: &str, + ) -> String { + let cache_path = + layout.artifact_cache_path(package_name, version, target, ArchiveType::TarGz); + std::fs::create_dir_all( + cache_path + .parent() + .expect("artifact cache path must have a parent"), + ) + .expect("must create source-build cache dir"); + + let fixture_root = layout + .tmp_state_dir() + .join(format!("source-build-archive-{package_name}-{version}")); + let _ = std::fs::remove_dir_all(&fixture_root); + std::fs::create_dir_all(&fixture_root).expect("must create source archive fixture root"); + std::fs::write(fixture_root.join("README.txt"), "source-build fixture\n") + .expect("must write source archive fixture file"); + + let status = std::process::Command::new("tar") + .arg("-czf") + .arg(&cache_path) + .arg("-C") + .arg(&fixture_root) + .arg(".") + .status() + .expect("must spawn tar to build source archive fixture"); + assert!( + status.success(), + "tar must create source archive fixture successfully" + ); + + let archive_bytes = std::fs::read(&cache_path).expect("must read source archive fixture"); + let archive_sha256 = crosspack_security::sha256_hex(&archive_bytes); + + let _ = std::fs::remove_dir_all(fixture_root); + archive_sha256 + } + fn test_signing_key() -> SigningKey { SigningKey::from_bytes(&[7_u8; 32]) } diff --git a/crates/crosspack-core/src/lib.rs b/crates/crosspack-core/src/lib.rs index d3cb3ba..b0dca96 100644 --- a/crates/crosspack-core/src/lib.rs +++ b/crates/crosspack-core/src/lib.rs @@ -6,7 +6,7 @@ mod manifest; pub use archive::ArchiveType; pub use artifact::{Artifact, ArtifactBinary, ArtifactCompletion, ArtifactCompletionShell}; pub use gui::{ArtifactGuiApp, ArtifactGuiFileAssociation, ArtifactGuiProtocol}; -pub use manifest::{PackageManifest, SourceBuildMetadata}; +pub use manifest::{PackageManifest, ServiceDeclaration, SourceBuildMetadata}; #[cfg(test)] mod tests; diff 
--git a/crates/crosspack-core/src/manifest.rs b/crates/crosspack-core/src/manifest.rs index 14c9dd9..30f6026 100644 --- a/crates/crosspack-core/src/manifest.rs +++ b/crates/crosspack-core/src/manifest.rs @@ -25,6 +25,16 @@ pub struct PackageManifest { pub artifacts: Vec, #[serde(default)] pub source_build: Option, + #[serde(default)] + pub services: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(deny_unknown_fields)] +pub struct ServiceDeclaration { + pub name: String, + #[serde(default)] + pub native_id: Option, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] @@ -32,6 +42,7 @@ pub struct PackageManifest { pub struct SourceBuildMetadata { #[serde(alias = "source_url")] pub url: String, + pub archive_sha256: String, pub build_system: String, #[serde(default)] pub build_commands: Vec, @@ -77,10 +88,54 @@ impl PackageManifest { } } } + let mut seen_service_names = HashSet::new(); + for service in &manifest.services { + validate_service_name_token(&service.name)?; + if !seen_service_names.insert(service.name.clone()) { + return Err(anyhow!( + "duplicate service declaration '{}' in manifest '{}'", + service.name, + manifest.name + )); + } + if let Some(native_id) = service.native_id.as_deref() { + validate_native_service_id_token(native_id)?; + } + } Ok(manifest) } } +fn validate_service_name_token(value: &str) -> anyhow::Result<()> { + validate_service_token("service name", value, false) +} + +fn validate_native_service_id_token(value: &str) -> anyhow::Result<()> { + validate_service_token("native service id", value, true) +} + +fn validate_service_token(kind: &str, value: &str, allow_at: bool) -> anyhow::Result<()> { + let bytes = value.as_bytes(); + if bytes.is_empty() || bytes.len() > 64 { + return Err(anyhow!( + "invalid {kind} '{value}': use package-token grammar" + )); + } + + let starts_valid = bytes[0].is_ascii_lowercase() || bytes[0].is_ascii_digit(); + let allowed_symbols: &[u8] = if allow_at { b"._+-@" } 
else { b"._+-" }; + let remainder_valid = bytes[1..] + .iter() + .all(|b| b.is_ascii_lowercase() || b.is_ascii_digit() || allowed_symbols.contains(b)); + if !starts_valid || !remainder_valid { + return Err(anyhow!( + "invalid {kind} '{value}': use package-token grammar" + )); + } + + Ok(()) +} + fn validate_protocol_scheme(scheme: &str) -> anyhow::Result<()> { let trimmed = scheme.trim(); if trimmed.is_empty() { diff --git a/crates/crosspack-core/src/tests.rs b/crates/crosspack-core/src/tests.rs index 7a5bce2..689a7bf 100644 --- a/crates/crosspack-core/src/tests.rs +++ b/crates/crosspack-core/src/tests.rs @@ -97,6 +97,96 @@ sha256 = "abc123" let parsed = PackageManifest::from_toml_str(content).expect("manifest should parse"); assert_eq!(parsed.description, None); assert_eq!(parsed.source_build, None); + assert!(parsed.services.is_empty()); +} + +#[test] +fn parse_manifest_with_declared_services() { + let content = r#" +name = "demo" +version = "1.2.3" + +[[services]] +name = "demo" + +[[services]] +name = "demo-worker" +native_id = "demo-worker@main" +"#; + + let parsed = PackageManifest::from_toml_str(content).expect("manifest should parse"); + assert_eq!(parsed.services.len(), 2); + assert_eq!(parsed.services[0].name, "demo"); + assert_eq!(parsed.services[0].native_id, None); + assert_eq!(parsed.services[1].name, "demo-worker"); + assert_eq!( + parsed.services[1].native_id.as_deref(), + Some("demo-worker@main") + ); +} + +#[test] +fn parse_manifest_rejects_duplicate_declared_service_names() { + let content = r#" +name = "demo" +version = "1.0.0" + +[[services]] +name = "demo" + +[[services]] +name = "demo" +"#; + + let err = + PackageManifest::from_toml_str(content).expect_err("duplicate service name must fail"); + assert!(err.to_string().contains("duplicate service declaration")); +} + +#[test] +fn parse_manifest_rejects_invalid_declared_service_name() { + let content = r#" +name = "demo" +version = "1.0.0" + +[[services]] +name = "Demo Service" +"#; + + let err 
= + PackageManifest::from_toml_str(content).expect_err("invalid service declaration must fail"); + assert!(err.to_string().contains("invalid service name")); +} + +#[test] +fn parse_manifest_rejects_declared_service_name_with_at_sign() { + let content = r#" +name = "demo" +version = "1.0.0" + +[[services]] +name = "demo@main" +"#; + + let err = PackageManifest::from_toml_str(content) + .expect_err("service name with '@' must fail package-token validation"); + assert!(err.to_string().contains("invalid service name")); +} + +#[test] +fn parse_manifest_rejects_invalid_declared_service_native_id() { + let content = r#" +name = "demo" +version = "1.0.0" + +[[services]] +name = "demo" +native_id = "demo service" +"#; + + let err = + PackageManifest::from_toml_str(content).expect_err("invalid native service id must fail"); + assert!(err.to_string().contains("invalid native service id")); } #[test] @@ -107,6 +197,7 @@ version = "1.2.3" [source_build] url = "https://example.test/demo-1.2.3.tar.gz" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" build_system = "cargo" build_commands = ["cargo", "build", "--release"] install_commands = ["cargo", "install", "--path", "."] @@ -117,6 +208,10 @@ install_commands = ["cargo", "install", "--path", "."] .source_build .expect("source_build metadata should be present"); assert_eq!(source_build.url, "https://example.test/demo-1.2.3.tar.gz"); + assert_eq!( + source_build.archive_sha256, + "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" + ); assert_eq!(source_build.build_system, "cargo"); assert_eq!( source_build.build_commands, diff --git a/crates/crosspack-installer/src/artifact.rs b/crates/crosspack-installer/src/artifact.rs index da9724e..f3a5a3b 100644 --- a/crates/crosspack-installer/src/artifact.rs +++ b/crates/crosspack-installer/src/artifact.rs @@ -60,6 +60,144 @@ pub fn install_from_artifact( Ok(dst) } +pub fn install_from_source_archive( + layout: &PrefixLayout, + name: 
&str, + version: &str, + source_archive_path: &Path, + source_archive_type: ArchiveType, + build_commands: &[String], + install_commands: &[String], +) -> Result { + if !matches!( + source_archive_type, + ArchiveType::Zip | ArchiveType::TarGz | ArchiveType::TarZst + ) { + return Err(anyhow!( + "unsupported source build archive type '{}': expected one of zip, tar.gz, tar.zst", + source_archive_type.as_str() + )); + } + + if build_commands.is_empty() { + return Err(anyhow!( + "source build metadata requires non-empty build_commands" + )); + } + if install_commands.is_empty() { + return Err(anyhow!( + "source build metadata requires non-empty install_commands" + )); + } + + let install_tmp = make_tmp_dir(layout, "source-build")?; + let source_raw_dir = install_tmp.join("source-raw"); + let staged_dir = install_tmp.join("staged"); + fs::create_dir_all(&source_raw_dir) + .with_context(|| format!("failed to create {}", source_raw_dir.display()))?; + fs::create_dir_all(&staged_dir) + .with_context(|| format!("failed to create {}", staged_dir.display()))?; + + stage_artifact_payload( + source_archive_path, + &source_raw_dir, + source_archive_type, + 0, + None, + InstallMode::Managed, + InstallInteractionPolicy::default(), + )?; + + let source_root = infer_source_root(&source_raw_dir)?; + run_source_build_command("build", build_commands, &source_root, &staged_dir)?; + run_source_build_command("install", install_commands, &source_root, &staged_dir)?; + + let dst = layout.package_dir(name, version); + if dst.exists() { + fs::remove_dir_all(&dst) + .with_context(|| format!("failed to remove existing package dir: {}", dst.display()))?; + } + move_dir_or_copy(&staged_dir, &dst)?; + + let _ = fs::remove_dir_all(&install_tmp); + Ok(dst) +} + +fn infer_source_root(source_raw_dir: &Path) -> Result { + let mut dirs = Vec::new(); + let mut has_non_dir_entries = false; + for entry in fs::read_dir(source_raw_dir) + .with_context(|| format!("failed to read {}", 
source_raw_dir.display()))? + { + let entry = entry.with_context(|| { + format!( + "failed to iterate source build extraction root: {}", + source_raw_dir.display() + ) + })?; + let file_type = entry.file_type().with_context(|| { + format!( + "failed to inspect source build extraction entry: {}", + entry.path().display() + ) + })?; + if file_type.is_dir() { + dirs.push(entry.path()); + } else { + has_non_dir_entries = true; + } + } + + dirs.sort(); + if dirs.len() == 1 && !has_non_dir_entries { + return Ok(dirs.remove(0)); + } + Ok(source_raw_dir.to_path_buf()) +} + +fn run_source_build_command( + phase: &str, + command_tokens: &[String], + source_root: &Path, + staged_dir: &Path, +) -> Result<()> { + let program = command_tokens + .first() + .map(String::as_str) + .unwrap_or_default() + .trim(); + if program.is_empty() { + return Err(anyhow!( + "source build metadata contains an empty program token in {phase}_commands" + )); + } + if command_tokens.iter().any(|token| token.trim().is_empty()) { + return Err(anyhow!( + "source build metadata contains empty command tokens in {phase}_commands" + )); + } + + let mut command = Command::new(program); + if command_tokens.len() > 1 { + command.args(&command_tokens[1..]); + } + command + .current_dir(source_root) + .env("CROSSPACK_SOURCE_ROOT", source_root) + .env("CROSSPACK_STAGE_DIR", staged_dir); + + let context_message = format!("source build {phase} command failed"); + run_command(&mut command, &context_message).map_err(|err| { + if error_chain_has_not_found(&err) { + return anyhow!( + "source build {phase} command failed: required tool '{}' was not found on PATH", + program + ); + } + err + }) +} + fn make_tmp_dir(layout: &PrefixLayout, prefix: &str) -> Result { let mut dir = layout.tmp_state_dir(); dir.push(format!( diff --git a/crates/crosspack-installer/src/layout.rs b/crates/crosspack-installer/src/layout.rs index 331bec6..0a6b2c8 100644 --- a/crates/crosspack-installer/src/layout.rs +++ 
b/crates/crosspack-installer/src/layout.rs @@ -99,6 +99,10 @@ impl PrefixLayout { .join(format!("{name}.gui-native")) } + pub fn declared_services_state_path(&self, name: &str) -> PathBuf { + self.installed_state_dir().join(format!("{name}.services")) + } + pub fn transactions_dir(&self) -> PathBuf { self.state_dir().join("transactions") } diff --git a/crates/crosspack-installer/src/lib.rs b/crates/crosspack-installer/src/lib.rs index 7e79172..17461b6 100644 --- a/crates/crosspack-installer/src/lib.rs +++ b/crates/crosspack-installer/src/lib.rs @@ -9,7 +9,7 @@ mod transactions; mod types; mod uninstall; -pub use artifact::install_from_artifact; +pub use artifact::{install_from_artifact, install_from_source_archive}; pub use exposure::{ bin_path, clear_gui_exposure_state, expose_binary, expose_completion, expose_gui_app, exposed_completion_path, gui_asset_path, projected_exposed_completion_path, @@ -23,11 +23,14 @@ pub use native::{ clear_gui_native_state, clear_native_sidecar_state, read_all_gui_native_states, read_all_native_sidecar_states, read_gui_native_state, read_native_sidecar_state, register_native_gui_app_best_effort, remove_native_gui_registration_best_effort, - remove_package_native_gui_registrations_best_effort, run_package_native_uninstall_actions, - write_gui_native_state, write_native_sidecar_state, + remove_package_native_gui_registrations_best_effort, run_native_service_action, + run_package_native_uninstall_actions, write_gui_native_state, write_native_sidecar_state, }; pub use pins::{read_all_pins, read_pin, remove_pin, write_pin}; -pub use receipts::{read_install_receipts, write_install_receipt}; +pub use receipts::{ + clear_declared_services_state, read_all_declared_services_states, read_declared_services_state, + read_install_receipts, write_declared_services_state, write_install_receipt, +}; pub use transactions::{ append_transaction_journal_entry, clear_active_transaction, current_unix_timestamp, read_active_transaction, 
read_transaction_metadata, set_active_transaction, @@ -35,9 +38,9 @@ pub use transactions::{ }; pub use types::{ ArtifactInstallOptions, GuiExposureAsset, GuiNativeRegistrationRecord, - InstallInteractionPolicy, InstallMode, InstallReason, InstallReceipt, NativeSidecarState, - NativeUninstallAction, TransactionJournalEntry, TransactionMetadata, UninstallResult, - UninstallStatus, + InstallInteractionPolicy, InstallMode, InstallReason, InstallReceipt, NativeServiceAction, + NativeServiceOutcome, NativeSidecarState, NativeUninstallAction, TransactionJournalEntry, + TransactionMetadata, UninstallResult, UninstallStatus, }; pub use uninstall::{ uninstall_blocked_by_roots_with_dependency_overrides, diff --git a/crates/crosspack-installer/src/native.rs b/crates/crosspack-installer/src/native.rs index c8c6812..5b816d0 100644 --- a/crates/crosspack-installer/src/native.rs +++ b/crates/crosspack-installer/src/native.rs @@ -14,14 +14,155 @@ use crate::exposure::{ }; use crate::fs_utils::remove_file_if_exists; use crate::{ - GuiExposureAsset, GuiNativeRegistrationRecord, NativeSidecarState, NativeUninstallAction, - PrefixLayout, + GuiExposureAsset, GuiNativeRegistrationRecord, NativeServiceAction, NativeServiceOutcome, + NativeSidecarState, NativeUninstallAction, PrefixLayout, }; pub(crate) const MACOS_LSREGISTER_PATH: &str = "/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister"; const NATIVE_SIDECAR_VERSION: u32 = 1; +pub fn run_native_service_action( + action: NativeServiceAction, + service_name: &str, + native_id: &str, +) -> NativeServiceOutcome { + run_native_service_action_with_executor(action, service_name, native_id, run_command) +} + +pub(crate) fn run_native_service_action_with_executor( + action: NativeServiceAction, + _service_name: &str, + native_id: &str, + mut run_command_executor: RunCommand, +) -> NativeServiceOutcome +where + RunCommand: FnMut(&mut Command, &str) -> Result<()>, +{ + let (adapter, mut 
commands): (&str, Vec<(Command, &'static str)>) = if cfg!(target_os = "linux") + { + ( + "systemd", + vec![( + build_systemd_service_command(action, native_id), + "failed to execute systemd service action", + )], + ) + } else if cfg!(target_os = "macos") { + ( + "launchctl", + vec![( + build_macos_launchctl_service_command(action, native_id), + "failed to execute launchctl service action", + )], + ) + } else if cfg!(windows) { + ( + "windows-scm", + build_windows_service_commands(action, native_id), + ) + } else { + return NativeServiceOutcome { + adapter: "unsupported".to_string(), + applied: false, + reason_code: "unsupported-host".to_string(), + }; + }; + + for (command, context) in &mut commands { + if let Err(err) = run_command_executor(command, context) { + let reason_code = if error_chain_has_not_found(&err) { + "adapter-tool-missing" + } else { + "native-command-failed" + }; + return NativeServiceOutcome { + adapter: adapter.to_string(), + applied: false, + reason_code: reason_code.to_string(), + }; + } + } + + NativeServiceOutcome { + adapter: adapter.to_string(), + applied: true, + reason_code: "native-applied".to_string(), + } +} + +fn error_chain_has_not_found(err: &anyhow::Error) -> bool { + err.chain().any(|cause| { + cause + .downcast_ref::() + .map(|io_err| io_err.kind() == std::io::ErrorKind::NotFound) + .unwrap_or(false) + }) +} + +fn build_systemd_service_command(action: NativeServiceAction, native_id: &str) -> Command { + let mut command = Command::new("systemctl"); + let verb = match action { + NativeServiceAction::Status => "status", + NativeServiceAction::Start => "start", + NativeServiceAction::Stop => "stop", + NativeServiceAction::Restart => "restart", + }; + command.arg(verb).arg(native_id); + command +} + +fn build_macos_launchctl_service_command(action: NativeServiceAction, native_id: &str) -> Command { + let mut command = Command::new("launchctl"); + match action { + NativeServiceAction::Status => { + 
command.arg("print").arg(format!("system/{native_id}")); + } + NativeServiceAction::Start => { + command.arg("start").arg(native_id); + } + NativeServiceAction::Stop => { + command.arg("stop").arg(native_id); + } + NativeServiceAction::Restart => { + command + .arg("kickstart") + .arg("-k") + .arg(format!("system/{native_id}")); + } + } + command +} + +fn build_windows_service_commands( + action: NativeServiceAction, + native_id: &str, +) -> Vec<(Command, &'static str)> { + match action { + NativeServiceAction::Restart => { + let mut stop = Command::new("sc"); + stop.arg("stop").arg(native_id); + let mut start = Command::new("sc"); + start.arg("start").arg(native_id); + vec![ + (stop, "failed to execute Windows service stop action"), + (start, "failed to execute Windows service start action"), + ] + } + _ => { + let mut command = Command::new("sc"); + let verb = match action { + NativeServiceAction::Status => "query", + NativeServiceAction::Start => "start", + NativeServiceAction::Stop => "stop", + NativeServiceAction::Restart => unreachable!(), + }; + command.arg(verb).arg(native_id); + vec![(command, "failed to execute Windows service action")] + } + } +} + pub fn write_native_sidecar_state( layout: &PrefixLayout, package_name: &str, diff --git a/crates/crosspack-installer/src/receipts.rs b/crates/crosspack-installer/src/receipts.rs index 974a315..a1d935a 100644 --- a/crates/crosspack-installer/src/receipts.rs +++ b/crates/crosspack-installer/src/receipts.rs @@ -1,4 +1,6 @@ -use anyhow::{Context, Result}; +use anyhow::{anyhow, Context, Result}; +use crosspack_core::ServiceDeclaration; +use std::collections::BTreeMap; use std::fs; use std::path::PathBuf; @@ -96,7 +98,7 @@ pub(crate) fn parse_receipt(raw: &str) -> Result { let mut install_status = None; let mut installed_at_unix = None; - for line in raw.lines().map(str::trim).filter(|line| !line.is_empty()) { + for line in raw.lines().filter(|line| !line.trim().is_empty()) { let Some((k, v)) = line.split_once('=') 
else { continue; }; @@ -138,3 +140,162 @@ pub(crate) fn parse_receipt(raw: &str) -> Result { installed_at_unix: installed_at_unix.context("missing installed_at_unix")?, }) } + +const DECLARED_SERVICES_STATE_VERSION: u32 = 1; + +pub fn write_declared_services_state( + layout: &PrefixLayout, + package_name: &str, + services: &[ServiceDeclaration], +) -> Result { + let path = layout.declared_services_state_path(package_name); + if services.is_empty() { + let _ = fs::remove_file(&path); + return Ok(path); + } + + let mut payload = String::new(); + payload.push_str(&format!("version={}\n", DECLARED_SERVICES_STATE_VERSION)); + for service in services { + if service.name.contains('\t') + || service.name.contains('\n') + || service + .native_id + .as_deref() + .is_some_and(|id| id.contains('\t') || id.contains('\n')) + { + return Err(anyhow!( + "declared service values must not contain tabs or newlines" + )); + } + payload.push_str(&format!( + "service={}\t{}\n", + service.name, + service.native_id.as_deref().unwrap_or("") + )); + } + + fs::write(&path, payload.as_bytes()).with_context(|| { + format!( + "failed to write declared services state: {}", + path.display() + ) + })?; + Ok(path) +} + +pub fn read_declared_services_state( + layout: &PrefixLayout, + package_name: &str, +) -> Result> { + let path = layout.declared_services_state_path(package_name); + if !path.exists() { + return Ok(Vec::new()); + } + + let raw = fs::read_to_string(&path) + .with_context(|| format!("failed to read declared services state: {}", path.display()))?; + parse_declared_services_state(&raw).with_context(|| { + format!( + "failed to parse declared services state: {}", + path.display() + ) + }) +} + +pub fn read_all_declared_services_states( + layout: &PrefixLayout, +) -> Result>> { + let dir = layout.installed_state_dir(); + if !dir.exists() { + return Ok(BTreeMap::new()); + } + + let mut states = BTreeMap::new(); + for entry in fs::read_dir(&dir) + .with_context(|| format!("failed to read 
install state directory: {}", dir.display()))?
+    {
+        let entry = entry?;
+        if !entry.file_type()?.is_file() {
+            continue;
+        }
+        let path = entry.path();
+        if !path
+            .file_name()
+            .and_then(|value| value.to_str())
+            .is_some_and(|name| name.ends_with(".services"))
+        {
+            continue;
+        }
+        let Some(stem) = path.file_stem().and_then(|v| v.to_str()) else {
+            continue;
+        };
+        let raw = fs::read_to_string(&path).with_context(|| {
+            format!("failed to read declared services state: {}", path.display())
+        })?;
+        let services = parse_declared_services_state(&raw).with_context(|| {
+            format!(
+                "failed to parse declared services state: {}",
+                path.display()
+            )
+        })?;
+        states.insert(stem.to_string(), services);
+    }
+
+    Ok(states)
+}
+
+pub fn clear_declared_services_state(layout: &PrefixLayout, package_name: &str) -> Result<()> {
+    let path = layout.declared_services_state_path(package_name);
+    let _ = fs::remove_file(path);
+    Ok(())
+}
+
+fn parse_declared_services_state(raw: &str) -> Result<Vec<ServiceDeclaration>> {
+    let mut version = None;
+    let mut services = Vec::new();
+
+    for line in raw.lines().filter(|line| !line.trim().is_empty()) {
+        let Some((key, value)) = line.split_once('=') else {
+            return Err(anyhow!("invalid declared services row format: {line}"));
+        };
+        match key {
+            "version" => {
+                version = Some(
+                    value
+                        .parse::<u32>()
+                        .context("declared services version must be u32")?,
+                );
+            }
+            "service" => {
+                let parts = value.split('\t').collect::<Vec<_>>();
+                if parts.len() != 2 {
+                    return Err(anyhow!("invalid declared service row format"));
+                }
+                if parts[0].trim().is_empty() {
+                    return Err(anyhow!("declared service name must not be empty"));
+                }
+                let native_id = if parts[1].trim().is_empty() {
+                    None
+                } else {
+                    Some(parts[1].to_string())
+                };
+                services.push(ServiceDeclaration {
+                    name: parts[0].to_string(),
+                    native_id,
+                });
+            }
+            _ => {}
+        }
+    }
+
+    if let Some(found_version) = version {
+        if found_version != DECLARED_SERVICES_STATE_VERSION {
+            return Err(anyhow!(
+                "unsupported 
declared services state version: {found_version}" + )); + } + } + + Ok(services) +} diff --git a/crates/crosspack-installer/src/tests.rs b/crates/crosspack-installer/src/tests.rs index 2e320c8..fef0573 100644 --- a/crates/crosspack-installer/src/tests.rs +++ b/crates/crosspack-installer/src/tests.rs @@ -2,11 +2,12 @@ use super::*; use std::sync::atomic::{AtomicU64, Ordering}; use anyhow::anyhow; -use crosspack_core::{ArchiveType, ArtifactCompletionShell, ArtifactGuiApp}; +use crosspack_core::{ArchiveType, ArtifactCompletionShell, ArtifactGuiApp, ServiceDeclaration}; use std::collections::HashMap; use std::fs; use std::io; use std::path::{Path, PathBuf}; +use std::process::Command; #[cfg(unix)] use crate::artifact::copy_dmg_payload; @@ -26,8 +27,8 @@ use crate::native::{ project_macos_user_applications_dir, project_windows_start_menu_programs_dir, register_macos_application_symlink_with_creator, register_macos_native_gui_registration_with_executor_and_creator, - register_native_gui_app_best_effort_with_executor, select_macos_registration_destination, - MACOS_LSREGISTER_PATH, + register_native_gui_app_best_effort_with_executor, run_native_service_action_with_executor, + select_macos_registration_destination, MACOS_LSREGISTER_PATH, }; use crate::receipts::parse_receipt; @@ -108,6 +109,68 @@ fn receipt_unknown_install_mode_falls_back_to_managed() { assert_eq!(receipt.install_mode, InstallMode::Managed); } +#[test] +fn declared_services_state_round_trip() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let services = vec![ + ServiceDeclaration { + name: "demo".to_string(), + native_id: None, + }, + ServiceDeclaration { + name: "demo-worker".to_string(), + native_id: Some("demo-worker@main".to_string()), + }, + ]; + + write_declared_services_state(&layout, "demo", &services) + .expect("must write declared services state"); + let loaded = + read_declared_services_state(&layout, "demo").expect("must read declared services 
state"); + assert_eq!(loaded, services); + + let all = + read_all_declared_services_states(&layout).expect("must read all declared services state"); + assert_eq!(all.get("demo"), Some(&services)); +} + +#[test] +fn declared_services_state_is_removed_when_empty() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + write_declared_services_state( + &layout, + "demo", + &[ServiceDeclaration { + name: "demo".to_string(), + native_id: None, + }], + ) + .expect("must write services state"); + write_declared_services_state(&layout, "demo", &[]).expect("must clear services state"); + + assert!(!layout.declared_services_state_path("demo").exists()); +} + +#[test] +fn native_service_adapter_returns_reason_coded_fallback_on_command_failure() { + let outcome = run_native_service_action_with_executor( + NativeServiceAction::Start, + "demo", + "demo", + |_command, _context| Err(anyhow!("simulated service command failure")), + ); + + assert!( + !outcome.applied, + "failed native command should report deterministic fallback" + ); + assert_eq!(outcome.reason_code, "native-command-failed"); +} + #[test] fn transaction_paths_match_spec_layout() { let layout = test_layout(); @@ -1225,6 +1288,82 @@ fn install_from_artifact_rejects_native_installer_when_escalation_policy_forbids let _ = fs::remove_dir_all(layout.prefix()); } +#[cfg(unix)] +#[test] +fn install_from_source_archive_runs_build_and_install_commands() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let source_root = layout.prefix().join("source"); + let project_dir = source_root.join("demo-src"); + fs::create_dir_all(&project_dir).expect("must create source project dir"); + fs::write(project_dir.join("demo"), b"#!/bin/sh\n").expect("must write source payload"); + + let archive_path = layout.prefix().join("demo-src.tar.gz"); + let tar_status = Command::new("tar") + .arg("-czf") + .arg(&archive_path) + .arg("-C") + .arg(&source_root) + 
.arg("demo-src") + .status() + .expect("must execute tar command for test fixture"); + assert!(tar_status.success(), "tar fixture creation must succeed"); + + let install_root = install_from_source_archive( + &layout, + "demo", + "1.0.0", + &archive_path, + ArchiveType::TarGz, + &[ + "sh".to_string(), + "-c".to_string(), + "cp demo built-demo".to_string(), + ], + &[ + "sh".to_string(), + "-c".to_string(), + "test -f built-demo && cp built-demo $CROSSPACK_STAGE_DIR/built-demo".to_string(), + ], + ) + .expect("source archive install should succeed"); + + assert!( + install_root.join("built-demo").exists(), + "source build output should be present in installed package root" + ); + + let _ = fs::remove_dir_all(layout.prefix()); +} + +#[test] +fn install_from_source_archive_rejects_missing_build_commands() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + + let archive_path = layout.prefix().join("demo-src.tar.gz"); + fs::write(&archive_path, b"not-a-real-archive").expect("must write archive fixture"); + + let err = install_from_source_archive( + &layout, + "demo", + "1.0.0", + &archive_path, + ArchiveType::TarGz, + &[], + &["true".to_string()], + ) + .expect_err("empty build command set should fail closed"); + assert!( + err.to_string() + .contains("source build metadata requires non-empty build_commands"), + "unexpected error: {err}" + ); + + let _ = fs::remove_dir_all(layout.prefix()); +} + #[cfg(not(windows))] #[test] fn install_from_artifact_rejects_msi_on_non_windows_host() { diff --git a/crates/crosspack-installer/src/types.rs b/crates/crosspack-installer/src/types.rs index f042dd8..95c412b 100644 --- a/crates/crosspack-installer/src/types.rs +++ b/crates/crosspack-installer/src/types.rs @@ -170,3 +170,18 @@ pub struct UninstallResult { pub pruned_dependencies: Vec, pub blocked_by_roots: Vec, } + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum NativeServiceAction { + Status, + Start, + Stop, + Restart, +} + 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct NativeServiceOutcome { + pub adapter: String, + pub applied: bool, + pub reason_code: String, +} diff --git a/crates/crosspack-installer/src/uninstall.rs b/crates/crosspack-installer/src/uninstall.rs index a4f5380..d703e68 100644 --- a/crates/crosspack-installer/src/uninstall.rs +++ b/crates/crosspack-installer/src/uninstall.rs @@ -12,7 +12,7 @@ use crate::native::{ clear_native_sidecar_state, remove_package_native_gui_registrations_best_effort, run_package_native_uninstall_actions, }; -use crate::receipts::read_install_receipts; +use crate::receipts::{clear_declared_services_state, read_install_receipts}; use crate::{ InstallMode, InstallReason, InstallReceipt, PrefixLayout, UninstallResult, UninstallStatus, }; @@ -265,6 +265,7 @@ fn remove_receipt_artifacts( receipt_path.display() ) })?; + clear_declared_services_state(layout, &receipt.name)?; Ok(if package_existed { UninstallStatus::Uninstalled diff --git a/crates/crosspack-registry/src/lib.rs b/crates/crosspack-registry/src/lib.rs index 350049c..3e2e11b 100644 --- a/crates/crosspack-registry/src/lib.rs +++ b/crates/crosspack-registry/src/lib.rs @@ -10,7 +10,7 @@ mod source_types; pub use registry_index::{ConfiguredRegistryIndex, RegistryIndex}; pub use source_store::RegistrySourceStore; pub use source_types::{ - RegistrySourceKind, RegistrySourceRecord, RegistrySourceSnapshotState, + RegistrySourceCommunity, RegistrySourceKind, RegistrySourceRecord, RegistrySourceSnapshotState, RegistrySourceWithSnapshotState, RegistrySourceWithSnapshotStatus, SourceUpdateResult, SourceUpdateStatus, }; @@ -24,10 +24,12 @@ pub(crate) use snapshot_state::{ read_snapshot_id, read_snapshot_state, source_has_ready_snapshot, write_snapshot_file, }; pub(crate) use source_state::{ - parse_source_state_file, select_update_sources, sort_sources, validate_source_fingerprint, - validate_source_name, RegistrySourceStateFile, + parse_source_state_file, select_update_sources, sort_sources, + 
validate_community_recipe_catalog_path, validate_source_fingerprint, validate_source_name, + RegistrySourceStateFile, }; pub(crate) use source_sync::update_source; +pub(crate) use source_sync::verify_community_recipe_catalog_policy; #[cfg(test)] pub(crate) use git_ops::derive_snapshot_id_from_full_git_sha; diff --git a/crates/crosspack-registry/src/registry_index.rs b/crates/crosspack-registry/src/registry_index.rs index ebcfa66..b38d216 100644 --- a/crates/crosspack-registry/src/registry_index.rs +++ b/crates/crosspack-registry/src/registry_index.rs @@ -7,8 +7,8 @@ use crosspack_core::PackageManifest; use crosspack_security::verify_ed25519_signature_hex; use crate::{ - parse_source_state_file, sort_sources, source_has_ready_snapshot, RegistrySourceRecord, - RegistrySourceStateFile, + parse_source_state_file, sort_sources, source_has_ready_snapshot, + verify_community_recipe_catalog_policy, RegistrySourceRecord, RegistrySourceStateFile, }; #[derive(Debug, Clone)] @@ -177,6 +177,12 @@ impl ConfiguredRegistryIndex { if !source_has_ready_snapshot(&cache_root)? 
{ continue; } + verify_community_recipe_catalog_policy(&cache_root, &source).with_context(|| { + format!( + "failed validating community recipe metadata for configured source '{}'", + source.name + ) + })?; configured.push(ConfiguredSnapshotSource { name: source.name, index: RegistryIndex::open(cache_root), diff --git a/crates/crosspack-registry/src/source_state.rs b/crates/crosspack-registry/src/source_state.rs index f115cfb..1b28899 100644 --- a/crates/crosspack-registry/src/source_state.rs +++ b/crates/crosspack-registry/src/source_state.rs @@ -1,4 +1,5 @@ use std::collections::HashSet; +use std::path::Path; use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; @@ -105,6 +106,9 @@ pub(crate) fn validate_loaded_sources(sources: &[RegistrySourceRecord]) -> Resul for source in sources { validate_source_name(&source.name)?; validate_source_fingerprint(&source.fingerprint_sha256)?; + if let Some(community) = &source.community { + validate_community_recipe_catalog_path(&community.recipe_catalog_path)?; + } if !seen_names.insert(source.name.as_str()) { anyhow::bail!( @@ -117,6 +121,53 @@ pub(crate) fn validate_loaded_sources(sources: &[RegistrySourceRecord]) -> Resul Ok(()) } +pub(crate) fn validate_community_recipe_catalog_path(path: &str) -> Result<()> { + if path.is_empty() { + anyhow::bail!("invalid community recipe catalog path: must not be empty"); + } + + let path_value = Path::new(path); + if path_value.is_absolute() { + anyhow::bail!( + "invalid community recipe catalog path '{}': must be relative", + path + ); + } + + if path_value + .components() + .any(|component| matches!(component, std::path::Component::ParentDir)) + { + anyhow::bail!( + "invalid community recipe catalog path '{}': parent traversal is not allowed", + path + ); + } + + if path_value + .components() + .any(|component| matches!(component, std::path::Component::CurDir)) + { + anyhow::bail!( + "invalid community recipe catalog path '{}': '.' 
segments are not allowed", + path + ); + } + + if path_value + .extension() + .and_then(|extension| extension.to_str()) + != Some("toml") + { + anyhow::bail!( + "invalid community recipe catalog path '{}': expected .toml file", + path + ); + } + + Ok(()) +} + pub(crate) fn select_update_sources( sources: &[RegistrySourceRecord], target_names: &[String], diff --git a/crates/crosspack-registry/src/source_store.rs b/crates/crosspack-registry/src/source_store.rs index 4f2cbe4..4031da5 100644 --- a/crates/crosspack-registry/src/source_store.rs +++ b/crates/crosspack-registry/src/source_store.rs @@ -5,9 +5,9 @@ use anyhow::{Context, Result}; use crate::{ parse_source_state_file, read_snapshot_state, select_update_sources, sort_sources, - update_source, validate_source_fingerprint, validate_source_name, RegistrySourceRecord, - RegistrySourceStateFile, RegistrySourceWithSnapshotState, SourceUpdateResult, - SourceUpdateStatus, + update_source, validate_community_recipe_catalog_path, validate_source_fingerprint, + validate_source_name, RegistrySourceRecord, RegistrySourceStateFile, + RegistrySourceWithSnapshotState, SourceUpdateResult, SourceUpdateStatus, }; #[derive(Debug, Clone)] @@ -25,6 +25,9 @@ impl RegistrySourceStore { pub fn add_source(&self, source: RegistrySourceRecord) -> Result<()> { validate_source_name(&source.name)?; validate_source_fingerprint(&source.fingerprint_sha256)?; + if let Some(community) = &source.community { + validate_community_recipe_catalog_path(&community.recipe_catalog_path)?; + } let mut state = self.load_state()?; if state diff --git a/crates/crosspack-registry/src/source_sync.rs b/crates/crosspack-registry/src/source_sync.rs index 4f22816..85a5612 100644 --- a/crates/crosspack-registry/src/source_sync.rs +++ b/crates/crosspack-registry/src/source_sync.rs @@ -2,15 +2,47 @@ use std::fs; use std::path::{Path, PathBuf}; use anyhow::{Context, Result}; -use crosspack_security::sha256_hex; +use crosspack_security::{sha256_hex, 
verify_ed25519_signature_hex};
+use serde::Deserialize;
 
 use crate::{
     compute_filesystem_snapshot_id, copy_source_to_temp, count_manifest_files,
     git_head_snapshot_id, read_snapshot_id, run_git_clone, run_git_command, unique_suffix,
-    validate_staged_registry_layout, write_snapshot_file, RegistryIndex, RegistrySourceKind,
-    RegistrySourceRecord, RegistrySourceStore, SourceUpdateStatus,
+    validate_community_recipe_catalog_path, validate_staged_registry_layout, write_snapshot_file,
+    RegistryIndex, RegistrySourceKind, RegistrySourceRecord, RegistrySourceStore,
+    SourceUpdateStatus,
 };
 
+#[derive(Debug, Deserialize)]
+struct CommunityRecipeCatalog {
+    #[serde(default = "community_recipe_catalog_version")]
+    version: u32,
+    #[serde(default)]
+    recipes: Vec<CommunityRecipeCatalogEntry>,
+}
+
+#[derive(Debug, Deserialize)]
+struct CommunityRecipeCatalogEntry {
+    package: String,
+}
+
+fn community_recipe_catalog_version() -> u32 {
+    1
+}
+
+fn is_package_token(value: &str) -> bool {
+    let bytes = value.as_bytes();
+    if bytes.is_empty() || bytes.len() > 64 {
+        return false;
+    }
+
+    let starts_valid = bytes[0].is_ascii_lowercase() || bytes[0].is_ascii_digit();
+    starts_valid
+        && bytes[1..] 
+ .iter() + .all(|b| b.is_ascii_lowercase() || b.is_ascii_digit() || b"._+-".contains(b)) +} + pub(crate) fn update_source( store: &RegistrySourceStore, source: &RegistrySourceRecord, @@ -117,6 +149,7 @@ fn finalize_staged_source_update( } verify_metadata_signature_policy(&staged_root, &source.name)?; + verify_community_recipe_catalog_policy(&staged_root, source)?; let manifest_count = count_manifest_files(&staged_root.join("index"))?; let existing_snapshot_id = read_snapshot_id( @@ -207,6 +240,129 @@ fn finalize_staged_source_update( Ok((status, snapshot_id)) } +pub(crate) fn verify_community_recipe_catalog_policy( + staged_root: &Path, + source: &RegistrySourceRecord, +) -> Result<()> { + let Some(community) = &source.community else { + return Ok(()); + }; + + validate_community_recipe_catalog_path(&community.recipe_catalog_path)?; + + let trusted_key_path = staged_root.join("registry.pub"); + let trusted_public_key_hex = fs::read_to_string(&trusted_key_path).with_context(|| { + format!( + "source-metadata-invalid: source '{}' failed reading trusted key {}", + source.name, + trusted_key_path.display() + ) + })?; + let trusted_public_key_hex = trusted_public_key_hex.trim(); + + let catalog_path = staged_root.join(&community.recipe_catalog_path); + let catalog_bytes = fs::read(&catalog_path).with_context(|| { + format!( + "source-metadata-invalid: source '{}' missing community recipe catalog {}", + source.name, + catalog_path.display() + ) + })?; + + let catalog_signature_path = catalog_path.with_extension("toml.sig"); + let catalog_signature_hex = fs::read_to_string(&catalog_signature_path).with_context(|| { + format!( + "source-metadata-invalid: source '{}' missing community recipe catalog signature {}", + source.name, + catalog_signature_path.display() + ) + })?; + let catalog_signature_hex = catalog_signature_hex.trim(); + + let signature_is_valid = verify_ed25519_signature_hex( + &catalog_bytes, + trusted_public_key_hex, + catalog_signature_hex, + ) + 
.with_context(|| { + format!( + "source-metadata-invalid: source '{}' failed verifying community recipe catalog signature {}", + source.name, + catalog_signature_path.display() + ) + })?; + if !signature_is_valid { + anyhow::bail!( + "source-metadata-invalid: source '{}' community recipe catalog has invalid signature {}", + source.name, + catalog_signature_path.display() + ); + } + + let catalog_content = String::from_utf8(catalog_bytes).with_context(|| { + format!( + "source-metadata-invalid: source '{}' community recipe catalog is not UTF-8: {}", + source.name, + catalog_path.display() + ) + })?; + let catalog: CommunityRecipeCatalog = toml::from_str(&catalog_content).with_context(|| { + format!( + "source-metadata-invalid: source '{}' failed parsing community recipe catalog {}", + source.name, + catalog_path.display() + ) + })?; + + if catalog.version != community_recipe_catalog_version() { + anyhow::bail!( + "source-metadata-invalid: source '{}' unsupported community recipe catalog version {}", + source.name, + catalog.version + ); + } + + let mut previous = None::<&str>; + for entry in &catalog.recipes { + if entry.package.is_empty() { + anyhow::bail!( + "source-metadata-invalid: source '{}' community recipe catalog contains empty package entry", + source.name + ); + } + + if !is_package_token(&entry.package) { + anyhow::bail!( + "source-metadata-invalid: source '{}' community recipe catalog contains invalid package token '{}': use package-token grammar", + source.name, + entry.package + ); + } + + let package_index_dir = staged_root.join("index").join(&entry.package); + if !package_index_dir.is_dir() { + anyhow::bail!( + "source-metadata-invalid: source '{}' community recipe '{}' missing index directory {}", + source.name, + entry.package, + package_index_dir.display() + ); + } + + if let Some(last) = previous { + if entry.package.as_str() <= last { + anyhow::bail!( + "source-metadata-invalid: source '{}' community recipe catalog must be strictly sorted by 
package name", + source.name + ); + } + } + previous = Some(entry.package.as_str()); + } + + Ok(()) +} + pub(crate) fn verify_metadata_signature_policy( staged_root: &Path, source_name: &str, diff --git a/crates/crosspack-registry/src/source_types.rs b/crates/crosspack-registry/src/source_types.rs index 400e655..7217afb 100644 --- a/crates/crosspack-registry/src/source_types.rs +++ b/crates/crosspack-registry/src/source_types.rs @@ -17,6 +17,13 @@ pub struct RegistrySourceRecord { #[serde(default = "crate::source_types::source_enabled_default")] pub enabled: bool, pub priority: u32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub community: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct RegistrySourceCommunity { + pub recipe_catalog_path: String, } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/crosspack-registry/src/tests.rs b/crates/crosspack-registry/src/tests.rs index 474beb3..6400979 100644 --- a/crates/crosspack-registry/src/tests.rs +++ b/crates/crosspack-registry/src/tests.rs @@ -498,6 +498,106 @@ fn source_store_defaults_enabled_to_true_when_missing_in_loaded_state() { let _ = fs::remove_dir_all(&root); } +#[test] +fn source_store_persists_and_loads_optional_community_metadata() { + let root = test_registry_root(); + let store = RegistrySourceStore::new(&root); + + let mut source = source_record("community", 5); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + store.add_source(source).expect("must add source"); + + let listed = store.list_sources().expect("must list sources"); + assert_eq!(listed.len(), 1); + assert_eq!( + listed[0].community, + Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }) + ); + + let content = fs::read_to_string(root.join("sources.toml")).expect("must read state file"); + assert!( + content.contains("[sources.community]"), + "expected persisted 
nested community metadata block\n{content}" + ); + assert!( + content.contains("recipe_catalog_path = \"community/recipes.toml\""), + "expected persisted recipe catalog path\n{content}" + ); + + let _ = fs::remove_dir_all(&root); +} + +#[test] +fn source_store_rejects_invalid_community_recipe_catalog_path() { + let root = test_registry_root(); + let store = RegistrySourceStore::new(&root); + + let mut source = source_record("community", 5); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "../recipes.toml".to_string(), + }); + + let err = store + .add_source(source) + .expect_err("must reject unsafe community recipe catalog path"); + assert!( + err.to_string() + .contains("invalid community recipe catalog path"), + "expected community path validation error, got: {err:#}" + ); + + let _ = fs::remove_dir_all(&root); +} + +#[test] +fn configured_index_precedence_remains_deterministic_with_community_metadata() { + let state_root = test_registry_root(); + let store = RegistrySourceStore::new(&state_root); + + let mut preferred = source_record("alpha", 1); + preferred.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + store + .add_source(preferred) + .expect("must add preferred source"); + + store + .add_source(source_record("beta", 1)) + .expect("must add fallback source"); + + let preferred_key = SigningKey::from_bytes(&[41u8; 32]); + let fallback_key = SigningKey::from_bytes(&[43u8; 32]); + write_ready_snapshot_cache(&state_root, "alpha", &preferred_key, &["14.2.0"]); + write_signed_community_recipe_catalog( + &state_root + .join("cache") + .join("alpha") + .join("community") + .join("recipes.toml"), + &preferred_key, + &["ripgrep"], + ); + write_ready_snapshot_cache(&state_root, "beta", &fallback_key, &["14.1.0"]); + + let index = ConfiguredRegistryIndex::open(&state_root).expect("must open configured index"); + let manifests = index + .package_versions("ripgrep") + .expect("must 
read package using deterministic source ordering"); + + let versions: Vec = manifests + .iter() + .map(|manifest| manifest.version.to_string()) + .collect(); + assert_eq!(versions, vec!["14.2.0"]); + + let _ = fs::remove_dir_all(&state_root); +} + #[test] fn source_store_first_write_from_empty_uses_version_one() { let root = test_registry_root(); @@ -615,6 +715,53 @@ fn update_filesystem_source_accepts_uppercase_configured_fingerprint() { let _ = fs::remove_dir_all(&root); } +#[test] +fn update_filesystem_source_fails_when_community_recipe_catalog_signature_is_missing() { + let root = test_registry_root(); + let source_root = filesystem_source_fixture(); + let store = RegistrySourceStore::new(&root); + + let registry_pub = fs::read(source_root.join("registry.pub")).expect("must read registry pub"); + let mut source = filesystem_source_record( + "local", + source_root + .to_str() + .expect("filesystem source path must be valid UTF-8"), + sha256_hex_bytes(®istry_pub), + 0, + ); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + + fs::create_dir_all(source_root.join("community")).expect("must create community dir"); + fs::write( + source_root.join("community").join("recipes.toml"), + "version = 1\n[[recipes]]\npackage = \"ripgrep\"\n", + ) + .expect("must write community recipe catalog without signature"); + + store.add_source(source).expect("must add source"); + + let results = store + .update_sources(&[]) + .expect("update API must report per-source failure"); + assert_eq!(results.len(), 1); + assert_eq!(results[0].status, SourceUpdateStatus::Failed); + assert!( + results[0] + .error + .as_deref() + .expect("must include error message") + .contains("missing community recipe catalog signature"), + "expected fail-closed missing signature error, got: {:?}", + results[0].error + ); + + let _ = fs::remove_dir_all(&source_root); + let _ = fs::remove_dir_all(&root); +} + #[test] fn 
update_filesystem_source_preserves_existing_cache_on_failure() { let root = test_registry_root(); @@ -1012,6 +1159,269 @@ fn configured_index_fails_when_no_ready_snapshot_exists() { let _ = fs::remove_dir_all(&state_root); } +#[test] +fn configured_index_fails_closed_when_cached_community_recipe_catalog_signature_is_invalid() { + let state_root = test_registry_root(); + let store = RegistrySourceStore::new(&state_root); + + let mut source = source_record("community", 0); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + store.add_source(source).expect("must add source"); + + let signing_key = SigningKey::from_bytes(&[53u8; 32]); + write_ready_snapshot_cache(&state_root, "community", &signing_key, &["14.1.0"]); + let catalog_path = state_root + .join("cache") + .join("community") + .join("community") + .join("recipes.toml"); + write_signed_community_recipe_catalog(&catalog_path, &signing_key, &["ripgrep"]); + fs::write(catalog_path.with_extension("toml.sig"), "00") + .expect("must overwrite community metadata signature with invalid value"); + + let err = ConfiguredRegistryIndex::open(&state_root) + .expect_err("must fail closed for invalid community metadata signature"); + let rendered = format!("{err:#}"); + assert!( + rendered.contains("failed validating community recipe metadata"), + "expected configured-index validation context, got: {rendered}" + ); + assert!( + rendered.contains("community recipe catalog") && rendered.contains("signature"), + "expected explicit community signature failure details, got: {rendered}" + ); + + let _ = fs::remove_dir_all(&state_root); +} + +#[test] +fn community_recipe_catalog_rejects_unsupported_version() { + let staged_root = filesystem_source_fixture(); + let signing_key = signing_key(); + let catalog_path = staged_root.join("community").join("recipes.toml"); + write_signed_community_recipe_catalog_raw( + &catalog_path, + &signing_key, + "version = 
2\n[[recipes]]\npackage = \"ripgrep\"\n", + ); + + let mut source = filesystem_source_record( + "local", + staged_root + .to_str() + .expect("filesystem source path must be valid UTF-8"), + sha256_hex_bytes( + &fs::read(staged_root.join("registry.pub")).expect("must read registry key"), + ), + 0, + ); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + + let err = verify_community_recipe_catalog_policy(&staged_root, &source) + .expect_err("must reject unsupported community recipe catalog version"); + assert!( + err.to_string() + .contains("unsupported community recipe catalog version"), + "expected unsupported-version validation error, got: {err:#}" + ); + + let _ = fs::remove_dir_all(&staged_root); +} + +#[test] +fn community_recipe_catalog_rejects_unsorted_and_duplicate_entries() { + let staged_root = filesystem_source_fixture(); + let signing_key = signing_key(); + let catalog_path = staged_root.join("community").join("recipes.toml"); + fs::create_dir_all(staged_root.join("index").join("zsh")) + .expect("must create zsh package index directory"); + write_signed_community_recipe_catalog_raw( + &catalog_path, + &signing_key, + concat!( + "version = 1\n", + "[[recipes]]\n", + "package = \"ripgrep\"\n", + "[[recipes]]\n", + "package = \"ripgrep\"\n", + ), + ); + + let mut source = filesystem_source_record( + "local", + staged_root + .to_str() + .expect("filesystem source path must be valid UTF-8"), + sha256_hex_bytes( + &fs::read(staged_root.join("registry.pub")).expect("must read registry key"), + ), + 0, + ); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + + let duplicate_err = verify_community_recipe_catalog_policy(&staged_root, &source) + .expect_err("must reject duplicate package entries"); + assert!( + duplicate_err + .to_string() + .contains("must be strictly sorted by package name"), + "expected strict-sort duplicate 
rejection, got: {duplicate_err:#}" + ); + + write_signed_community_recipe_catalog_raw( + &catalog_path, + &signing_key, + concat!( + "version = 1\n", + "[[recipes]]\n", + "package = \"zsh\"\n", + "[[recipes]]\n", + "package = \"ripgrep\"\n", + ), + ); + + let unsorted_err = verify_community_recipe_catalog_policy(&staged_root, &source) + .expect_err("must reject unsorted package entries"); + assert!( + unsorted_err + .to_string() + .contains("must be strictly sorted by package name"), + "expected strict-sort unsorted rejection, got: {unsorted_err:#}" + ); + + let _ = fs::remove_dir_all(&staged_root); +} + +#[test] +fn community_recipe_catalog_rejects_empty_package_entry() { + let staged_root = filesystem_source_fixture(); + let signing_key = signing_key(); + let catalog_path = staged_root.join("community").join("recipes.toml"); + write_signed_community_recipe_catalog_raw( + &catalog_path, + &signing_key, + "version = 1\n[[recipes]]\npackage = \"\"\n", + ); + + let mut source = filesystem_source_record( + "local", + staged_root + .to_str() + .expect("filesystem source path must be valid UTF-8"), + sha256_hex_bytes( + &fs::read(staged_root.join("registry.pub")).expect("must read registry key"), + ), + 0, + ); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + + let err = verify_community_recipe_catalog_policy(&staged_root, &source) + .expect_err("must reject empty package entries"); + assert!( + err.to_string().contains("contains empty package entry"), + "expected empty package validation error, got: {err:#}" + ); + + let _ = fs::remove_dir_all(&staged_root); +} + +#[test] +fn community_recipe_catalog_rejects_missing_index_directory_for_listed_package() { + let staged_root = filesystem_source_fixture(); + let signing_key = signing_key(); + let catalog_path = staged_root.join("community").join("recipes.toml"); + write_signed_community_recipe_catalog_raw( + &catalog_path, + &signing_key, + "version = 
1\n[[recipes]]\npackage = \"zsh\"\n", + ); + + let mut source = filesystem_source_record( + "local", + staged_root + .to_str() + .expect("filesystem source path must be valid UTF-8"), + sha256_hex_bytes( + &fs::read(staged_root.join("registry.pub")).expect("must read registry key"), + ), + 0, + ); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + + let err = verify_community_recipe_catalog_policy(&staged_root, &source) + .expect_err("must reject catalog entries missing index directories"); + let rendered = format!("{err:#}"); + assert!( + rendered.contains("missing index directory"), + "expected missing index directory error, got: {rendered}" + ); + assert!( + rendered.contains("index/zsh"), + "expected deterministic missing directory path details, got: {rendered}" + ); + + let _ = fs::remove_dir_all(&staged_root); +} + +#[test] +fn community_recipe_catalog_rejects_invalid_package_tokens() { + let staged_root = filesystem_source_fixture(); + let signing_key = signing_key(); + let catalog_path = staged_root.join("community").join("recipes.toml"); + + fs::create_dir_all(staged_root.join("index").join("ripgrep").join("plugins")) + .expect("must create nested index path to ensure grammar check is enforced"); + + let mut source = filesystem_source_record( + "local", + staged_root + .to_str() + .expect("filesystem source path must be valid UTF-8"), + sha256_hex_bytes( + &fs::read(staged_root.join("registry.pub")).expect("must read registry key"), + ), + 0, + ); + source.community = Some(RegistrySourceCommunity { + recipe_catalog_path: "community/recipes.toml".to_string(), + }); + + for token in [ + "ripgrep/plugins", + "..", + "Ripgrep", + "-ripgrep", + "ripgrep\\plugins", + ] { + let escaped_token = token.replace('\\', "\\\\"); + write_signed_community_recipe_catalog_raw( + &catalog_path, + &signing_key, + &format!("version = 1\n[[recipes]]\npackage = \"{escaped_token}\"\n"), + ); + + let err = 
verify_community_recipe_catalog_policy(&staged_root, &source) + .expect_err("must reject invalid community package token"); + let rendered = format!("{err:#}"); + assert!( + rendered.contains("invalid package token"), + "expected invalid package token error for '{token}', got: {rendered}" + ); + } + + let _ = fs::remove_dir_all(&staged_root); +} + #[test] fn configured_index_open_fails_when_sources_file_is_unreadable() { let state_root = test_registry_root(); @@ -1150,6 +1560,48 @@ fn write_signed_manifest(package_dir: &std::path::Path, signing_key: &SigningKey .expect("must write signature sidecar"); } +fn write_signed_community_recipe_catalog( + catalog_path: &Path, + signing_key: &SigningKey, + package_names: &[&str], +) { + if let Some(parent) = catalog_path.parent() { + fs::create_dir_all(parent).expect("must create community metadata directory"); + } + + let mut catalog = String::from("version = 1\n"); + for package_name in package_names { + catalog.push_str("[[recipes]]\n"); + catalog.push_str(&format!("package = \"{package_name}\"\n")); + } + + fs::write(catalog_path, catalog.as_bytes()).expect("must write community recipe catalog"); + let signature = signing_key.sign(catalog.as_bytes()); + fs::write( + catalog_path.with_extension("toml.sig"), + hex::encode(signature.to_bytes()), + ) + .expect("must write community recipe catalog signature"); +} + +fn write_signed_community_recipe_catalog_raw( + catalog_path: &Path, + signing_key: &SigningKey, + catalog: &str, +) { + if let Some(parent) = catalog_path.parent() { + fs::create_dir_all(parent).expect("must create community metadata directory"); + } + + fs::write(catalog_path, catalog.as_bytes()).expect("must write community recipe catalog"); + let signature = signing_key.sign(catalog.as_bytes()); + fs::write( + catalog_path.with_extension("toml.sig"), + hex::encode(signature.to_bytes()), + ) + .expect("must write community recipe catalog signature"); +} + fn rewrite_signed_manifest_with_extra_field( 
manifest_path: &Path, signing_key: &SigningKey, @@ -1192,6 +1644,7 @@ fn source_record(name: &str, priority: u32) -> RegistrySourceRecord { .to_string(), enabled: true, priority, + community: None, } } @@ -1208,6 +1661,7 @@ fn git_source_record( fingerprint_sha256, enabled: true, priority, + community: None, } } @@ -1224,6 +1678,7 @@ fn filesystem_source_record( fingerprint_sha256, enabled: true, priority, + community: None, } } diff --git a/docs/architecture.md b/docs/architecture.md index 68cf525..75c844b 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -49,6 +49,7 @@ Default user prefixes: - `registry remove [--purge-cache]` removes a source and optionally deletes its cached snapshot. - `update [--registry ]...` refreshes all or selected sources and prints per-source status plus `update summary: updated= up-to-date= failed=`. - `self-update [--dry-run] [--force-redownload]` refreshes configured source snapshots and then installs the latest `crosspack` package for the current host target. +- Source records support an optional `community` metadata block with a signed `recipe_catalog_path`; validation is fail-closed during update and again when opening configured snapshots. - Lifecycle-oriented commands use automatic output mode selection: an enhanced interactive terminal renderer (section hierarchy + semantic color + progress indicators) on interactive terminals, plain deterministic output for non-interactive/piped usage. - Registry metadata is trusted only when signature verification succeeds with `registry.pub` at the registry root, which acts as the local trust anchor for that registry snapshot or mirror. - Every version manifest requires a detached hex signature sidecar at `.toml.sig`. @@ -82,13 +83,14 @@ Default user prefixes: - `outdated` compares installed receipt versions with latest available metadata versions and reports upgrade candidates. 
- `depends `, `uses `, and `why ` provide deterministic dependency introspection from installed receipts. - `bundle export` writes deterministic root+pin environment bundles; `bundle apply` replays bundle roots through standard resolve/install flows. -- `services list|status|start|stop|restart` manages deterministic Crosspack service-state files under `/state/services/` for installed packages. +- `services list|status|start|stop|restart` resolves service names from manifest-declared service state persisted under `/state/installed/.services` and tracks deterministic state files under `/state/services/`. +- Service actions integrate host-native adapters (`systemd` on Linux, `launchctl` on macOS, `sc` on Windows) with deterministic reason-coded fallback (`unsupported-host`, `adapter-tool-missing`, `native-command-failed`) when native actions are unavailable or fail. - `upgrade` upgrades one package (`upgrade `) or all installed root packages (`upgrade`) while honoring pins. - `upgrade --dry-run` performs full planning and emits the same deterministic transaction preview format without mutating install state. - `install`, `upgrade`, and `bundle apply` support `--explain` in dry-run mode only; explainability lines are additive and deterministic (`explain_provider`, `explain_replacement`, `explain_conflict`). - Global `upgrade` runs one solve per target group derived from root receipts and rejects cross-target package-name overlap; current install state is package-name keyed. - `install` and `upgrade` persist `install_mode` in receipts (`managed` or `native`, derived from artifact-kind defaults). -- `--build-from-source` is parsed for `install` and `bundle apply` but currently guarded fail-closed until source-build execution is shipped. +- `--build-from-source` is supported for `install` and `bundle apply` when manifests provide valid `source_build` metadata (including `archive_sha256`); invalid metadata, checksum mismatch, and command/tool failures fail closed. 
- `install` and `upgrade` persist `install_reason` in receipts (`root` for explicit installs, `dependency` for transitive installs), while preserving existing root intent on upgrades. - `install` and `upgrade` persist `exposed_completions` receipt entries for package-declared completion files exposed under `/share/completions/packages//`. - `install` and `upgrade` persist GUI asset ownership in optional `/state/installed/.gui` sidecars for deterministic stale cleanup and uninstall removal. diff --git a/docs/install-flow.md b/docs/install-flow.md index c50be62..a2e41f9 100644 --- a/docs/install-flow.md +++ b/docs/install-flow.md @@ -13,41 +13,53 @@ - merge dependency constraints transitively, - apply pin constraints to root and transitive packages, - produce dependency-first install order. -3. Select artifact for each resolved package for requested target (`--target` or host triple). +3. Select install plan for each resolved package for requested target (`--target` or host triple): + - binary artifact path when target artifact is available, + - source-build path when `--build-from-source` is set and validated `source_build` metadata is present. 4. Determine artifact kind (`artifact.archive` or infer from URL suffix): `zip`, `tar.gz`, `tar.zst`, `bin`, `msi`, `dmg`, `appimage`, `exe`, `pkg`, `msix`, `appx`. - Extensionless final URL path segments infer to `bin`. - Pre-1.0 scope reset: `deb` and `rpm` are removed from the supported artifact contract and are rejected. 5. For each resolved package, resolve cache path at: - `/cache/artifacts////artifact.` -6. Download artifact if needed (or if `--force-redownload`). +6. Download selected payload if needed (or if `--force-redownload`): + - binary artifact URL for binary installs, + - `source_build.url` for source installs. - backend selection env var: `CROSSPACK_DOWNLOAD_BACKEND` supports `in-process` (default) or `external`. 
- default (`in-process`) uses reqwest with bounded retry (up to 3 attempts) and falls back to external backend on failure. - `external` forces external downloader backend and skips in-process attempts. - external backend is cross-platform (`curl`/`wget` with Windows PowerShell support). -7. Verify artifact SHA-256 against manifest `sha256`. -8. Stage artifact payload into temporary state directory with deterministic adapters: +7. Verify SHA-256 before execution: + - binary installs verify artifact bytes against manifest `sha256`, + - source installs verify source archive bytes against `source_build.archive_sha256`. +8. Stage payload into temporary state directory with deterministic adapters: - managed mode adapters: `zip`, `tar.gz`, `tar.zst` (archive extraction), `bin` (copy payload using the cached file name; requires `strip_components=0` and no `artifact_root`), `dmg` (attach/copy/detach extraction on macOS), `appimage` (copy payload as `artifact.appimage` on Linux; requires `strip_components=0` and no `artifact_root`), - native mode defaults: `pkg` on macOS, `exe`/`msi`/`msix`/`appx` on Windows, - native mode still uses deterministic non-UI adapter execution; vendor installer fallback is not attempted. -9. Apply `strip_components` during staging copy where supported. -10. Move staged content into `/pkgs///`. -11. Preflight binary exposure collisions against existing receipts and on-disk `/bin` entries. -12. Preflight package completion exposure collisions against existing receipts and on-disk completion files under `/share/completions/packages//`. -13. Expose declared binaries: +9. Source-build path (when selected): + - extract source archive, + - run deterministic `build_commands`, + - run deterministic `install_commands`, + - install staged output from `CROSSPACK_STAGE_DIR` into `/pkgs///`. +10. Apply `strip_components` during staging copy where supported (binary artifact path). +11. Move staged content into `/pkgs///`. +12. 
Preflight binary exposure collisions against existing receipts and on-disk `/bin` entries. +13. Preflight package completion exposure collisions against existing receipts and on-disk completion files under `/share/completions/packages//`. +14. Expose declared binaries: - Unix: symlink `/bin/` to installed package path. - Windows: write `/bin/.cmd` shim to installed package path. -14. Expose declared package completion files to `/share/completions/packages//`. -15. Expose declared GUI application assets under `/share/gui/` (launcher + handler metadata). -16. Register native GUI integrations as best-effort adapters; failures emit warning lines and do not fail successful install. +15. Expose declared package completion files to `/share/completions/packages//`. +16. Expose declared GUI application assets under `/share/gui/` (launcher + handler metadata). +17. Register native GUI integrations as best-effort adapters; failures emit warning lines and do not fail successful install. - macOS `.app` registration uses bundle-copy deployment and tries `/Applications/.app` before `~/Applications/.app`. - Existing unmanaged app bundles at either macOS destination are not overwritten; registration emits warnings and continues. -17. Remove stale previously-owned binaries, completion files, GUI assets, and native GUI registrations no longer declared for that package. -18. Write install receipt to `/state/installed/.receipt`. +18. Remove stale previously-owned binaries, completion files, GUI assets, and native GUI registrations no longer declared for that package. +19. Persist declared manifest services to `/state/installed/.services` for service-command lookup. +20. Write install receipt to `/state/installed/.receipt`. - persist `install_mode=managed|native` from artifact-kind defaults, - set `install_reason=root` for requested roots, - set `install_reason=dependency` for transitive-only packages, - preserve existing `install_reason=root` when upgrading already-rooted packages. 
-19. Best-effort refresh Crosspack shell completion assets under `/share/completions/crosspack.` so package completion loaders are up to date. +21. Best-effort refresh Crosspack shell completion assets under `/share/completions/crosspack.` so package completion loaders are up to date. `crosspack install --dry-run` executes the same planning and emits deterministic, script-friendly preview lines: - `transaction_preview operation=... mode=dry-run` @@ -94,6 +106,7 @@ Crosspack executes install/upgrade/uninstall mutations under a transaction state Rollback snapshot/replay contract (current behavior): - per-package snapshots include package tree, receipt, exposed binaries, exposed package completions, exposed GUI assets, and optional native sidecar state, +- source-build package application journals explicit source phase steps (`source_fetch:*`, `source_build_system:*`, `source_install:*`) in addition to package apply steps, with `source_build_system:*` recorded only after successful source build execution, - rollback replays compensating package steps in reverse journal order, including native step names (`install_native_package:`, `upgrade_native_package:`), - native uninstall actions are replayed before managed snapshot restore for native package steps. @@ -123,6 +136,7 @@ The following install-flow extensions are planned in `docs/dependency-policy-spe - `install_mode` (`managed` or `native`; legacy receipts default to `managed`) - `state/installed/.gui` sidecar (optional): GUI asset ownership keys and storage paths for uninstall/upgrade cleanup. - `state/installed/.gui-native` sidecar (optional): native uninstall action records (`key`, `kind`, `path`) for deterministic uninstall/rollback cleanup. +- `state/installed/.services` sidecar (optional): declared service records (`name`, optional `native_id`) for deterministic service command routing. 
- `dependency` (repeated `name@version`, optional) - `install_reason` (`root` or `dependency`; legacy receipts default to `root`) - `install_status` (`installed`) @@ -142,6 +156,7 @@ The following install-flow extensions are planned in `docs/dependency-policy-spe - Completion collision: install fails if a projected package completion file is already owned by another package or exists unmanaged in Crosspack completion storage. - GUI asset collision: install fails if a projected GUI ownership key is already owned by another package or a projected GUI asset path already exists unmanaged. - Native GUI registration failures (including macOS destination prepare/write failures and unmanaged overwrite protection): install/upgrade/uninstall emit warnings and continue when package payload install/removal succeeded. +- Native service adapter failures for `services status|start|stop|restart`: commands return deterministic fallback reason codes (`unsupported-host`, `adapter-tool-missing`, `native-command-failed`) while preserving deterministic plain output shape. - Global solve downgrade requirement during `upgrade`: operation fails with an explicit downgrade message and command hint. - Completion asset refresh failure: install/upgrade/uninstall warns but does not fail. diff --git a/docs/manifest-spec.md b/docs/manifest-spec.md index f7d416d..4702857 100644 --- a/docs/manifest-spec.md +++ b/docs/manifest-spec.md @@ -14,16 +14,43 @@ Each package version is represented by a TOML manifest stored in the registry in - `license` - `homepage` - `dependencies`: map of package name to semver constraint. -- `source_build` (non-GA guardrail metadata): optional source-build metadata block used for explicit source-build policy signaling. +- `source_build`: optional source-build metadata block used when `--build-from-source` is requested. +- `services`: optional list of service declarations consumed by `crosspack services` commands. 
+ +`crosspack info ` prints `Description: ` when `description` is present and non-empty. +For deterministic plain output, tab/newline/carriage-return characters in `description` are normalized to spaces. ### Source Build Metadata (`source_build`) -`source_build` is parsed and validated, but end-to-end source-build execution is currently guarded (non-GA). +`source_build` is parsed, validated, and used by source-build install flows. - `url`: source archive or source tree URL. +- `archive_sha256`: expected SHA-256 digest of downloaded source archive bytes (required). - `build_system`: build-system token (`cargo`, `cmake`, etc.). -- `build_commands` (optional): deterministic command array used for build steps. -- `install_commands` (optional): deterministic command array used for install steps. +- `build_commands`: deterministic command-token array used for build steps (must be non-empty). +- `install_commands`: deterministic command-token array used for install steps (must be non-empty). + +Source-build constraints: + +- source builds run only when `--build-from-source` is set, +- source URL must infer to a supported archive type (`zip`, `tar.gz`, `tar.zst`), +- source archive checksum must be a 64-character hexadecimal SHA-256 digest, +- command tokens must be non-empty, +- metadata or command validation failures fail closed. + +### Service Declarations (`services`) + +`services` is parsed and validated strictly. + +- `name`: service token exposed to `crosspack services `. +- `native_id` (optional): host-native service identifier; defaults to `name` when omitted. + +Service declaration constraints: + +- service names must follow package-token grammar (`[a-z0-9][a-z0-9._+-]{0,63}`), +- `native_id`, when present, must follow package-token grammar with optional `@` segments, +- service names must be unique per manifest, +- unknown fields fail closed. 
## Artifact Fields diff --git a/docs/registry-spec.md b/docs/registry-spec.md index 54a3b00..ec81663 100644 --- a/docs/registry-spec.md +++ b/docs/registry-spec.md @@ -34,6 +34,7 @@ Legacy compatibility path when `--registry-root` is provided: - Refresh snapshots via `crosspack update` (all sources by default, or selected via repeated `--registry `). - Read manifests from local verified snapshots on disk. - Keep cached snapshots for deterministic resolution and source precedence. +- If a source defines optional community metadata in `sources.toml`, verify the configured recipe catalog path and signature before snapshot acceptance. ## Version Discovery @@ -49,8 +50,17 @@ Legacy compatibility path when `--registry-root` is provided: - Each manifest must have a detached signature sidecar at `.toml.sig`. - The sidecar format is hex-encoded detached signature bytes. - Operations that rely on registry metadata fail closed on signature or key errors. +- Optional community recipe metadata is signed and validated with the same source trust root (`registry.pub`) and fails closed on missing/invalid signatures or invalid catalog content. - If the entire registry root content is compromised (including `registry.pub`), this model does not provide authenticity guarantees for that compromised root. +## Optional Community Recipe Metadata + +- Source records may include an optional `community` block in `sources.toml`. +- `community.recipe_catalog_path` points to a relative `.toml` file within the source snapshot (for example: `community/recipes.toml`). +- The recipe catalog requires a detached signature at `.sig` and must verify against the source `registry.pub` key. +- Catalog schema currently supports `version = 1` and `[[recipes]] package = ""` entries. +- Recipe entries must be strictly sorted by package name and each package must exist under `index//`. 
+ ## Source Management Commands - `crosspack registry add --kind --priority --fingerprint <64-hex>` diff --git a/docs/source-management-spec.md b/docs/source-management-spec.md index 252579d..b2ea479 100644 --- a/docs/source-management-spec.md +++ b/docs/source-management-spec.md @@ -20,7 +20,7 @@ This spec does not cover: ## Goals -- Keep a Homebrew-like local-first workflow with simple commands. +- Keep a Crosspack-native local-first workflow with simple commands. - Add APT-like trust pinning and fail-closed metadata usage. - Keep package selection deterministic across sources. - Keep existing `search`, `info`, `install`, and `upgrade` behavior stable unless source configuration requires stricter validation. @@ -37,6 +37,7 @@ This spec does not cover: - Snapshot: a verified local copy of a source at a specific revision. - Fingerprint: SHA-256 hex digest of the raw `registry.pub` bytes. - Source precedence: deterministic ordering used when the same package exists in multiple sources. +- Community recipe catalog: optional source-scoped metadata describing community-managed recipe package names. ## Official Bootstrap Contract (SPI-26) @@ -172,12 +173,16 @@ location = "https://github.com/spiritledsoftware/crosspack-registry.git" priority = 100 fingerprint_sha256 = "65149d198a39db9ecfea6f63d098858ed3b06c118c1f455f84ab571106b830c2" enabled = true + +[sources.community] +recipe_catalog_path = "community/recipes.toml" ``` Rules: - Serializer must emit sources sorted by `(priority, name)` for deterministic diffs. - `enabled` defaults to `true` when missing. +- `community` is optional; when present, `recipe_catalog_path` must be a relative `.toml` path under the source snapshot. - Unknown fields are ignored for forward compatibility. ### `snapshot.json` @@ -208,8 +213,10 @@ For each targeted source, `crosspack update` performs: - `index/` 3. Compute fingerprint from fetched `registry.pub` and compare against `sources.toml`. 4. 
Verify metadata signature policy can be enforced (sidecar files must be present for manifests that are read by registry APIs). -5. Atomically replace `/state/registries/cache//`. -6. Write `snapshot.json`. +5. If source `community` metadata is configured, verify `recipe_catalog_path` and detached signature (`.toml.sig`) using the same pinned source trust root. +6. Parse and validate the community recipe catalog (supported schema version, strictly sorted package names, package directories present). +7. Atomically replace `/state/registries/cache//`. +8. Write `snapshot.json`. If any step fails, existing cache for that source remains unchanged. From 9d8f73ed7dbe49f18940846240274fff4937d3a2 Mon Sep 17 00:00:00 2001 From: Ian Pascoe Date: Mon, 2 Mar 2026 05:09:31 -0500 Subject: [PATCH 3/6] fix(ci): resolve cross-platform clippy/test regressions --- crates/crosspack-cli/src/tests.rs | 10 ++++++++++ crates/crosspack-installer/src/tests.rs | 1 + 2 files changed, 11 insertions(+) diff --git a/crates/crosspack-cli/src/tests.rs b/crates/crosspack-cli/src/tests.rs index 27b5573..a85c235 100644 --- a/crates/crosspack-cli/src/tests.rs +++ b/crates/crosspack-cli/src/tests.rs @@ -8035,6 +8035,7 @@ sha256 = "abc" static TEST_LAYOUT_COUNTER: AtomicU64 = AtomicU64::new(0); + #[cfg(unix)] fn home_env_lock() -> &'static Mutex<()> { static LOCK: OnceLock> = OnceLock::new(); LOCK.get_or_init(|| Mutex::new(())) @@ -8072,10 +8073,12 @@ sha256 = "abc" } } + #[cfg(unix)] struct HomeEnvGuard { previous: Option, } + #[cfg(unix)] impl HomeEnvGuard { fn set(home: &Path) -> Self { let previous = std::env::var("HOME").ok(); @@ -8086,6 +8089,7 @@ sha256 = "abc" } } + #[cfg(unix)] impl Drop for HomeEnvGuard { fn drop(&mut self) { match self.previous.as_deref() { @@ -8106,6 +8110,7 @@ sha256 = "abc" .as_nanos() } + #[cfg(unix)] fn with_test_home_layout(home_root: &Path, run: impl FnOnce(&PrefixLayout) -> T) -> T { let _home_lock = home_env_lock() .lock() @@ -8115,6 +8120,7 @@ sha256 = "abc" 
run(&layout) } + #[cfg(unix)] fn single_transaction_txid(layout: &PrefixLayout) -> String { let mut txids = std::fs::read_dir(layout.transactions_dir()) .expect("must read transactions dir") @@ -8230,6 +8236,7 @@ sha256 = "abc" .expect("must write signature"); } + #[cfg(unix)] fn write_signed_source_build_manifest( layout: &PrefixLayout, source_name: &str, @@ -8254,11 +8261,13 @@ sha256 = "abc" ); } + #[cfg(unix)] struct SourceBuildScripts { build: String, install: String, } + #[cfg(unix)] fn write_signed_source_build_manifest_with_commands( layout: &PrefixLayout, source_name: &str, @@ -8314,6 +8323,7 @@ install_commands = ["sh", "-c", "{install_script}"] .expect("must write signature"); } + #[cfg(unix)] fn seed_source_build_tar_gz_cache( layout: &PrefixLayout, package_name: &str, diff --git a/crates/crosspack-installer/src/tests.rs b/crates/crosspack-installer/src/tests.rs index fef0573..fff0cf1 100644 --- a/crates/crosspack-installer/src/tests.rs +++ b/crates/crosspack-installer/src/tests.rs @@ -7,6 +7,7 @@ use std::collections::HashMap; use std::fs; use std::io; use std::path::{Path, PathBuf}; +#[cfg(unix)] use std::process::Command; #[cfg(unix)] From d6a08e636e197c1fa9916a2e0fa76ade253d7f55 Mon Sep 17 00:00:00 2001 From: Ian Pascoe Date: Mon, 2 Mar 2026 05:18:36 -0500 Subject: [PATCH 4/6] fix(tests): make bundle overlap coverage target-agnostic --- crates/crosspack-cli/src/tests.rs | 78 +++++++++++++++++++++++++------ 1 file changed, 65 insertions(+), 13 deletions(-) diff --git a/crates/crosspack-cli/src/tests.rs b/crates/crosspack-cli/src/tests.rs index a85c235..8e26c8f 100644 --- a/crates/crosspack-cli/src/tests.rs +++ b/crates/crosspack-cli/src/tests.rs @@ -5293,7 +5293,24 @@ requirement = "^14" let layout = test_layout(); layout.ensure_base_dirs().expect("must create dirs"); configure_ready_source(&layout, "official"); - write_signed_test_manifest(&layout, "official", "ripgrep", "14.1.0", None, None, &[]); + let primary_target = host_target_triple(); + let 
secondary_target = if primary_target == "x86_64-unknown-linux-gnu" { + "aarch64-apple-darwin" + } else { + "x86_64-unknown-linux-gnu" + }; + write_signed_test_manifest_with_targets( + &layout, + TestManifestSpec { + source_name: "official", + package_name: "ripgrep", + version: "14.1.0", + license: None, + homepage: None, + provides: &[], + targets: &[primary_target, secondary_target], + }, + ); let backend = select_metadata_backend(None, &layout).expect("backend must load"); let bundle = BundleDocument { @@ -5302,12 +5319,12 @@ requirement = "^14" roots: vec![ BundleRoot { name: "ripgrep".to_string(), - target: None, + target: Some(primary_target.to_string()), requirement: Some("^14".to_string()), }, BundleRoot { name: "ripgrep".to_string(), - target: Some("x86_64-unknown-linux-gnu".to_string()), + target: Some(secondary_target.to_string()), requirement: Some("^14".to_string()), }, ], @@ -8213,8 +8230,35 @@ sha256 = "abc" homepage: Option<&str>, provides: &[&str], ) { - let cache_root = registry_state_root(layout).join("cache").join(source_name); - let package_dir = cache_root.join("index").join(package_name); + write_signed_test_manifest_with_targets( + layout, + TestManifestSpec { + source_name, + package_name, + version, + license, + homepage, + provides, + targets: &["x86_64-unknown-linux-gnu"], + }, + ); + } + + struct TestManifestSpec<'a> { + source_name: &'a str, + package_name: &'a str, + version: &'a str, + license: Option<&'a str>, + homepage: Option<&'a str>, + provides: &'a [&'a str], + targets: &'a [&'a str], + } + + fn write_signed_test_manifest_with_targets(layout: &PrefixLayout, spec: TestManifestSpec<'_>) { + let cache_root = registry_state_root(layout) + .join("cache") + .join(spec.source_name); + let package_dir = cache_root.join("index").join(spec.package_name); std::fs::create_dir_all(&package_dir).expect("must create package directory"); let signing_key = test_signing_key(); @@ -8224,8 +8268,15 @@ sha256 = "abc" ) .expect("must write registry 
key"); - let manifest = manifest_toml(package_name, version, license, homepage, provides); - let manifest_path = package_dir.join(format!("{version}.toml")); + let manifest = manifest_toml( + spec.package_name, + spec.version, + spec.license, + spec.homepage, + spec.provides, + spec.targets, + ); + let manifest_path = package_dir.join(format!("{}.toml", spec.version)); std::fs::write(&manifest_path, manifest.as_bytes()).expect("must write manifest"); let signature = signing_key.sign(manifest.as_bytes()); @@ -8381,6 +8432,7 @@ install_commands = ["sh", "-c", "{install_script}"] license: Option<&str>, homepage: Option<&str>, provides: &[&str], + targets: &[&str], ) -> String { let mut manifest = format!("name = \"{package_name}\"\nversion = \"{version}\"\n"); if let Some(license) = license { @@ -8397,12 +8449,12 @@ install_commands = ["sh", "-c", "{install_script}"] .join(", "); manifest.push_str(&format!("provides = [{joined}]\n")); } - manifest.push_str(concat!( - "[[artifacts]]\n", - "target = \"x86_64-unknown-linux-gnu\"\n", - "url = \"https://example.test/artifact.tar.zst\"\n", - "sha256 = \"abc\"\n" - )); + for target in targets { + manifest.push_str("[[artifacts]]\n"); + manifest.push_str(&format!("target = \"{target}\"\n")); + manifest.push_str("url = \"https://example.test/artifact.tar.zst\"\n"); + manifest.push_str("sha256 = \"abc\"\n"); + } manifest } } From 62dbdeaf24932e016ca9d80b00f4391d0931c3aa Mon Sep 17 00:00:00 2001 From: Ian Pascoe Date: Mon, 2 Mar 2026 05:22:43 -0500 Subject: [PATCH 5/6] fix(tests): normalize missing index path assertion on windows --- crates/crosspack-registry/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/crosspack-registry/src/tests.rs b/crates/crosspack-registry/src/tests.rs index 6400979..0fc6c3a 100644 --- a/crates/crosspack-registry/src/tests.rs +++ b/crates/crosspack-registry/src/tests.rs @@ -1366,7 +1366,7 @@ fn 
community_recipe_catalog_rejects_missing_index_directory_for_listed_package() "expected missing index directory error, got: {rendered}" ); assert!( - rendered.contains("index/zsh"), + rendered.contains(&format!("index{}zsh", std::path::MAIN_SEPARATOR)), "expected deterministic missing directory path details, got: {rendered}" ); From dd3e0eca198b6e938dfd3d303233dd29b145cad6 Mon Sep 17 00:00:00 2001 From: Ian Pascoe Date: Mon, 2 Mar 2026 05:41:33 -0500 Subject: [PATCH 6/6] fix(upgrade): support build-from-source resolution --- crates/crosspack-cli/src/command_flows.rs | 53 +++++--- crates/crosspack-cli/src/dispatch.rs | 12 +- crates/crosspack-cli/src/main.rs | 2 + crates/crosspack-cli/src/tests.rs | 148 +++++++++++++++++++++- 4 files changed, 186 insertions(+), 29 deletions(-) diff --git a/crates/crosspack-cli/src/command_flows.rs b/crates/crosspack-cli/src/command_flows.rs index 4a82107..d79a59d 100644 --- a/crates/crosspack-cli/src/command_flows.rs +++ b/crates/crosspack-cli/src/command_flows.rs @@ -626,16 +626,21 @@ fn collect_cache_files_recursive( Ok(()) } +struct UpgradeCommandOptions<'a> { + dry_run: bool, + explain: bool, + build_from_source: bool, + provider_overrides: &'a BTreeMap, + interaction_policy: InstallInteractionPolicy, +} + fn run_upgrade_command( layout: &PrefixLayout, registry_root: Option<&Path>, spec: Option, - dry_run: bool, - explain: bool, - provider_overrides: &BTreeMap, - interaction_policy: InstallInteractionPolicy, + options: UpgradeCommandOptions<'_>, ) -> Result<()> { - ensure_explain_requires_dry_run("upgrade", dry_run, explain)?; + ensure_explain_requires_dry_run("upgrade", options.dry_run, options.explain)?; let output_style = current_output_style(); let renderer = TerminalRenderer::from_style(output_style); ensure_upgrade_command_ready(layout)?; @@ -652,7 +657,7 @@ fn run_upgrade_command( None => Some(resolve_transaction_snapshot_id(layout, "upgrade")?), }; - if dry_run { + if options.dry_run { let mut planned_changes = 
Vec::new(); let mut explainability = DependencyPolicyExplainability::default(); @@ -674,15 +679,15 @@ fn run_upgrade_command( &backend, &roots, installed_receipt.target.as_deref(), - provider_overrides, - false, + options.provider_overrides, + options.build_from_source, )?; enforce_no_downgrades(&receipts, &resolved, "upgrade")?; for package in &resolved { validate_install_preflight_for_resolved(layout, package, &receipts)?; } planned_changes.extend(build_planned_package_changes(&resolved, &receipts)?); - if explain { + if options.explain { merge_dependency_policy_explainability( &mut explainability, build_dependency_policy_explainability(&resolved, &receipts, &roots)?, @@ -704,13 +709,13 @@ fn run_upgrade_command( &backend, &plan.roots, plan.target.as_deref(), - provider_overrides, - false, + options.provider_overrides, false, + options.build_from_source, )?; enforce_no_downgrades(&receipts, &resolved, "upgrade")?; resolved_dependency_tokens.extend(plan_tokens); - if explain { + if options.explain { merge_dependency_policy_explainability( &mut explainability, build_dependency_policy_explainability( @@ -723,7 +728,10 @@ fn run_upgrade_command( grouped_resolved.push(resolved); } - validate_provider_overrides_used(provider_overrides, &resolved_dependency_tokens)?; + validate_provider_overrides_used( + options.provider_overrides, + &resolved_dependency_tokens, + )?; let overlap_check = grouped_resolved .iter() @@ -753,7 +761,7 @@ fn run_upgrade_command( for line in render_dry_run_output_lines( &preview, TransactionPreviewMode::DryRun, - explain.then_some(&explainability), + options.explain.then_some(&explainability), ) { println!("{line}"); } @@ -788,8 +796,8 @@ fn run_upgrade_command( &backend, &roots, installed_receipt.target.as_deref(), - provider_overrides, - false, + options.provider_overrides, + options.build_from_source, )?; let planned_dependency_overrides = build_planned_dependency_overrides(&resolved); enforce_no_downgrades(&receipts, &resolved, "upgrade")?; 
@@ -876,7 +884,7 @@ fn run_upgrade_command(
                 InstallResolvedOptions {
                     snapshot_id: snapshot_id.as_deref(),
                     force_redownload: false,
-                    interaction_policy,
+                    interaction_policy: options.interaction_policy,
                     install_progress_mode: current_install_progress_mode(output_style),
                 },
                 Some(&mut source_build_journal),
@@ -922,9 +930,9 @@ fn run_upgrade_command(
                 &backend,
                 &plan.roots,
                 plan.target.as_deref(),
-                provider_overrides,
-                false,
+                options.provider_overrides,
+                options.build_from_source,
             )?;

             enforce_no_downgrades(&receipts, &resolved, "upgrade")?;

@@ -947,7 +955,10 @@ fn run_upgrade_command(
             grouped_resolved.push(resolved);
         }

-        validate_provider_overrides_used(provider_overrides, &resolved_dependency_tokens)?;
+        validate_provider_overrides_used(
+            options.provider_overrides,
+            &resolved_dependency_tokens,
+        )?;

        let overlap_check = grouped_resolved
            .iter()
@@ -1048,7 +1059,7 @@ fn run_upgrade_command(
                 InstallResolvedOptions {
                     snapshot_id: snapshot_id.as_deref(),
                     force_redownload: false,
-                    interaction_policy,
+                    interaction_policy: options.interaction_policy,
                     install_progress_mode: current_install_progress_mode(output_style),
                 },
                 Some(&mut source_build_journal),
diff --git a/crates/crosspack-cli/src/dispatch.rs b/crates/crosspack-cli/src/dispatch.rs
index 43e0dd2..c4d564e 100644
--- a/crates/crosspack-cli/src/dispatch.rs
+++ b/crates/crosspack-cli/src/dispatch.rs
@@ -188,6 +188,7 @@ fn run_cli(cli: Cli) -> Result<()> {
             spec,
             dry_run,
             explain,
+            build_from_source,
             provider,
             escalation,
         } => {
@@ -200,10 +201,13 @@ fn run_cli(cli: Cli) -> Result<()> {
                 &layout,
                 cli.registry_root.as_deref(),
                 spec,
-                dry_run,
-                explain,
-                &provider_overrides,
-                interaction_policy,
+                UpgradeCommandOptions {
+                    dry_run,
+                    explain,
+                    build_from_source,
+                    provider_overrides: &provider_overrides,
+                    interaction_policy,
+                },
             )?;
         }
         Commands::Rollback { txid, escalation } => {
diff --git a/crates/crosspack-cli/src/main.rs b/crates/crosspack-cli/src/main.rs
index 4ba22a1..23dd380 100644
--- 
a/crates/crosspack-cli/src/main.rs +++ b/crates/crosspack-cli/src/main.rs @@ -476,6 +476,8 @@ enum Commands { dry_run: bool, #[arg(long)] explain: bool, + #[arg(long)] + build_from_source: bool, #[arg(long = "provider", value_name = "capability=package")] provider: Vec, #[command(flatten)] diff --git a/crates/crosspack-cli/src/tests.rs b/crates/crosspack-cli/src/tests.rs index 8e26c8f..a893179 100644 --- a/crates/crosspack-cli/src/tests.rs +++ b/crates/crosspack-cli/src/tests.rs @@ -149,10 +149,13 @@ mod tests { &layout, None, None, - false, - false, - &BTreeMap::new(), - InstallInteractionPolicy::default(), + UpgradeCommandOptions { + dry_run: false, + explain: false, + build_from_source: false, + provider_overrides: &BTreeMap::new(), + interaction_policy: InstallInteractionPolicy::default(), + }, ) .expect_err("active transaction should block upgrade command"); assert!( @@ -4115,6 +4118,21 @@ ripgrep-legacy = "*" } } + #[test] + fn cli_parses_upgrade_with_build_from_source_flag() { + let cli = Cli::try_parse_from(["crosspack", "upgrade", "ripgrep", "--build-from-source"]) + .expect("command must parse"); + + match cli.command { + Commands::Upgrade { + build_from_source, .. 
+ } => { + assert!(build_from_source); + } + other => panic!("unexpected command: {other:?}"), + } + } + #[test] fn cli_parses_bundle_export_with_optional_output_flag() { let cli = Cli::try_parse_from([ @@ -7436,6 +7454,81 @@ install_commands = ["cargo", "install", "--path", "."] assert_eq!(source_build.build_system, "cargo"); } + #[test] + fn upgrade_build_from_source_opt_in_unblocks_source_only_upgrade_resolution() { + let layout = test_layout(); + layout.ensure_base_dirs().expect("must create dirs"); + configure_ready_source(&layout, "official"); + + let host_target = host_target_triple(); + let other_target = if host_target == "x86_64-unknown-linux-gnu" { + "aarch64-apple-darwin" + } else { + "x86_64-unknown-linux-gnu" + }; + write_signed_source_build_metadata_manifest( + &layout, + "official", + "demo", + "2.0.0", + other_target, + ); + write_install_receipt( + &layout, + &InstallReceipt { + name: "demo".to_string(), + version: "1.0.0".to_string(), + dependencies: Vec::new(), + target: Some(host_target.to_string()), + artifact_url: None, + artifact_sha256: None, + cache_path: None, + exposed_bins: Vec::new(), + exposed_completions: Vec::new(), + snapshot_id: None, + install_mode: InstallMode::Managed, + install_reason: InstallReason::Root, + install_status: "installed".to_string(), + installed_at_unix: 1, + }, + ) + .expect("must write installed receipt"); + + let err = run_upgrade_command( + &layout, + None, + Some("demo".to_string()), + UpgradeCommandOptions { + dry_run: true, + explain: false, + build_from_source: false, + provider_overrides: &BTreeMap::new(), + interaction_policy: InstallInteractionPolicy::default(), + }, + ) + .expect_err("upgrade should require explicit source-build opt-in"); + assert!( + err.to_string().contains("rerun with --build-from-source"), + "unexpected error: {err}" + ); + + run_upgrade_command( + &layout, + None, + Some("demo".to_string()), + UpgradeCommandOptions { + dry_run: true, + explain: false, + build_from_source: true, 
+ provider_overrides: &BTreeMap::new(), + interaction_policy: InstallInteractionPolicy::default(), + }, + ) + .expect("upgrade dry-run should resolve source-build install plan when opted in"); + + let _ = std::fs::remove_dir_all(layout.prefix()); + } + #[test] fn source_build_metadata_rejects_unknown_fields_fail_closed() { let err = PackageManifest::from_toml_str( @@ -8287,6 +8380,53 @@ sha256 = "abc" .expect("must write signature"); } + fn write_signed_source_build_metadata_manifest( + layout: &PrefixLayout, + source_name: &str, + package_name: &str, + version: &str, + artifact_target: &str, + ) { + let cache_root = registry_state_root(layout).join("cache").join(source_name); + let package_dir = cache_root.join("index").join(package_name); + std::fs::create_dir_all(&package_dir).expect("must create package directory"); + + let signing_key = test_signing_key(); + std::fs::write( + cache_root.join("registry.pub"), + public_key_hex(&signing_key), + ) + .expect("must write registry key"); + + let manifest = format!( + r#" +name = "{package_name}" +version = "{version}" + +[[artifacts]] +target = "{artifact_target}" +url = "https://example.test/{package_name}-{version}-{artifact_target}.tar.zst" +sha256 = "abc123" + +[source_build] +url = "https://example.test/{package_name}-{version}-src.tar.gz" +archive_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" +build_system = "cargo" +build_commands = ["cargo", "build", "--release"] +install_commands = ["cargo", "install", "--path", "."] +"# + ); + let manifest_path = package_dir.join(format!("{version}.toml")); + std::fs::write(&manifest_path, manifest.as_bytes()).expect("must write manifest"); + + let signature = signing_key.sign(manifest.as_bytes()); + std::fs::write( + manifest_path.with_extension("toml.sig"), + hex::encode(signature.to_bytes()), + ) + .expect("must write signature"); + } + #[cfg(unix)] fn write_signed_source_build_manifest( layout: &PrefixLayout,