diff --git a/docs/dev-tools/mise-lock.md b/docs/dev-tools/mise-lock.md
index 8713de1f9c..a80c1b1c42 100644
--- a/docs/dev-tools/mise-lock.md
+++ b/docs/dev-tools/mise-lock.md
@@ -281,6 +281,34 @@ mise install
 mise use node@$(jq -r '.engines.node' package.json)
 ```
 
+## Provenance and Security
+
+When `mise lock` generates a lockfile, it records a provenance type (e.g., `slsa`, `cosign`, `minisign`, `github-attestations`) for each tool. For the **current platform**, mise downloads the artifact and performs full cryptographic verification at lock time -- ensuring the provenance entry in the lockfile is backed by actual verification, not just registry metadata. This applies to both the aqua and github backends. For cross-platform entries, provenance is detected from registry metadata without verification (since the artifact may not be runnable on the current machine).
+
+By default, when `mise install` sees a lockfile with both a checksum and a provenance entry, it trusts the lockfile and skips re-verification. This avoids redundant API calls (e.g., GitHub attestation queries) which can cause rate limit issues in CI. Since the current platform's provenance was already verified during `mise lock`, this is safe.
+
+For additional security, you can force provenance re-verification on every install:
+
+```toml
+[settings]
+locked_verify_provenance = true
+```
+
+Or via environment variable:
+
+```sh
+MISE_LOCKED_VERIFY_PROVENANCE=1 mise install
+```
+
+This is also automatically enabled in [paranoid mode](/paranoid.html):
+
+```toml
+[settings]
+paranoid = true
+```
+
+When enabled, every `mise install` will cryptographically verify provenance regardless of what the lockfile contains, ensuring the artifact was built by a trusted CI pipeline.
+ ## Minimum Release Age In addition to lockfiles, mise supports the [`install_before`](/configuration/settings.html#install_before) setting to limit supply chain risk by only installing versions that have been available for a minimum amount of time: diff --git a/docs/paranoid.md b/docs/paranoid.md index 2e1127a771..604ced9e66 100644 --- a/docs/paranoid.md +++ b/docs/paranoid.md @@ -56,6 +56,21 @@ Normally mise uses HTTP because loading the TLS module takes about 10ms and this affects commonly used commands so it is a noticeably delay. In paranoid mode, all endpoints will be fetched over HTTPS. +## Provenance re-verification + +Normally, when a lockfile contains both a checksum and a provenance entry for a tool, +`mise install` trusts the lockfile and skips provenance re-verification to avoid +redundant API calls (e.g., to GitHub). This is safe when you trust the lockfile was +generated correctly. + +In paranoid mode, `mise install` always re-verifies provenance (SLSA, cosign, minisign, +GitHub artifact attestations) at install time, even when the lockfile already has a +provenance entry. This ensures that cryptographic verification happens on every install, +not just when the lockfile is first generated. + +This behavior can also be enabled independently via the +[`locked_verify_provenance`](/configuration/settings.html#locked_verify_provenance) setting. + ## More? 
If you have suggestions for more that could be added to paranoid, please let diff --git a/e2e-win/npm_backend.Tests.ps1 b/e2e-win/npm_backend.Tests.ps1 index b998965bc9..e6fad6fb37 100644 --- a/e2e-win/npm_backend.Tests.ps1 +++ b/e2e-win/npm_backend.Tests.ps1 @@ -3,8 +3,8 @@ Describe 'npm_backend' { mise x node@24.4.1 npm:prettier@3.6.2 -- prettier --version | Should -be "3.6.2" } It 'installs npm:cowsay 1.6.0 with bun' { - $env:MISE_NPM_BUN = "true" + $env:MISE_NPM_PACKAGE_MANAGER = "bun" mise x node@24.4.1 bun@1.2.19 npm:cowsay@1.6.0 -- cowsay --version | Should -be "1.6.0" - Remove-Item Env:MISE_NPM_BUN + Remove-Item Env:MISE_NPM_PACKAGE_MANAGER } } diff --git a/e2e/backend/test_aqua_cosign b/e2e/backend/test_aqua_cosign index 839b5e721e..93b5ace888 100644 --- a/e2e/backend/test_aqua_cosign +++ b/e2e/backend/test_aqua_cosign @@ -1,23 +1,26 @@ #!/usr/bin/env bash # Test native Cosign verification for aqua packages +# Uses fork-cleaner which has bundle-based cosign (native verification), +# unlike sops which only has opts-based cosign (CLI pass-through). set -euo pipefail export MISE_EXPERIMENTAL=1 export MISE_AQUA_COSIGN=true export MISE_AQUA_SLSA=false +export MISE_AQUA_GITHUB_ATTESTATIONS=false echo "=== Testing Native Cosign Verification ===" -# Test: Install sops which has cosign signatures configured (v3.8.0+) -echo "Installing sops with native Cosign verification..." +# Test: Install fork-cleaner which has cosign bundle verification configured +echo "Installing fork-cleaner with native Cosign verification..." 
# Capture the installation output to verify the native verification is being used -output=$(mise install aqua:getsops/sops@3.9.0 2>&1) +output=$(mise install aqua:caarlos0/fork-cleaner@2.4.0 2>&1) echo "$output" # Verify the native Cosign verification was used -if echo "$output" | grep -q "verify checksums with cosign"; then +if echo "$output" | grep -q "Cosign"; then echo "✅ Native Cosign verification was used" else echo "❌ ERROR: Cosign verification message not found in output" @@ -27,11 +30,11 @@ else fi # Verify the tool works -assert_contains "mise x aqua:getsops/sops@3.9.0 -- sops --version" "3.9.0" -echo "✓ sops installed and working correctly" +assert_contains "mise x aqua:caarlos0/fork-cleaner@2.4.0 -- fork-cleaner --version" "2.4.0" +echo "✓ fork-cleaner installed and working correctly" # Cleanup -mise uninstall aqua:getsops/sops@3.9.0 || true +mise uninstall aqua:caarlos0/fork-cleaner@2.4.0 || true echo "" echo "=== Native Cosign Verification Test Passed ✓ ===" diff --git a/e2e/backend/test_backend_missing_deps b/e2e/backend/test_backend_missing_deps index bb77baad68..42f679231e 100644 --- a/e2e/backend/test_backend_missing_deps +++ b/e2e/backend/test_backend_missing_deps @@ -69,11 +69,11 @@ test_npm() { # Test with bun mode enabled but npm still missing echo "Testing npm backend with bun mode enabled but npm missing..." 
- export MISE_NPM_BUN=true + export MISE_NPM_PACKAGE_MANAGER=bun test_backend_warning "npm (bun mode)" "mise ls-remote npm:test-package" "npm may be required but was not found" output=$(mise ls-remote npm:test-package 2>&1 || true) test_error_message "npm (bun mode) shows npm is required for queries" "$output" "npm is required for querying package information" - unset MISE_NPM_BUN + unset MISE_NPM_PACKAGE_MANAGER } # Test cargo backend diff --git a/e2e/lockfile/test_lockfile_provenance b/e2e/lockfile/test_lockfile_provenance index 1f368fc670..f6e62eb19e 100644 --- a/e2e/lockfile/test_lockfile_provenance +++ b/e2e/lockfile/test_lockfile_provenance @@ -16,7 +16,8 @@ EOF mise lock --platform "$PLATFORM" assert "test -f mise.lock" # sops has SLSA provenance configured in the aqua registry -assert_contains "cat mise.lock" 'provenance = "slsa"' +# Lock-time verification records the SLSA provenance URL (intoto.jsonl) +assert_contains "cat mise.lock" 'provenance.slsa' echo "=== Testing provenance downgrade attack detection ===" rm -f mise.lock mise.toml diff --git a/schema/mise.json b/schema/mise.json index 4f514296e6..28029b9867 100644 --- a/schema/mise.json +++ b/schema/mise.json @@ -922,6 +922,11 @@ "description": "Require lockfile URLs to be present during installation.", "type": "boolean" }, + "locked_verify_provenance": { + "default": false, + "description": "Re-verify provenance at install time even when the lockfile already has provenance.", + "type": "boolean" + }, "lockfile": { "description": "Create and read lockfiles for tool versions.", "type": "boolean" diff --git a/settings.toml b/settings.toml index 16c9cac497..f70e00be0d 100644 --- a/settings.toml +++ b/settings.toml @@ -993,6 +993,25 @@ Equivalent to passing `--locked` to `mise install`. env = "MISE_LOCKED" type = "Bool" +[locked_verify_provenance] +default = false +description = "Re-verify provenance at install time even when the lockfile already has provenance." 
+docs = """ +When enabled, `mise install` will re-verify provenance (SLSA, cosign, minisign, +GitHub artifact attestations) at install time even when the lockfile already +contains both a checksum and a provenance entry. + +By default, when a lockfile has a checksum and provenance type recorded, +`mise install` trusts the lockfile and skips re-verification to avoid redundant +API calls (e.g., to GitHub). Enabling this setting forces re-verification every +time, which provides stronger security guarantees at the cost of additional +network requests. + +This is automatically enabled when `paranoid` is set to `true`. +""" +env = "MISE_LOCKED_VERIFY_PROVENANCE" +type = "Bool" + [lockfile] description = "Create and read lockfiles for tool versions." docs = """ diff --git a/src/backend/aqua.rs b/src/backend/aqua.rs index f0f3858c61..c9022885bf 100644 --- a/src/backend/aqua.rs +++ b/src/backend/aqua.rs @@ -14,6 +14,7 @@ use crate::path::{Path, PathBuf, PathExt}; use crate::plugins::VERSION_REGEX; use crate::registry::REGISTRY; use crate::toolset::ToolVersion; +use crate::ui::progress_report::SingleReport; use crate::{ aqua::aqua_registry_wrapper::{ AQUA_REGISTRY, AquaChecksum, AquaChecksumType, AquaMinisignType, AquaPackage, @@ -657,7 +658,38 @@ impl Backend for AquaBackend { }; // Detect provenance from aqua registry config - let provenance = self.detect_provenance_type(&pkg); + let mut provenance = self.detect_provenance_type(&pkg); + + // For the current platform, verify provenance cryptographically at lock time. + // This ensures the lockfile's provenance entry is backed by actual verification, + // not just registry metadata. Cross-platform entries remain detection-only. 
+ if provenance.is_some() + && target.is_current() + && let Some(ref artifact_url) = url + { + match self + .verify_provenance_at_lock_time( + &pkg, + &v, + artifact_url, + provenance.as_ref().unwrap(), + ) + .await + { + Ok(verified) => provenance = Some(verified), + Err(e) => { + // Clear provenance so install-time verification will run. + // If we kept the unverified provenance, has_lockfile_integrity + // would be true and verify_provenance() would be skipped. + warn!( + "lock-time provenance verification failed for {}, \ + will be verified at install time: {e}", + self.id + ); + provenance = None; + } + } + } Ok(PlatformInfo { url, @@ -672,18 +704,30 @@ impl AquaBackend { /// Detect provenance type from aqua registry package config. /// /// Returns the highest-priority provenance type that is configured and - /// enabled for the package. GithubAttestations is NOT detected here - /// because it requires downloading the artifact to query the attestation - /// API — it is recorded at install-time after successful verification. + /// enabled for the package, based on the `ProvenanceType` priority order: + /// GithubAttestations (3) > Slsa (2) > Cosign (1) > Minisign (0). /// - /// NOTE: For packages with both `slsa_provenance` and `github_artifact_attestations`, - /// this returns `Slsa`. Subsequent `mise install` will enforce SLSA verification even - /// though attestations would also work. If SLSA verification fails (missing asset, - /// format change), the lockfile entry must be deleted and re-locked. + /// This detection is based on registry metadata only — no cryptographic + /// verification happens here. Actual verification occurs at install time + /// (and is always performed when `locked_verify_provenance` or `paranoid` + /// is enabled). 
fn detect_provenance_type(&self, pkg: &AquaPackage) -> Option { let settings = Settings::get(); - // Check for SLSA provenance (highest priority available at lock-time) + // Check for GitHub artifact attestations (highest priority) + // The registry metadata (enabled flag, signer_workflow) is sufficient for + // detection at lock-time. Actual cryptographic verification happens at + // install time (always when locked_verify_provenance/paranoid is enabled, + // or on first install when the lockfile doesn't yet have provenance). + if settings.github_attestations + && settings.aqua.github_attestations + && let Some(att) = &pkg.github_artifact_attestations + && att.enabled != Some(false) + { + return Some(ProvenanceType::GithubAttestations); + } + + // Check for SLSA provenance if settings.slsa && settings.aqua.slsa && let Some(slsa) = &pkg.slsa_provenance @@ -717,6 +761,318 @@ impl AquaBackend { None } + /// Verify provenance at lock time by downloading the artifact to a temp directory + /// and running the appropriate cryptographic verification. Only called for the + /// current platform during `mise lock`. + async fn verify_provenance_at_lock_time( + &self, + pkg: &AquaPackage, + v: &str, + artifact_url: &str, + detected: &ProvenanceType, + ) -> Result { + let tmp_dir = tempfile::tempdir()?; + let filename = get_filename_from_url(artifact_url); + let artifact_path = tmp_dir.path().join(&filename); + + info!( + "downloading artifact for lock-time provenance verification: {}", + filename + ); + HTTP.download_file(artifact_url, &artifact_path, None) + .await?; + + match detected { + ProvenanceType::GithubAttestations => { + self.run_github_attestation_check(&artifact_path, pkg) + .await?; + Ok(ProvenanceType::GithubAttestations) + } + ProvenanceType::Slsa { .. 
} => { + let provenance_url = self + .run_slsa_check(&artifact_path, pkg, v, tmp_dir.path(), None) + .await?; + Ok(ProvenanceType::Slsa { + url: Some(provenance_url), + }) + } + ProvenanceType::Minisign => { + self.run_minisign_check(&artifact_path, &filename, pkg, v, tmp_dir.path(), None) + .await?; + Ok(ProvenanceType::Minisign) + } + ProvenanceType::Cosign => { + let checksum_config = pkg + .checksum + .as_ref() + .wrap_err("cosign provenance detected but no checksum config found")?; + let checksum_path = self + .download_checksum_file(checksum_config, pkg, v, tmp_dir.path(), None) + .await?; + self.run_cosign_check(&checksum_path, pkg, v, tmp_dir.path(), None) + .await?; + Ok(ProvenanceType::Cosign) + } + } + } + + // --- Shared verification helpers used by both lock-time and install-time --- + + /// Run GitHub artifact attestation verification against an already-downloaded artifact. + async fn run_github_attestation_check( + &self, + artifact_path: &Path, + pkg: &AquaPackage, + ) -> Result<()> { + let signer_workflow = pkg + .github_artifact_attestations + .as_ref() + .and_then(|att| att.signer_workflow.clone()); + + match sigstore_verification::verify_github_attestation( + artifact_path, + &pkg.repo_owner, + &pkg.repo_name, + env::GITHUB_TOKEN.as_deref(), + signer_workflow.as_deref(), + ) + .await + { + Ok(true) => { + debug!( + "GitHub attestations verified for {}/{}", + pkg.repo_owner, pkg.repo_name + ); + Ok(()) + } + Ok(false) => Err(eyre!( + "GitHub artifact attestations verification returned false" + )), + Err(e) => Err(eyre!( + "GitHub artifact attestations verification failed: {e}" + )), + } + } + + /// Download SLSA provenance file and verify against an already-downloaded artifact. + /// Returns the provenance download URL on success. 
+ async fn run_slsa_check( + &self, + artifact_path: &Path, + pkg: &AquaPackage, + v: &str, + download_dir: &Path, + pr: Option<&dyn SingleReport>, + ) -> Result { + let slsa = pkg + .slsa_provenance + .as_ref() + .wrap_err("SLSA provenance detected but no config found")?; + + let mut slsa_pkg = pkg.clone(); + (slsa_pkg.repo_owner, slsa_pkg.repo_name) = + resolve_repo_info(slsa.repo_owner.as_ref(), slsa.repo_name.as_ref(), pkg); + + let (provenance_path, provenance_url) = match slsa.r#type.as_deref().unwrap_or_default() { + "github_release" => { + let asset_strs = slsa.asset_strs(pkg, v, os(), arch())?; + let (url, _) = self.github_release_asset(&slsa_pkg, v, asset_strs).await?; + let path = download_dir.join(get_filename_from_url(&url)); + HTTP.download_file(&url, &path, pr).await?; + (path, url) + } + "http" => { + let url = slsa.url(pkg, v, os(), arch())?; + let path = download_dir.join(get_filename_from_url(&url)); + HTTP.download_file(&url, &path, pr).await?; + (path, url) + } + t => return Err(eyre!("unsupported slsa type: {t}")), + }; + + match sigstore_verification::verify_slsa_provenance(artifact_path, &provenance_path, 1u8) + .await + { + Ok(true) => { + debug!("SLSA provenance verified"); + Ok(provenance_url) + } + Ok(false) => Err(eyre!("SLSA provenance verification failed")), + Err(e) => Err(e.into()), + } + } + + /// Download minisign signature and verify against an already-downloaded artifact. 
+ async fn run_minisign_check( + &self, + artifact_path: &Path, + artifact_filename: &str, + pkg: &AquaPackage, + v: &str, + download_dir: &Path, + pr: Option<&dyn SingleReport>, + ) -> Result<()> { + let minisign_config = pkg + .minisign + .as_ref() + .wrap_err("minisign provenance detected but no config found")?; + + let sig_path = match minisign_config._type() { + AquaMinisignType::GithubRelease => { + let asset = minisign_config.asset(pkg, v, os(), arch())?; + let (repo_owner, repo_name) = resolve_repo_info( + minisign_config.repo_owner.as_ref(), + minisign_config.repo_name.as_ref(), + pkg, + ); + let url = github::get_release(&format!("{repo_owner}/{repo_name}"), v) + .await? + .assets + .into_iter() + .find(|a| a.name == asset) + .map(|a| a.browser_download_url) + .wrap_err_with(|| format!("no asset found for minisign: {asset}"))?; + let path = download_dir.join(&asset); + HTTP.download_file(&url, &path, pr).await?; + path + } + AquaMinisignType::Http => { + let url = minisign_config.url(pkg, v, os(), arch())?; + let path = download_dir.join(format!("{artifact_filename}.minisig")); + HTTP.download_file(&url, &path, pr).await?; + path + } + }; + let data = file::read(artifact_path)?; + let sig = file::read_to_string(&sig_path)?; + minisign::verify( + &minisign_config.public_key(pkg, v, os(), arch())?, + &data, + &sig, + )?; + debug!("minisign verified"); + Ok(()) + } + + /// Download cosign key/signature/bundle and verify checksums file. + /// The checksum file must already be downloaded at `checksum_path`. 
+ async fn run_cosign_check( + &self, + checksum_path: &Path, + pkg: &AquaPackage, + v: &str, + download_dir: &Path, + pr: Option<&dyn SingleReport>, + ) -> Result<()> { + let cosign = pkg + .checksum + .as_ref() + .and_then(|c| c.cosign.as_ref()) + .wrap_err("cosign provenance detected but no config found")?; + + if let Some(key) = &cosign.key { + let mut key_pkg = pkg.clone(); + (key_pkg.repo_owner, key_pkg.repo_name) = + resolve_repo_info(key.repo_owner.as_ref(), key.repo_name.as_ref(), pkg); + let key_url = match key.r#type.as_deref().unwrap_or_default() { + "github_release" => { + let asset_strs = key.asset_strs(pkg, v, os(), arch())?; + self.github_release_asset(&key_pkg, v, asset_strs).await?.0 + } + "http" => key.url(pkg, v, os(), arch())?, + t => return Err(eyre!("unsupported cosign key type: {t}")), + }; + let key_path = download_dir.join(get_filename_from_url(&key_url)); + HTTP.download_file(&key_url, &key_path, pr).await?; + + let sig_path = if let Some(signature) = &cosign.signature { + let mut sig_pkg = pkg.clone(); + (sig_pkg.repo_owner, sig_pkg.repo_name) = resolve_repo_info( + signature.repo_owner.as_ref(), + signature.repo_name.as_ref(), + pkg, + ); + let sig_url = match signature.r#type.as_deref().unwrap_or_default() { + "github_release" => { + let asset_strs = signature.asset_strs(pkg, v, os(), arch())?; + self.github_release_asset(&sig_pkg, v, asset_strs).await?.0 + } + "http" => signature.url(pkg, v, os(), arch())?, + t => return Err(eyre!("unsupported cosign signature type: {t}")), + }; + let path = download_dir.join(get_filename_from_url(&sig_url)); + HTTP.download_file(&sig_url, &path, pr).await?; + path + } else { + checksum_path.with_extension("sig") + }; + + match sigstore_verification::verify_cosign_signature_with_key( + checksum_path, + &sig_path, + &key_path, + ) + .await + { + Ok(true) => { + debug!("cosign (key) verified"); + Ok(()) + } + Ok(false) => Err(eyre!("cosign key-based verification returned false")), + Err(e) => 
Err(eyre!("cosign key-based verification failed: {e}")), + } + } else if let Some(bundle) = &cosign.bundle { + let mut bundle_pkg = pkg.clone(); + (bundle_pkg.repo_owner, bundle_pkg.repo_name) = + resolve_repo_info(bundle.repo_owner.as_ref(), bundle.repo_name.as_ref(), pkg); + let bundle_url = match bundle.r#type.as_deref().unwrap_or_default() { + "github_release" => { + let asset_strs = bundle.asset_strs(pkg, v, os(), arch())?; + self.github_release_asset(&bundle_pkg, v, asset_strs) + .await? + .0 + } + "http" => bundle.url(pkg, v, os(), arch())?, + t => return Err(eyre!("unsupported cosign bundle type: {t}")), + }; + let bundle_path = download_dir.join(get_filename_from_url(&bundle_url)); + HTTP.download_file(&bundle_url, &bundle_path, pr).await?; + + match sigstore_verification::verify_cosign_signature(checksum_path, &bundle_path).await + { + Ok(true) => { + debug!("cosign (bundle) verified"); + Ok(()) + } + Ok(false) => Err(eyre!("cosign bundle-based verification returned false")), + Err(e) => Err(eyre!("cosign bundle-based verification failed: {e}")), + } + } else { + Err(eyre!("cosign detected but no key or bundle configured")) + } + } + + /// Download checksum file to the given directory. 
+ async fn download_checksum_file( + &self, + checksum: &AquaChecksum, + pkg: &AquaPackage, + v: &str, + download_dir: &Path, + pr: Option<&dyn SingleReport>, + ) -> Result { + let url = match checksum._type() { + AquaChecksumType::GithubRelease => { + let asset_strs = checksum.asset_strs(pkg, v, os(), arch())?; + self.github_release_asset(pkg, v, asset_strs).await?.0 + } + AquaChecksumType::Http => checksum.url(pkg, v, os(), arch())?, + }; + let path = download_dir.join(get_filename_from_url(&url)); + HTTP.download_file(&url, &path, pr).await?; + Ok(path) + } + pub fn from_arg(ba: BackendArg) -> Self { let full = ba.full_without_opts(); let mut id = full.split_once(":").unwrap_or(("", &full)).1; @@ -966,24 +1322,6 @@ impl AquaBackend { Ok(checksum_str.to_string()) } - /// Download a URL to a path, or convert a local path string to PathBuf. - /// Returns the path where the file is located. - async fn download_url_to_path( - &self, - url: &str, - download_path: &Path, - ctx: &InstallContext, - ) -> Result { - if url.starts_with("http") { - let path = download_path.join(get_filename_from_url(url)); - HTTP.download_file(url, &path, Some(ctx.pr.as_ref())) - .await?; - Ok(path) - } else { - Ok(PathBuf::from(url)) - } - } - async fn download( &self, ctx: &InstallContext, @@ -1014,12 +1352,19 @@ impl AquaBackend { // by the checksum, so re-verifying attestations would just be redundant API calls. // However, still check that the recorded provenance type's setting is enabled — // disabling a verification setting with a provenance-bearing lockfile is a downgrade. + // + // When locked_verify_provenance is enabled (or paranoid mode is on), always + // re-verify provenance at install time regardless of what the lockfile contains. + // This closes the gap where lock-time detection records provenance from registry + // metadata without cryptographic verification. 
+ let settings = Settings::get(); + let force_verify = settings.force_provenance_verify(); let platform_key = self.get_platform_key(); let has_lockfile_integrity = tv .lock_platforms .get(&platform_key) .is_some_and(|pi| pi.checksum.is_some() && pi.provenance.is_some()); - if has_lockfile_integrity { + if has_lockfile_integrity && !force_verify { self.ensure_provenance_setting_enabled(tv, &platform_key)?; } else { self.verify_provenance(ctx, tv, pkg, v, filename).await?; @@ -1204,41 +1549,16 @@ impl AquaBackend { } ctx.pr.set_message("verify minisign".to_string()); debug!("minisign: {:?}", minisign); - let sig_path = match minisign._type() { - AquaMinisignType::GithubRelease => { - let asset = minisign.asset(pkg, v, os(), arch())?; - let (repo_owner, repo_name) = resolve_repo_info( - minisign.repo_owner.as_ref(), - minisign.repo_name.as_ref(), - pkg, - ); - let url = github::get_release(&format!("{repo_owner}/{repo_name}"), v) - .await? - .assets - .into_iter() - .find(|a| a.name == asset) - .map(|a| a.browser_download_url); - if let Some(url) = url { - let path = tv.download_path().join(asset); - HTTP.download_file(&url, &path, Some(ctx.pr.as_ref())) - .await?; - path - } else { - warn!("no asset found for minisign of {tv}: {asset}"); - return Ok(()); - } - } - AquaMinisignType::Http => { - let url = minisign.url(pkg, v, os(), arch())?; - let path = tv.download_path().join(filename).with_extension(".minisig"); - HTTP.download_file(&url, &path, Some(ctx.pr.as_ref())) - .await?; - path - } - }; - let data = file::read(tv.download_path().join(filename))?; - let sig = file::read_to_string(sig_path)?; - minisign::verify(&minisign.public_key(pkg, v, os(), arch())?, &data, &sig)?; + let artifact_path = tv.download_path().join(filename); + self.run_minisign_check( + &artifact_path, + filename, + pkg, + v, + &tv.download_path(), + Some(ctx.pr.as_ref()), + ) + .await?; // Record minisign provenance if no higher-priority verification already recorded let platform_key = 
self.get_platform_key(); @@ -1269,95 +1589,26 @@ impl AquaBackend { } ctx.pr.set_message("verify slsa".to_string()); - - // Download the provenance file - let mut slsa_pkg = pkg.clone(); - (slsa_pkg.repo_owner, slsa_pkg.repo_name) = - resolve_repo_info(slsa.repo_owner.as_ref(), slsa.repo_name.as_ref(), pkg); - - let (provenance_path, provenance_download_url) = - match slsa.r#type.as_deref().unwrap_or_default() { - "github_release" => { - let asset_strs = slsa.asset_strs(pkg, v, os(), arch())?; - if asset_strs.is_empty() { - warn!("no asset configured for slsa verification of {tv}"); - return Ok(()); - } - match self.github_release_asset(&slsa_pkg, v, asset_strs).await { - Ok((url, _)) => { - let asset_filename = get_filename_from_url(&url); - let path = tv.download_path().join(asset_filename); - HTTP.download_file(&url, &path, Some(ctx.pr.as_ref())) - .await?; - (path, url) - } - Err(e) => { - warn!("no asset found for slsa verification of {tv}: {e}"); - return Ok(()); - } - } - } - "http" => { - let url = slsa.url(pkg, v, os(), arch())?; - let path = tv.download_path().join(get_filename_from_url(&url)); - HTTP.download_file(&url, &path, Some(ctx.pr.as_ref())) - .await?; - (path, url) - } - t => { - warn!("unsupported slsa type: {t}"); - return Ok(()); - } - }; - let artifact_path = tv.download_path().join(filename); + let provenance_url = self + .run_slsa_check( + &artifact_path, + pkg, + v, + &tv.download_path(), + Some(ctx.pr.as_ref()), + ) + .await?; - // Use native sigstore-verification crate for SLSA verification - // Default to SLSA level 1 (sops provides level 1, newer tools provide level 2+) - let min_level = 1u8; - - match sigstore_verification::verify_slsa_provenance( - &artifact_path, - &provenance_path, - min_level, - ) - .await - { - Ok(true) => { - ctx.pr - .set_message(format!("✓ SLSA provenance verified (level {})", min_level)); - debug!( - "SLSA provenance verified successfully for {tv} at level {}", - min_level - ); - // Record provenance in 
lockfile only if not already set by a - // higher-priority verification (github-attestations runs first) - let platform_key = self.get_platform_key(); - let pi = tv.lock_platforms.entry(platform_key).or_default(); - if pi.provenance.is_none() { - pi.provenance = Some(ProvenanceType::Slsa { - url: Some(provenance_download_url.clone()), - }); - } - } - Ok(false) => { - return Err(eyre!("SLSA provenance verification failed for {tv}")); - } - Err(e) => { - // Use proper error type matching instead of string matching - match &e { - sigstore_verification::AttestationError::NoAttestations => { - // SLSA verification was explicitly configured but attestations are missing - // This should be treated as a security failure, not a warning - return Err(eyre!( - "SLSA verification failed for {tv}: Package configuration requires SLSA provenance but no attestations found" - )); - } - _ => { - return Err(eyre!("SLSA verification error for {tv}: {e}")); - } - } - } + ctx.pr.set_message("✓ SLSA provenance verified".to_string()); + // Record provenance in lockfile only if not already set by a + // higher-priority verification (github-attestations runs first) + let platform_key = self.get_platform_key(); + let pi = tv.lock_platforms.entry(platform_key).or_default(); + if pi.provenance.is_none() { + pi.provenance = Some(ProvenanceType::Slsa { + url: Some(provenance_url), + }); } } Ok(()) @@ -1386,49 +1637,16 @@ impl AquaBackend { ctx.pr .set_message("verify GitHub artifact attestations".to_string()); - let artifact_path = tv.download_path().join(filename); + self.run_github_attestation_check(&artifact_path, pkg) + .await?; - // Get expected workflow from registry - let signer_workflow = pkg - .github_artifact_attestations - .as_ref() - .and_then(|att| att.signer_workflow.clone()); - - match sigstore_verification::verify_github_attestation( - &artifact_path, - &pkg.repo_owner, - &pkg.repo_name, - env::GITHUB_TOKEN.as_deref(), - signer_workflow.as_deref(), - ) - .await - { - Ok(true) => { 
- ctx.pr - .set_message("✓ GitHub artifact attestations verified".to_string()); - debug!("GitHub artifact attestations verified successfully for {tv}"); - let platform_key = self.get_platform_key(); - let pi = tv.lock_platforms.entry(platform_key).or_default(); - if pi.provenance.is_none() { - pi.provenance = Some(ProvenanceType::GithubAttestations); - } - } - Ok(false) => { - return Err(eyre!( - "GitHub artifact attestations verification returned false for {tv}" - )); - } - Err(sigstore_verification::AttestationError::NoAttestations) => { - return Err(eyre!( - "No GitHub artifact attestations found for {tv}, but they are expected per aqua registry configuration" - )); - } - Err(e) => { - return Err(eyre!( - "GitHub artifact attestations verification failed for {tv}: {e}" - )); - } + ctx.pr + .set_message("✓ GitHub artifact attestations verified".to_string()); + let platform_key = self.get_platform_key(); + let pi = tv.lock_platforms.entry(platform_key).or_default(); + if pi.provenance.is_none() { + pi.provenance = Some(ProvenanceType::GithubAttestations); } } @@ -1453,168 +1671,26 @@ impl AquaBackend { return Ok(()); } + // Opts-only config (no key or bundle) — nothing to verify natively + if cosign.key.is_none() && cosign.bundle.is_none() { + debug!("cosign for {tv} uses opts-only config, skipping native verification"); + return Ok(()); + } + ctx.pr .set_message("verify checksums with cosign".to_string()); + self.run_cosign_check(checksum_path, pkg, v, download_path, Some(ctx.pr.as_ref())) + .await?; - // Use native sigstore-verification crate - if let Some(key) = &cosign.key { - // Key-based verification - let mut key_pkg = pkg.clone(); - (key_pkg.repo_owner, key_pkg.repo_name) = - resolve_repo_info(key.repo_owner.as_ref(), key.repo_name.as_ref(), pkg); - let key_arg = match key.r#type.as_deref().unwrap_or_default() { - "github_release" => { - let asset_strs = key.asset_strs(pkg, v, os(), arch())?; - if asset_strs.is_empty() { - String::new() - } else { - 
self.github_release_asset(&key_pkg, v, asset_strs).await?.0 - } - } - "http" => key.url(pkg, v, os(), arch())?, - t => { - warn!( - "unsupported cosign key type for {}/{}: {t}", - pkg.repo_owner, pkg.repo_name - ); - String::new() - } - }; - if !key_arg.is_empty() { - // Download or locate the public key - let key_path = self - .download_url_to_path(&key_arg, download_path, ctx) - .await?; - - // Download signature if specified - let sig_path = if let Some(signature) = &cosign.signature { - let mut sig_pkg = pkg.clone(); - (sig_pkg.repo_owner, sig_pkg.repo_name) = resolve_repo_info( - signature.repo_owner.as_ref(), - signature.repo_name.as_ref(), - pkg, - ); - let sig_arg = match signature.r#type.as_deref().unwrap_or_default() { - "github_release" => { - let asset_strs = signature.asset_strs(pkg, v, os(), arch())?; - if asset_strs.is_empty() { - String::new() - } else { - self.github_release_asset(&sig_pkg, v, asset_strs).await?.0 - } - } - "http" => signature.url(pkg, v, os(), arch())?, - t => { - warn!( - "unsupported cosign signature type for {}/{}: {t}", - pkg.repo_owner, pkg.repo_name - ); - String::new() - } - }; - if !sig_arg.is_empty() { - self.download_url_to_path(&sig_arg, download_path, ctx) - .await? 
- } else { - // Default signature path - checksum_path.with_extension("sig") - } - } else { - // Default signature path - checksum_path.with_extension("sig") - }; - - // Verify with key - match sigstore_verification::verify_cosign_signature_with_key( - checksum_path, - &sig_path, - &key_path, - ) - .await - { - Ok(true) => { - ctx.pr - .set_message("✓ Cosign signature verified with key".to_string()); - debug!("Cosign signature verified successfully with key for {tv}"); - let platform_key = self.get_platform_key(); - let pi = tv.lock_platforms.entry(platform_key).or_default(); - if pi - .provenance - .as_ref() - .is_none_or(|p| *p < ProvenanceType::Cosign) - { - pi.provenance = Some(ProvenanceType::Cosign); - } - } - Ok(false) => { - return Err(eyre!("Cosign signature verification failed for {tv}")); - } - Err(e) => { - return Err(eyre!("Cosign verification error for {tv}: {e}")); - } - } - } - } else if let Some(bundle) = &cosign.bundle { - // Bundle-based keyless verification - let mut bundle_pkg = pkg.clone(); - (bundle_pkg.repo_owner, bundle_pkg.repo_name) = - resolve_repo_info(bundle.repo_owner.as_ref(), bundle.repo_name.as_ref(), pkg); - let bundle_arg = match bundle.r#type.as_deref().unwrap_or_default() { - "github_release" => { - let asset_strs = bundle.asset_strs(pkg, v, os(), arch())?; - if asset_strs.is_empty() { - String::new() - } else { - self.github_release_asset(&bundle_pkg, v, asset_strs) - .await? 
- .0 - } - } - "http" => bundle.url(pkg, v, os(), arch())?, - t => { - warn!( - "unsupported cosign bundle type for {}/{}: {t}", - pkg.repo_owner, pkg.repo_name - ); - String::new() - } - }; - if !bundle_arg.is_empty() { - let bundle_path = self - .download_url_to_path(&bundle_arg, download_path, ctx) - .await?; - - // Verify with bundle (keyless) - match sigstore_verification::verify_cosign_signature( - checksum_path, - &bundle_path, - ) - .await - { - Ok(true) => { - ctx.pr - .set_message("✓ Cosign bundle verified (keyless)".to_string()); - debug!("Cosign bundle verified successfully for {tv}"); - let platform_key = self.get_platform_key(); - let pi = tv.lock_platforms.entry(platform_key).or_default(); - if pi - .provenance - .as_ref() - .is_none_or(|p| *p < ProvenanceType::Cosign) - { - pi.provenance = Some(ProvenanceType::Cosign); - } - } - Ok(false) => { - return Err(eyre!("Cosign bundle verification failed for {tv}")); - } - Err(e) => { - return Err(eyre!("Cosign bundle verification error for {tv}: {e}")); - } - } - } - } else { - debug!("cosign for {tv} uses opts-only config, skipping native verification"); + ctx.pr.set_message("✓ Cosign verified".to_string()); + let platform_key = self.get_platform_key(); + let pi = tv.lock_platforms.entry(platform_key).or_default(); + if pi + .provenance + .as_ref() + .is_none_or(|p| *p < ProvenanceType::Cosign) + { + pi.provenance = Some(ProvenanceType::Cosign); } } Ok(()) diff --git a/src/backend/github.rs b/src/backend/github.rs index cc5bd6b228..e884246123 100644 --- a/src/backend/github.rs +++ b/src/backend/github.rs @@ -381,7 +381,7 @@ impl Backend for UnifiedGitBackend { match asset { Ok(asset) => { // Detect provenance availability from release assets and attestation API - let provenance = if !self.is_gitlab() && !self.is_forgejo() { + let mut provenance = if !self.is_gitlab() && !self.is_forgejo() { self.detect_provenance_type( tv, &opts, @@ -395,6 +395,27 @@ impl Backend for UnifiedGitBackend { None }; + // For 
the current platform, verify provenance cryptographically at lock time.
+            // This ensures the lockfile's provenance entry is backed by actual verification,
+            // not just an API query. Cross-platform entries remain detection-only.
+            if provenance.is_some() && target.is_current() {
+                match self
+                    .verify_provenance_at_lock_time(tv, &opts, &repo, &api_url, &asset)
+                    .await
+                {
+                    Ok(verified) => provenance = Some(verified),
+                    Err(e) => {
+                        // Clear provenance so install-time verification will run.
+                        warn!(
+                            "lock-time provenance verification failed for {}, \
+                             will be verified at install time: {e}",
+                            self.ba.full()
+                        );
+                        provenance = None;
+                    }
+                }
+            }
+
             Ok(PlatformInfo {
                 url: Some(asset.url),
                 url_api: Some(asset.url_api),
@@ -503,6 +524,147 @@ impl UnifiedGitBackend {
         None
     }
 
+    /// Verify provenance at lock time by downloading the artifact to a temp directory
+    /// and running cryptographic verification. Only called for the current platform
+    /// during `mise lock`.
+    async fn verify_provenance_at_lock_time(
+        &self,
+        tv: &ToolVersion,
+        opts: &ToolVersionOptions,
+        repo: &str,
+        api_url: &str,
+        asset: &ReleaseAsset,
+    ) -> Result<ProvenanceType> {
+        let tmp_dir = tempfile::tempdir()?;
+        let filename = get_filename_from_url(&asset.url);
+        let artifact_path = tmp_dir.path().join(&filename);
+
+        info!(
+            "downloading artifact for lock-time provenance verification: {}",
+            filename
+        );
+
+        // Use the API URL with appropriate headers for downloading
+        let download_url = if self.is_gitlab() {
+            asset.url.clone()
+        } else {
+            asset.url_api.clone()
+        };
+        let headers = if self.is_gitlab() {
+            gitlab::get_headers(&download_url)
+        } else if self.is_forgejo() {
+            forgejo::get_headers(&download_url)
+        } else {
+            github::get_headers(&download_url)
+        };
+        HTTP.download_file_with_headers(&download_url, &artifact_path, &headers, None)
+            .await?;
+
+        let settings = Settings::get();
+
+        // Try GitHub artifact attestations first (highest priority)
+        if settings.github_attestations && 
settings.github.github_attestations {
+            let parts: Vec<&str> = repo.split('/').collect();
+            if parts.len() == 2 {
+                let (owner, repo_name) = (parts[0], parts[1]);
+                match sigstore_verification::verify_github_attestation(
+                    &artifact_path,
+                    owner,
+                    repo_name,
+                    env::GITHUB_TOKEN.as_deref(),
+                    None,
+                )
+                .await
+                {
+                    Ok(true) => {
+                        debug!("lock-time GitHub attestations verified for {}", repo);
+                        return Ok(ProvenanceType::GithubAttestations);
+                    }
+                    Ok(false) => {
+                        return Err(eyre::eyre!(
+                            "GitHub artifact attestations verification returned false"
+                        ));
+                    }
+                    Err(sigstore_verification::AttestationError::NoAttestations) => {
+                        debug!("no GitHub attestations found at lock time, trying SLSA");
+                    }
+                    Err(e) => {
+                        return Err(eyre::eyre!(
+                            "GitHub artifact attestations verification failed: {e}"
+                        ));
+                    }
+                }
+            }
+        }
+
+        // Fall back to SLSA provenance
+        if settings.slsa && settings.github.slsa {
+            let version = &tv.version;
+            let version_prefix = opts.get("version_prefix");
+            let release =
+                try_with_v_prefix_and_repo(version, version_prefix, Some(repo), |candidate| {
+                    let api_url = api_url.to_string();
+                    let repo = repo.to_string();
+                    async move { github::get_release_for_url(&api_url, &repo, &candidate).await }
+                })
+                .await?;
+
+            let asset_names: Vec<String> = release.assets.iter().map(|a| a.name.clone()).collect();
+            let current_platform = PlatformTarget::from_current();
+            let picker = AssetPicker::with_libc(
+                current_platform.os_name().to_string(),
+                current_platform.arch_name().to_string(),
+                current_platform.qualifier().map(|s| s.to_string()),
+            );
+
+            if let Some(provenance_name) = picker.pick_best_provenance(&asset_names) {
+                let provenance_asset = release
+                    .assets
+                    .iter()
+                    .find(|a| a.name == provenance_name)
+                    .expect("provenance asset should exist since we found its name");
+
+                let provenance_path = tmp_dir.path().join(&provenance_asset.name);
+                HTTP.download_file(
+                    &provenance_asset.browser_download_url,
+                    &provenance_path,
+                    None,
+                )
+                .await?;
+
+                let 
provenance_url = provenance_asset.browser_download_url.clone(); + match sigstore_verification::verify_slsa_provenance( + &artifact_path, + &provenance_path, + 1u8, + ) + .await + { + Ok(true) => { + debug!("lock-time SLSA provenance verified for {}", repo); + return Ok(ProvenanceType::Slsa { + url: Some(provenance_url), + }); + } + Ok(false) => { + return Err(eyre::eyre!("SLSA provenance verification failed")); + } + Err(e) => { + if is_slsa_format_issue(&e) { + debug!("SLSA provenance file not in verifiable format: {e}"); + } else { + return Err(eyre::eyre!("SLSA verification error: {e}")); + } + } + } + } + } + + Err(eyre::eyre!( + "provenance was detected but could not be verified at lock time" + )) + } + fn is_gitlab(&self) -> bool { self.ba.backend_type() == BackendType::Gitlab } @@ -630,7 +792,9 @@ impl UnifiedGitBackend { self.verify_checksum(ctx, tv, &file_path)?; - if has_lockfile_integrity { + let settings = Settings::get(); + let force_verify = settings.force_provenance_verify(); + if has_lockfile_integrity && !force_verify { // Still check that the recorded provenance type's setting is enabled — // disabling a verification setting with a provenance-bearing lockfile is a downgrade. self.ensure_provenance_setting_enabled(tv, &platform_key)?; diff --git a/src/config/settings.rs b/src/config/settings.rs index 5fd88be77f..c5cc8a5eee 100644 --- a/src/config/settings.rs +++ b/src/config/settings.rs @@ -540,6 +540,10 @@ impl Settings { self.lockfile.unwrap_or(true) } + pub fn force_provenance_verify(&self) -> bool { + self.locked_verify_provenance || self.paranoid + } + pub fn ensure_experimental(&self, what: &str) -> Result<()> { if !self.experimental { bail!("{what} is experimental. Enable it with `mise settings experimental=true`");