diff --git a/crates/socket-patch-cli/src/commands/apply.rs b/crates/socket-patch-cli/src/commands/apply.rs index 1034b2a..6510d3f 100644 --- a/crates/socket-patch-cli/src/commands/apply.rs +++ b/crates/socket-patch-cli/src/commands/apply.rs @@ -48,6 +48,10 @@ pub struct ApplyArgs { /// Restrict patching to specific ecosystems #[arg(long, value_delimiter = ',')] pub ecosystems: Option<Vec<String>>, + + /// Skip pre-application hash verification (apply even if package version differs) + #[arg(short = 'f', long, default_value_t = false)] + pub force: bool, } pub async fn run(args: ApplyArgs) -> i32 { @@ -246,11 +250,13 @@ async fn apply_patches_inner( None => continue, }; - // Check first file hash match - if let Some((file_name, file_info)) = patch.files.iter().next() { - let verify = verify_file_patch(pkg_path, file_name, file_info).await; - if verify.status == socket_patch_core::patch::apply::VerifyStatus::HashMismatch { - continue; + // Check first file hash match (skip when --force) + if !args.force { + if let Some((file_name, file_info)) = patch.files.iter().next() { + let verify = verify_file_patch(pkg_path, file_name, file_info).await; + if verify.status == socket_patch_core::patch::apply::VerifyStatus::HashMismatch { + continue; + } } } @@ -260,6 +266,7 @@ async fn apply_patches_inner( &patch.files, &blobs_path, args.dry_run, + args.force, ) .await; @@ -292,6 +299,7 @@ async fn apply_patches_inner( &patch.files, &blobs_path, args.dry_run, + args.force, ) .await; diff --git a/crates/socket-patch-cli/src/commands/get.rs b/crates/socket-patch-cli/src/commands/get.rs index f48bdbb..342d9e7 100644 --- a/crates/socket-patch-cli/src/commands/get.rs +++ b/crates/socket-patch-cli/src/commands/get.rs @@ -57,8 +57,8 @@ pub struct GetArgs { pub api_token: Option<String>, /// Download patch without applying it - #[arg(long = "no-apply", default_value_t = false)] - pub no_apply: bool, + #[arg(long = "save-only", alias = "no-apply", default_value_t = false)] + pub save_only: bool, /// Apply 
patch to globally installed npm packages #[arg(short = 'g', long, default_value_t = false)] @@ -110,8 +110,8 @@ pub async fn run(args: GetArgs) -> i32 { eprintln!("Error: Only one of --id, --cve, --ghsa, or --package can be specified"); return 1; } - if args.one_off && args.no_apply { - eprintln!("Error: --one-off and --no-apply cannot be used together"); + if args.one_off && args.save_only { + eprintln!("Error: --one-off and --save-only cannot be used together"); return 1; } @@ -125,11 +125,8 @@ pub async fn run(args: GetArgs) -> i32 { let (api_client, use_public_proxy) = get_api_client_from_env(args.org.as_deref()).await; - let effective_org_slug: Option<&str> = if use_public_proxy { - None - } else { - None // org slug is already stored in the client - }; + // org slug is already stored in the client + let effective_org_slug: Option<&str> = None; // Determine identifier type let id_type = if args.id { @@ -438,8 +435,8 @@ pub async fn run(args: GetArgs) -> i32 { println!(" Failed: {patches_failed}"); } - // Auto-apply unless --no-apply - if !args.no_apply && patches_added > 0 { + // Auto-apply unless --save-only + if !args.save_only && patches_added > 0 { println!("\nApplying patches..."); let apply_args = super::apply::ApplyArgs { cwd: args.cwd.clone(), @@ -450,6 +447,7 @@ pub async fn run(args: GetArgs) -> i32 { global: args.global, global_prefix: args.global_prefix.clone(), ecosystems: None, + force: false, }; let code = super::apply::run(apply_args).await; if code != 0 { @@ -621,7 +619,7 @@ async fn save_and_apply_patch( println!(" Skipped: 1 (already exists)"); } - if !args.no_apply { + if !args.save_only { println!("\nApplying patches..."); let apply_args = super::apply::ApplyArgs { cwd: args.cwd.clone(), @@ -632,6 +630,7 @@ async fn save_and_apply_patch( global: args.global, global_prefix: args.global_prefix.clone(), ecosystems: None, + force: false, }; let code = super::apply::run(apply_args).await; if code != 0 { diff --git 
a/crates/socket-patch-cli/src/commands/scan.rs b/crates/socket-patch-cli/src/commands/scan.rs index c4d535a..01788f0 100644 --- a/crates/socket-patch-cli/src/commands/scan.rs +++ b/crates/socket-patch-cli/src/commands/scan.rs @@ -53,13 +53,10 @@ pub async fn run(args: ScanArgs) -> i32 { std::env::set_var("SOCKET_API_TOKEN", token); } - let (api_client, use_public_proxy) = get_api_client_from_env(args.org.as_deref()).await; + let (api_client, _use_public_proxy) = get_api_client_from_env(args.org.as_deref()).await; - let effective_org_slug: Option<&str> = if use_public_proxy { - None - } else { - None // org slug is already stored in the client - }; + // org slug is already stored in the client + let effective_org_slug: Option<&str> = None; let crawler_options = CrawlerOptions { cwd: args.cwd.clone(), diff --git a/crates/socket-patch-cli/tests/e2e_npm.rs b/crates/socket-patch-cli/tests/e2e_npm.rs index d03aa39..4bfa39a 100644 --- a/crates/socket-patch-cli/tests/e2e_npm.rs +++ b/crates/socket-patch-cli/tests/e2e_npm.rs @@ -393,6 +393,96 @@ fn test_npm_global_lifecycle() { ); } +/// `get --save-only` should save the patch to the manifest without applying. +#[test] +#[ignore] +fn test_npm_save_only() { + if !has_command("npm") { + eprintln!("SKIP: npm not found on PATH"); + return; + } + + let dir = tempfile::tempdir().unwrap(); + let cwd = dir.path(); + + write_package_json(cwd); + npm_run(cwd, &["install", "minimist@1.2.2"]); + + let index_js = cwd.join("node_modules/minimist/index.js"); + assert_eq!(git_sha256_file(&index_js), BEFORE_HASH); + + // Download with --save-only (new name for --no-apply). + assert_run_ok(cwd, &["get", NPM_UUID, "--save-only"], "get --save-only"); + + // File should still be original. + assert_eq!( + git_sha256_file(&index_js), + BEFORE_HASH, + "file should not change after get --save-only" + ); + + // Manifest should exist with the patch. 
+ let manifest_path = cwd.join(".socket/manifest.json"); + assert!(manifest_path.exists(), "manifest should exist after get --save-only"); + + let manifest: serde_json::Value = + serde_json::from_str(&std::fs::read_to_string(&manifest_path).unwrap()).unwrap(); + let patch = &manifest["patches"][NPM_PURL]; + assert!(patch.is_object(), "manifest should contain {NPM_PURL}"); + assert_eq!(patch["uuid"].as_str().unwrap(), NPM_UUID); + + // Real apply should work. + assert_run_ok(cwd, &["apply"], "apply"); + assert_eq!( + git_sha256_file(&index_js), + AFTER_HASH, + "file should match afterHash after apply" + ); +} + +/// `apply --force` should apply patches even when file hashes don't match. +#[test] +#[ignore] +fn test_npm_apply_force() { + if !has_command("npm") { + eprintln!("SKIP: npm not found on PATH"); + return; + } + + let dir = tempfile::tempdir().unwrap(); + let cwd = dir.path(); + + write_package_json(cwd); + npm_run(cwd, &["install", "minimist@1.2.2"]); + + let index_js = cwd.join("node_modules/minimist/index.js"); + assert_eq!(git_sha256_file(&index_js), BEFORE_HASH); + + // Save the patch without applying. + assert_run_ok(cwd, &["get", NPM_UUID, "--save-only"], "get --save-only"); + + // Corrupt the file to create a hash mismatch (keep same version so PURL matches). + std::fs::write(&index_js, b"// corrupted content\n").unwrap(); + assert_ne!( + git_sha256_file(&index_js), + BEFORE_HASH, + "corrupted file should have a different hash" + ); + + // Normal apply should fail due to hash mismatch. + let (code, _stdout, _stderr) = run(cwd, &["apply"]); + assert_ne!(code, 0, "apply without --force should fail on hash mismatch"); + + // Apply with --force should succeed. + assert_run_ok(cwd, &["apply", "--force"], "apply --force"); + + assert_eq!( + git_sha256_file(&index_js), + AFTER_HASH, + "index.js should match afterHash after apply --force" + ); +} + /// UUID shortcut: `socket-patch ` should behave like `socket-patch get `. 
#[test] #[ignore] diff --git a/crates/socket-patch-cli/tests/e2e_pypi.rs b/crates/socket-patch-cli/tests/e2e_pypi.rs index a7fad83..8661b7c 100644 --- a/crates/socket-patch-cli/tests/e2e_pypi.rs +++ b/crates/socket-patch-cli/tests/e2e_pypi.rs @@ -517,6 +517,60 @@ fn test_pypi_global_lifecycle() { ); } +/// `get --save-only` should save the patch to the manifest without applying. +#[test] +#[ignore] +fn test_pypi_save_only() { + if !has_python3() { + eprintln!("SKIP: python3 not found on PATH"); + return; + } + + let dir = tempfile::tempdir().unwrap(); + let cwd = dir.path(); + + setup_venv(cwd); + + let site_packages = find_site_packages(cwd); + let messages_py = site_packages.join("pydantic_ai/messages.py"); + assert!(messages_py.exists()); + let original_hash = git_sha256_file(&messages_py); + + // Download with --save-only. + assert_run_ok(cwd, &["get", PYPI_UUID, "--save-only"], "get --save-only"); + + // File should be unchanged. + assert_eq!( + git_sha256_file(&messages_py), + original_hash, + "file should not change after get --save-only" + ); + + // Manifest should exist with the patch. + let manifest_path = cwd.join(".socket/manifest.json"); + assert!(manifest_path.exists(), "manifest should exist after get --save-only"); + + let (purl, _) = read_patch_files(&manifest_path); + assert!( + purl.starts_with(PYPI_PURL_PREFIX), + "manifest should contain a pydantic-ai patch" + ); + + // Real apply should work. + assert_run_ok(cwd, &["apply"], "apply"); + + let (_, files_value) = read_patch_files(&manifest_path); + let files = files_value.as_object().unwrap(); + let after_hash = files["pydantic_ai/messages.py"]["afterHash"] + .as_str() + .unwrap(); + assert_eq!( + git_sha256_file(&messages_py), + after_hash, + "file should match afterHash after apply" + ); +} + /// UUID shortcut: `socket-patch ` should behave like `socket-patch get `. 
#[test] #[ignore] diff --git a/crates/socket-patch-core/src/crawlers/types.rs b/crates/socket-patch-core/src/crawlers/types.rs index 1eef452..ae9ff01 100644 --- a/crates/socket-patch-core/src/crawlers/types.rs +++ b/crates/socket-patch-core/src/crawlers/types.rs @@ -27,9 +27,9 @@ impl Ecosystem { return Some(Ecosystem::Cargo); } if purl.starts_with("pkg:npm/") { - return Some(Ecosystem::Npm) + Some(Ecosystem::Npm) } else if purl.starts_with("pkg:pypi/") { - return Some(Ecosystem::Pypi) + Some(Ecosystem::Pypi) } else { None } diff --git a/crates/socket-patch-core/src/patch/apply.rs b/crates/socket-patch-core/src/patch/apply.rs index 9bf1256..8f49491 100644 --- a/crates/socket-patch-core/src/patch/apply.rs +++ b/crates/socket-patch-core/src/patch/apply.rs @@ -193,6 +193,7 @@ pub async fn apply_package_patch( files: &HashMap<String, PatchFileInfo>, blobs_path: &Path, dry_run: bool, + force: bool, ) -> ApplyResult { let mut result = ApplyResult { package_key: package_key.to_string(), @@ -205,32 +206,46 @@ pub async fn apply_package_patch( // First, verify all files for (file_name, file_info) in files { - let verify_result = verify_file_patch(pkg_path, file_name, file_info).await; + let mut verify_result = verify_file_patch(pkg_path, file_name, file_info).await; - // If any file is not ready or already patched, we can't proceed if verify_result.status != VerifyStatus::Ready && verify_result.status != VerifyStatus::AlreadyPatched { + if force { + match verify_result.status { + VerifyStatus::HashMismatch => { + // Force: treat hash mismatch as ready + verify_result.status = VerifyStatus::Ready; + } + VerifyStatus::NotFound => { + // Force: skip files that don't exist (non-new files) + result.files_verified.push(verify_result); + 
continue; + } + _ => {} + } + } else { + let msg = verify_result + .message + .clone() + .unwrap_or_else(|| format!("{:?}", verify_result.status)); + result.error = Some(format!( + "Cannot apply patch: {} - {}", + verify_result.file, msg + )); + result.files_verified.push(verify_result); + return result; + } } result.files_verified.push(verify_result); } - // Check if all files are already patched + // Check if all files are already patched (or skipped due to NotFound with force) let all_patched = result .files_verified .iter() - .all(|v| v.status == VerifyStatus::AlreadyPatched); + .all(|v| v.status == VerifyStatus::AlreadyPatched || v.status == VerifyStatus::NotFound); if all_patched { result.success = true; return result; @@ -246,7 +261,9 @@ pub async fn apply_package_patch( for (file_name, file_info) in files { let verify_result = result.files_verified.iter().find(|v| v.file == *file_name); if let Some(vr) = verify_result { - if vr.status == VerifyStatus::AlreadyPatched { + if vr.status == VerifyStatus::AlreadyPatched + || vr.status == VerifyStatus::NotFound + { continue; } } @@ -455,7 +472,7 @@ mod tests { ); let result = - apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false) + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, false) .await; assert!(result.success); @@ -485,7 +502,7 @@ mod tests { ); let result = - apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), true) + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), true, false) .await; assert!(result.success); @@ -518,7 +535,7 @@ mod tests { ); let result = - apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false) + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, false) .await; assert!(result.success); @@ -544,10 +561,87 @@ mod tests { ); let result = - 
apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false) + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, false) .await; assert!(!result.success); assert!(result.error.is_some()); } + + #[tokio::test] + async fn test_apply_package_patch_force_hash_mismatch() { + let pkg_dir = tempfile::tempdir().unwrap(); + let blobs_dir = tempfile::tempdir().unwrap(); + + let patched = b"patched content"; + let after_hash = compute_git_sha256_from_bytes(patched); + + // Write a file whose hash does NOT match before_hash + tokio::fs::write(pkg_dir.path().join("index.js"), b"something unexpected") + .await + .unwrap(); + + // Write blob + tokio::fs::write(blobs_dir.path().join(&after_hash), patched) + .await + .unwrap(); + + let mut files = HashMap::new(); + files.insert( + "index.js".to_string(), + PatchFileInfo { + before_hash: "aaaa".to_string(), + after_hash: after_hash.clone(), + }, + ); + + // Without force: should fail + let result = + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, false) + .await; + assert!(!result.success); + + // Reset the file + tokio::fs::write(pkg_dir.path().join("index.js"), b"something unexpected") + .await + .unwrap(); + + // With force: should succeed + let result = + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, true) + .await; + assert!(result.success); + assert_eq!(result.files_patched.len(), 1); + + let written = tokio::fs::read(pkg_dir.path().join("index.js")).await.unwrap(); + assert_eq!(written, patched); + } + + #[tokio::test] + async fn test_apply_package_patch_force_not_found_skips() { + let pkg_dir = tempfile::tempdir().unwrap(); + let blobs_dir = tempfile::tempdir().unwrap(); + + let mut files = HashMap::new(); + files.insert( + "missing.js".to_string(), + PatchFileInfo { + before_hash: "aaaa".to_string(), + after_hash: "bbbb".to_string(), + }, + ); + + // Without force: 
should fail (NotFound for non-new file) + let result = + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, false) + .await; + assert!(!result.success); + + // With force: should succeed by skipping the missing file + let result = + apply_package_patch("pkg:npm/test@1.0.0", pkg_dir.path(), &files, blobs_dir.path(), false, true) + .await; + assert!(result.success); + assert_eq!(result.files_patched.len(), 0); + } }