diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..ae482d0
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,90 @@
+name: CI
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+
+permissions:
+ contents: read
+
+jobs:
+ test:
+ strategy:
+ matrix:
+ os: [ubuntu-latest, windows-latest]
+ runs-on: ${{ matrix.os }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@efa25f7f19611383d5b0ccf2d1c8914531636bf9 # stable
+ with:
+ toolchain: stable
+ components: clippy
+
+ - name: Cache cargo
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
+ with:
+ path: |
+ ~/.cargo/registry
+ ~/.cargo/git
+ target
+ key: ${{ matrix.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+ restore-keys: ${{ matrix.os }}-cargo-
+
+ - name: Run clippy
+ run: cargo clippy --workspace -- -D warnings
+
+ - name: Run tests
+ run: cargo test --workspace
+
+ e2e:
+ needs: test
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - os: ubuntu-latest
+ suite: e2e_npm
+ - os: ubuntu-latest
+ suite: e2e_pypi
+ - os: macos-latest
+ suite: e2e_npm
+ - os: macos-latest
+ suite: e2e_pypi
+ runs-on: ${{ matrix.os }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@efa25f7f19611383d5b0ccf2d1c8914531636bf9 # stable
+ with:
+ toolchain: stable
+
+ - name: Cache cargo
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
+ with:
+ path: |
+ ~/.cargo/registry
+ ~/.cargo/git
+ target
+ key: ${{ matrix.os }}-cargo-e2e-${{ hashFiles('**/Cargo.lock') }}
+ restore-keys: ${{ matrix.os }}-cargo-e2e-
+
+ - name: Setup Node.js
+ if: matrix.suite == 'e2e_npm'
+ uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
+ with:
+        node-version: '20'
+
+ - name: Setup Python
+ if: matrix.suite == 'e2e_pypi'
+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
+ with:
+ python-version: "3.12"
+
+ - name: Run e2e tests
+ run: cargo test -p socket-patch-cli --test ${{ matrix.suite }} -- --ignored
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 288efdc..ad5cefd 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -1,4 +1,4 @@
-name: 📦 Publish
+name: Publish
on:
workflow_dispatch:
@@ -11,29 +11,13 @@ on:
- patch
- minor
- major
- dist-tag:
- description: 'npm dist-tag (latest, next, beta, canary, backport, etc.)'
- required: false
- default: 'latest'
- type: string
- debug:
- description: 'Enable debug output'
- required: false
- default: '0'
- type: choice
- options:
- - '0'
- - '1'
permissions:
contents: write
- id-token: write
jobs:
- bump-version:
+ bump-and-tag:
runs-on: ubuntu-latest
- outputs:
- new-tag: ${{ steps.bump.outputs.new-tag }}
steps:
- name: Checkout
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
@@ -48,23 +32,20 @@ jobs:
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
- - name: Bump version
- id: bump
+ - name: Bump version and sync
run: |
- npm version ${{ inputs.version-bump }} -m "v%s"
- echo "new-tag=$(git describe --tags --abbrev=0)" >> "$GITHUB_OUTPUT"
-
- - name: Push changes
+ CURRENT=$(node -p "require('./npm/socket-patch/package.json').version")
+ VERSION=$(node -e "
+ const [major, minor, patch] = '$CURRENT'.split('.').map(Number);
+ const bump = '${{ inputs.version-bump }}';
+ if (bump === 'major') console.log((major+1)+'.0.0');
+ else if (bump === 'minor') console.log(major+'.'+(minor+1)+'.0');
+ else console.log(major+'.'+minor+'.'+(patch+1));
+ ")
+ bash scripts/version-sync.sh "$VERSION"
+ git add Cargo.toml npm/
+ git commit -m "v$VERSION"
+ git tag "v$VERSION"
+
+ - name: Push changes and tag
run: git push && git push --tags
-
- publish:
- needs: bump-version
- uses: SocketDev/socket-registry/.github/workflows/provenance.yml@main
- with:
- debug: ${{ inputs.debug }}
- dist-tag: ${{ inputs.dist-tag }}
- package-name: '@socketsecurity/socket-patch'
- publish-script: 'publish:ci'
- ref: ${{ needs.bump-version.outputs.new-tag }}
- setup-script: 'pnpm run build'
- use-trusted-publishing: true
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..90f44a1
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,177 @@
+name: Release
+
+on:
+ push:
+ tags:
+ - 'v*'
+
+permissions:
+ contents: write
+ id-token: write
+
+jobs:
+ build:
+ strategy:
+ matrix:
+ include:
+ - target: aarch64-apple-darwin
+ runner: macos-14
+ archive: tar.gz
+ build-tool: cargo
+ - target: x86_64-apple-darwin
+ runner: macos-13
+ archive: tar.gz
+ build-tool: cargo
+ - target: x86_64-unknown-linux-musl
+ runner: ubuntu-latest
+ archive: tar.gz
+ build-tool: cross
+ - target: aarch64-unknown-linux-gnu
+ runner: ubuntu-latest
+ archive: tar.gz
+ build-tool: cross
+ - target: x86_64-pc-windows-msvc
+ runner: windows-latest
+ archive: zip
+ build-tool: cargo
+ runs-on: ${{ matrix.runner }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@efa25f7f19611383d5b0ccf2d1c8914531636bf9 # stable
+ with:
+ toolchain: stable
+ targets: ${{ matrix.target }}
+
+ - name: Install cross
+ if: matrix.build-tool == 'cross'
+ run: cargo install cross --git https://github.com/cross-rs/cross
+
+ - name: Build (cargo)
+ if: matrix.build-tool == 'cargo'
+ run: cargo build --release --target ${{ matrix.target }}
+
+ - name: Build (cross)
+ if: matrix.build-tool == 'cross'
+ run: cross build --release --target ${{ matrix.target }}
+
+ - name: Package (unix)
+ if: matrix.archive == 'tar.gz'
+ run: |
+ cd target/${{ matrix.target }}/release
+ tar czf ../../../socket-patch-${{ matrix.target }}.tar.gz socket-patch
+ cd ../../..
+
+ - name: Package (windows)
+ if: matrix.archive == 'zip'
+ shell: pwsh
+ run: |
+ Compress-Archive -Path "target/${{ matrix.target }}/release/socket-patch.exe" -DestinationPath "socket-patch-${{ matrix.target }}.zip"
+
+ - name: Upload artifact (tar.gz)
+ if: matrix.archive == 'tar.gz'
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+ with:
+ name: socket-patch-${{ matrix.target }}
+ path: socket-patch-${{ matrix.target }}.tar.gz
+
+ - name: Upload artifact (zip)
+ if: matrix.archive == 'zip'
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+ with:
+ name: socket-patch-${{ matrix.target }}
+ path: socket-patch-${{ matrix.target }}.zip
+
+ github-release:
+ needs: build
+ runs-on: ubuntu-latest
+ steps:
+ - name: Download all artifacts
+ uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
+ with:
+ path: artifacts
+ merge-multiple: true
+
+ - name: Create GitHub Release
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ TAG="${GITHUB_REF_NAME}"
+ gh release create "$TAG" \
+ --repo "$GITHUB_REPOSITORY" \
+ --generate-notes \
+ artifacts/*
+
+ cargo-publish:
+ needs: build
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ id-token: write
+ steps:
+ - name: Checkout
+ uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@efa25f7f19611383d5b0ccf2d1c8914531636bf9 # stable
+ with:
+ toolchain: stable
+
+ - name: Authenticate with crates.io
+ uses: rust-lang/crates-io-auth-action@b7e9a28eded4986ec6b1fa40eeee8f8f165559ec # v1.0.3
+
+ - name: Publish socket-patch-core
+ run: cargo publish -p socket-patch-core
+
+ - name: Wait for crates.io index update
+ run: sleep 30
+
+ - name: Publish socket-patch-cli
+ run: cargo publish -p socket-patch-cli
+
+ npm-publish:
+ needs: build
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+
+ - name: Download all artifacts
+ uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
+ with:
+ path: artifacts
+ merge-multiple: true
+
+ - name: Setup Node.js
+ uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
+ with:
+ node-version: '20'
+ registry-url: 'https://registry.npmjs.org'
+
+ - name: Extract version and sync
+ run: |
+ VERSION="${GITHUB_REF_NAME#v}"
+ echo "VERSION=$VERSION" >> "$GITHUB_ENV"
+ bash scripts/version-sync.sh "$VERSION"
+
+ - name: Stage binaries
+ run: |
+ mkdir -p npm/socket-patch/bin
+ tar xzf artifacts/socket-patch-aarch64-apple-darwin.tar.gz -C npm/socket-patch/bin/
+ mv npm/socket-patch/bin/socket-patch npm/socket-patch/bin/socket-patch-darwin-arm64
+ tar xzf artifacts/socket-patch-x86_64-apple-darwin.tar.gz -C npm/socket-patch/bin/
+ mv npm/socket-patch/bin/socket-patch npm/socket-patch/bin/socket-patch-darwin-x64
+ tar xzf artifacts/socket-patch-x86_64-unknown-linux-musl.tar.gz -C npm/socket-patch/bin/
+ mv npm/socket-patch/bin/socket-patch npm/socket-patch/bin/socket-patch-linux-x64
+ tar xzf artifacts/socket-patch-aarch64-unknown-linux-gnu.tar.gz -C npm/socket-patch/bin/
+ mv npm/socket-patch/bin/socket-patch npm/socket-patch/bin/socket-patch-linux-arm64
+ cd npm/socket-patch/bin
+ unzip ../../../artifacts/socket-patch-x86_64-pc-windows-msvc.zip
+ mv socket-patch.exe socket-patch-win32-x64.exe
+
+ - name: Publish package
+ env:
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+ run: npm publish npm/socket-patch --provenance --access public
diff --git a/.gitignore b/.gitignore
index 9a5aced..c2fe737 100644
--- a/.gitignore
+++ b/.gitignore
@@ -137,3 +137,9 @@ dist
# Vite logs files
vite.config.js.timestamp-*
vite.config.ts.timestamp-*
+
+# Rust
+target/
+
+# npm binaries (populated at publish time)
+npm/socket-patch/bin/socket-patch-*
diff --git a/.npmignore b/.npmignore
deleted file mode 100644
index ae7dfe3..0000000
--- a/.npmignore
+++ /dev/null
@@ -1,33 +0,0 @@
-# Source files (dist is published instead)
-src/
-
-# GitHub workflows and configs
-.github/
-
-# Linting and formatting configs
-.oxlintrc.json
-biome.json
-
-# TypeScript config
-tsconfig.json
-
-# Lock files
-pnpm-lock.yaml
-
-# Git files
-.gitignore
-
-# Documentation (except README and LICENSE which npm includes by default)
-EDGE_CASES.md
-
-# Build artifacts
-*.tsbuildinfo
-
-# Environment files
-.env*
-!.env.example
-
-# Test files
-*.test.ts
-*.test.js
-__tests__/
diff --git a/.oxlintrc.json b/.oxlintrc.json
deleted file mode 100644
index 3415f15..0000000
--- a/.oxlintrc.json
+++ /dev/null
@@ -1,64 +0,0 @@
-{
- "$schema": "./node_modules/oxlint/configuration_schema.json",
- "ignorePatterns": ["**/dist/", "**/node_modules/", "**/.git/"],
- "plugins": ["typescript", "oxc", "promise", "import"],
- "categories": {
- "correctness": "warn",
- "perf": "warn",
- "suspicious": "warn"
- },
- "rules": {
- "no-unused-vars": "allow",
- "no-new-array": "allow",
- "no-empty-file": "allow",
- "no-await-in-loop": "allow",
- "consistent-function-scoping": "allow",
- "no-new": "allow",
- "no-extraneous-class": "allow",
- "no-array-index-key": "allow",
- "no-unsafe-optional-chaining": "allow",
- "no-promise-in-callback": "allow",
- "no-callback-in-promise": "allow",
- "consistent-type-imports": "deny",
- "no-empty-named-blocks": "allow",
- "no-unnecessary-parameter-property-assignment": "allow",
- "no-unneeded-ternary": "allow",
- "no-eq-null": "allow",
- "max-lines-per-function": "allow",
- "max-depth": "allow",
- "no-magic-numbers": "allow",
- "no-unassigned-import": "allow",
- "promise/always-return": "allow",
- "no-unassigned-vars": "deny",
- "typescript/no-floating-promises": "deny",
- "typescript/no-misused-promises": "deny",
- "typescript/return-await": "allow",
- "typescript/await-thenable": "allow",
- "typescript/consistent-type-imports": "allow",
- "typescript/no-base-to-string": "allow",
- "typescript/no-duplicate-type-constituents": "allow",
- "typescript/no-for-in-array": "allow",
- "typescript/no-meaningless-void-operator": "allow",
- "typescript/no-misused-spread": "allow",
- "typescript/no-redundant-type-constituents": "allow",
- "typescript/no-unnecessary-boolean-literal-compare": "allow",
- "typescript/no-unnecessary-template-expression": "allow",
- "typescript/no-unnecessary-type-arguments": "allow",
- "typescript/no-unnecessary-type-assertion": "allow",
- "typescript/no-unsafe-enum-comparison": "allow",
- "typescript/no-unsafe-type-assertion": "allow",
- "typescript/require-array-sort-compare": "allow",
- "typescript/restrict-template-expressions": "allow",
- "typescript/triple-slash-reference": "allow",
- "typescript/unbound-method": "allow"
- },
- "overrides": [
- {
- "files": ["**/*.test.ts", "**/*.test.js", "**/*.spec.ts", "**/*.spec.js"],
- "rules": {
- "typescript/no-floating-promises": "allow",
- "typescript/no-misused-promises": "allow"
- }
- }
- ]
-}
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..4a4482a
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,2117 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anstream"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.102"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
+
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
+[[package]]
+name = "bitflags"
+version = "2.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb"
+
+[[package]]
+name = "bytes"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
+
+[[package]]
+name = "cc"
+version = "1.2.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2"
+dependencies = [
+ "find-msvc-tools",
+ "shlex",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
+
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
+[[package]]
+name = "clap"
+version = "4.5.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a"
+dependencies = [
+ "clap_builder",
+ "clap_derive",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.5.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap_lex",
+ "strsim",
+]
+
+[[package]]
+name = "clap_derive"
+version = "4.5.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "clap_lex"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831"
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
+
+[[package]]
+name = "console"
+version = "0.15.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
+dependencies = [
+ "encode_unicode",
+ "libc",
+ "once_cell",
+ "unicode-width",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "dialoguer"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de"
+dependencies = [
+ "console",
+ "shell-words",
+ "tempfile",
+ "thiserror 1.0.69",
+ "zeroize",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "encode_unicode"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
+
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+[[package]]
+name = "errno"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
+dependencies = [
+ "libc",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
+
+[[package]]
+name = "find-msvc-tools"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
+
+[[package]]
+name = "foldhash"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
+
+[[package]]
+name = "futures-task"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
+
+[[package]]
+name = "futures-util"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "pin-project-lite",
+ "slab",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "wasi",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "r-efi 5.3.0",
+ "wasip2",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "r-efi 6.0.0",
+ "wasip2",
+ "wasip3",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
+dependencies = [
+ "foldhash",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.16.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
+
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "http"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
+dependencies = [
+ "bytes",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
+dependencies = [
+ "bytes",
+ "http",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
+[[package]]
+name = "hyper"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "http",
+ "http-body",
+ "httparse",
+ "itoa",
+ "pin-project-lite",
+ "pin-utils",
+ "smallvec",
+ "tokio",
+ "want",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.27.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
+dependencies = [
+ "http",
+ "hyper",
+ "hyper-util",
+ "rustls",
+ "rustls-pki-types",
+ "tokio",
+ "tokio-rustls",
+ "tower-service",
+ "webpki-roots",
+]
+
+[[package]]
+name = "hyper-util"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
+dependencies = [
+ "base64",
+ "bytes",
+ "futures-channel",
+ "futures-util",
+ "http",
+ "http-body",
+ "hyper",
+ "ipnet",
+ "libc",
+ "percent-encoding",
+ "pin-project-lite",
+ "socket2",
+ "tokio",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "icu_collections"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
+dependencies = [
+ "displaydoc",
+ "potential_utf",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locale_core"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
+dependencies = [
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
+
+[[package]]
+name = "icu_properties"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
+dependencies = [
+ "icu_collections",
+ "icu_locale_core",
+ "icu_properties_data",
+ "icu_provider",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"
+
+[[package]]
+name = "icu_provider"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
+dependencies = [
+ "displaydoc",
+ "icu_locale_core",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "id-arena"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954"
+
+[[package]]
+name = "idna"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.16.1",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "indicatif"
+version = "0.17.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
+dependencies = [
+ "console",
+ "number_prefix",
+ "portable-atomic",
+ "unicode-width",
+ "web-time",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2"
+
+[[package]]
+name = "iri-string"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
+
+[[package]]
+name = "itoa"
+version = "1.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
+
+[[package]]
+name = "js-sys"
+version = "0.3.91"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "leb128fmt"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
+
+[[package]]
+name = "libc"
+version = "0.2.182"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53"
+
+[[package]]
+name = "litemap"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
+
+[[package]]
+name = "lock_api"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
+dependencies = [
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
+
+[[package]]
+name = "lru-slab"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
+
+[[package]]
+name = "memchr"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
+
+[[package]]
+name = "mio"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
+dependencies = [
+ "libc",
+ "wasi",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "number_prefix"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+
+[[package]]
+name = "once_cell"
+version = "1.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
+
+[[package]]
+name = "parking_lot"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-link",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "portable-atomic"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
+
+[[package]]
+name = "potential_utf"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
+dependencies = [
+ "zerovec",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "prettyplease"
+version = "0.2.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
+dependencies = [
+ "proc-macro2",
+ "syn",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.106"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quinn"
+version = "0.11.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
+dependencies = [
+ "bytes",
+ "cfg_aliases",
+ "pin-project-lite",
+ "quinn-proto",
+ "quinn-udp",
+ "rustc-hash",
+ "rustls",
+ "socket2",
+ "thiserror 2.0.18",
+ "tokio",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-proto"
+version = "0.11.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
+dependencies = [
+ "bytes",
+ "getrandom 0.3.4",
+ "lru-slab",
+ "rand",
+ "ring",
+ "rustc-hash",
+ "rustls",
+ "rustls-pki-types",
+ "slab",
+ "thiserror 2.0.18",
+ "tinyvec",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-udp"
+version = "0.5.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
+dependencies = [
+ "cfg_aliases",
+ "libc",
+ "once_cell",
+ "socket2",
+ "tracing",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "r-efi"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf"
+
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c"
+dependencies = [
+ "getrandom 0.3.4",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex"
+version = "1.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a"
+
+[[package]]
+name = "reqwest"
+version = "0.12.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147"
+dependencies = [
+ "base64",
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-rustls",
+ "hyper-util",
+ "js-sys",
+ "log",
+ "percent-encoding",
+ "pin-project-lite",
+ "quinn",
+ "rustls",
+ "rustls-pki-types",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tokio-rustls",
+ "tower",
+ "tower-http",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "webpki-roots",
+]
+
+[[package]]
+name = "ring"
+version = "0.17.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "getrandom 0.2.17",
+ "libc",
+ "untrusted",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "rustc-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+
+[[package]]
+name = "rustix"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "rustls"
+version = "0.23.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4"
+dependencies = [
+ "once_cell",
+ "ring",
+ "rustls-pki-types",
+ "rustls-webpki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
+dependencies = [
+ "web-time",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-webpki"
+version = "0.103.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
+dependencies = [
+ "ring",
+ "rustls-pki-types",
+ "untrusted",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+[[package]]
+name = "ryu"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "semver"
+version = "1.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+
+[[package]]
+name = "serde"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
+dependencies = [
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.149"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
+dependencies = [
+ "itoa",
+ "memchr",
+ "serde",
+ "serde_core",
+ "zmij",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "shell-words"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc6fe69c597f9c37bfeeeeeb33da3530379845f10be461a66d16d03eca2ded77"
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b"
+dependencies = [
+ "errno",
+ "libc",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5"
+
+[[package]]
+name = "smallvec"
+version = "1.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
+
+[[package]]
+name = "socket-patch-cli"
+version = "1.2.0"
+dependencies = [
+ "clap",
+ "dialoguer",
+ "hex",
+ "indicatif",
+ "regex",
+ "serde",
+ "serde_json",
+ "sha2",
+ "socket-patch-core",
+ "tempfile",
+ "tokio",
+ "uuid",
+]
+
+[[package]]
+name = "socket-patch-core"
+version = "1.2.0"
+dependencies = [
+ "hex",
+ "once_cell",
+ "regex",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "sha2",
+ "tempfile",
+ "thiserror 2.0.18",
+ "tokio",
+ "uuid",
+ "walkdir",
+]
+
+[[package]]
+name = "socket2"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0"
+dependencies = [
+ "libc",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
+
+[[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
+[[package]]
+name = "subtle"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+
+[[package]]
+name = "syn"
+version = "2.0.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.26.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0"
+dependencies = [
+ "fastrand",
+ "getrandom 0.4.2",
+ "once_cell",
+ "rustix",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
+dependencies = [
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
+dependencies = [
+ "thiserror-impl 2.0.18",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tinystr"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
+dependencies = [
+ "displaydoc",
+ "zerovec",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tokio"
+version = "1.50.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
+dependencies = [
+ "bytes",
+ "libc",
+ "mio",
+ "parking_lot",
+ "pin-project-lite",
+ "signal-hook-registry",
+ "socket2",
+ "tokio-macros",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.26.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61"
+dependencies = [
+ "rustls",
+ "tokio",
+]
+
+[[package]]
+name = "tower"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "iri-string",
+ "pin-project-lite",
+ "tower",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
+[[package]]
+name = "tracing"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
+dependencies = [
+ "pin-project-lite",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
+[[package]]
+name = "typenum"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
+
+[[package]]
+name = "unicode-width"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+
+[[package]]
+name = "untrusted"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+
+[[package]]
+name = "url"
+version = "2.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "uuid"
+version = "1.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb"
+dependencies = [
+ "getrandom 0.4.2",
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.1+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
+
+[[package]]
+name = "wasip2"
+version = "1.0.2+wasi-0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "wasip3"
+version = "0.4.0+wasi-0.3.0-rc-2026-01-06"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8"
+dependencies = [
+ "cfg-if",
+ "futures-util",
+ "js-sys",
+ "once_cell",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3"
+dependencies = [
+ "bumpalo",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "wasm-encoder"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319"
+dependencies = [
+ "leb128fmt",
+ "wasmparser",
+]
+
+[[package]]
+name = "wasm-metadata"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
+dependencies = [
+ "anyhow",
+ "indexmap",
+ "wasm-encoder",
+ "wasmparser",
+]
+
+[[package]]
+name = "wasmparser"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
+dependencies = [
+ "bitflags",
+ "hashbrown 0.15.5",
+ "indexmap",
+ "semver",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.91"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "web-time"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed"
+dependencies = [
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "winapi-util"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "windows-link"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.5",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.61.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm 0.52.6",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm 0.53.1",
+ "windows_aarch64_msvc 0.53.1",
+ "windows_i686_gnu 0.53.1",
+ "windows_i686_gnullvm 0.53.1",
+ "windows_i686_msvc 0.53.1",
+ "windows_x86_64_gnu 0.53.1",
+ "windows_x86_64_gnullvm 0.53.1",
+ "windows_x86_64_msvc 0.53.1",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
+
+[[package]]
+name = "wit-bindgen"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
+dependencies = [
+ "wit-bindgen-rust-macro",
+]
+
+[[package]]
+name = "wit-bindgen-core"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc"
+dependencies = [
+ "anyhow",
+ "heck",
+ "wit-parser",
+]
+
+[[package]]
+name = "wit-bindgen-rust"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
+dependencies = [
+ "anyhow",
+ "heck",
+ "indexmap",
+ "prettyplease",
+ "syn",
+ "wasm-metadata",
+ "wit-bindgen-core",
+ "wit-component",
+]
+
+[[package]]
+name = "wit-bindgen-rust-macro"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a"
+dependencies = [
+ "anyhow",
+ "prettyplease",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wit-bindgen-core",
+ "wit-bindgen-rust",
+]
+
+[[package]]
+name = "wit-component"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
+dependencies = [
+ "anyhow",
+ "bitflags",
+ "indexmap",
+ "log",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "wasm-encoder",
+ "wasm-metadata",
+ "wasmparser",
+ "wit-parser",
+]
+
+[[package]]
+name = "wit-parser"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
+dependencies = [
+ "anyhow",
+ "id-arena",
+ "indexmap",
+ "log",
+ "semver",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "unicode-xid",
+ "wasmparser",
+]
+
+[[package]]
+name = "writeable"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
+
+[[package]]
+name = "yoke"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
+dependencies = [
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.8.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zeroize"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
+
+[[package]]
+name = "zerotrie"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.11.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zmij"
+version = "1.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa"
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..2abc6a9
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,32 @@
+[workspace]
+members = ["crates/socket-patch-core", "crates/socket-patch-cli"]
+resolver = "2"
+
+[workspace.package]
+version = "1.2.0"
+edition = "2021"
+license = "MIT"
+repository = "https://github.com/SocketDev/socket-patch"
+
+[workspace.dependencies]
+socket-patch-core = { path = "crates/socket-patch-core", version = "1.2.0" }
+clap = { version = "4", features = ["derive"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+sha2 = "0.10"
+hex = "0.4"
+reqwest = { version = "0.12", features = ["rustls-tls", "json"], default-features = false }
+tokio = { version = "1", features = ["full"] }
+thiserror = "2"
+walkdir = "2"
+uuid = { version = "1", features = ["v4"] }
+dialoguer = "0.11"
+indicatif = "0.17"
+tempfile = "3"
+regex = "1"
+once_cell = "1"
+
+[profile.release]
+strip = true
+lto = true
+opt-level = "s"
diff --git a/EDGE_CASES.md b/EDGE_CASES.md
deleted file mode 100644
index 4bda7ea..0000000
--- a/EDGE_CASES.md
+++ /dev/null
@@ -1,464 +0,0 @@
-# Socket-Patch Setup Command: Edge Case Analysis
-
-This document provides a comprehensive analysis of all edge cases handled by the `socket-patch setup` command.
-
-## Detection Logic
-
-The setup command detects if a postinstall script is already configured by checking if the string contains `'socket-patch apply'`. This substring match is intentionally lenient to recognize various valid formats.
-
-## Edge Cases
-
-### 1. No scripts field at all
-
-**Input:**
-```json
-{
- "name": "test",
- "version": "1.0.0"
-}
-```
-
-**Behavior:** ✅ Creates scripts field and adds postinstall
-
-**Output:**
-```json
-{
- "name": "test",
- "version": "1.0.0",
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply"
- }
-}
-```
-
----
-
-### 2. Scripts field exists but no postinstall
-
-**Input:**
-```json
-{
- "scripts": {
- "test": "jest",
- "build": "tsc"
- }
-}
-```
-
-**Behavior:** ✅ Adds postinstall to existing scripts object
-
-**Output:**
-```json
-{
- "scripts": {
- "test": "jest",
- "build": "tsc",
- "postinstall": "npx @socketsecurity/socket-patch apply"
- }
-}
-```
-
----
-
-### 2a. Postinstall is null
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": null
- }
-}
-```
-
-**Behavior:** ✅ Treats as missing, adds socket-patch command
-
-**Output:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply"
- }
-}
-```
-
----
-
-### 2b. Postinstall is empty string
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": ""
- }
-}
-```
-
-**Behavior:** ✅ Replaces empty string with socket-patch command
-
-**Output:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply"
- }
-}
-```
-
----
-
-### 2c. Postinstall is whitespace only
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": " \n\t "
- }
-}
-```
-
-**Behavior:** ✅ Treats as empty, adds socket-patch command
-
-**Output:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply"
- }
-}
-```
-
----
-
-### 3. Postinstall exists but missing socket-patch setup
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "echo 'Running postinstall tasks'"
- }
-}
-```
-
-**Behavior:** ✅ Prepends socket-patch before existing script
-
-**Output:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply && echo 'Running postinstall tasks'"
- }
-}
-```
-
-**Rationale:** Socket-patch runs first to apply security patches before other setup tasks. Uses `&&` to ensure existing script only runs if patching succeeds.
-
----
-
-### 4a. socket-patch apply without npx
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Valid if socket-patch is installed as a dependency. The substring `'socket-patch apply'` is present.
-
----
-
-### 4b. npx socket-patch apply (without @socketsecurity/)
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "npx socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Valid format. The substring `'socket-patch apply'` is present.
-
----
-
-### 4c. Canonical format: npx @socketsecurity/socket-patch apply
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** This is the recommended canonical format.
-
----
-
-### 4d. pnpm socket-patch apply
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "pnpm socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Valid format for pnpm users. The substring `'socket-patch apply'` is present.
-
----
-
-### 4e. yarn socket-patch apply
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "yarn socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Valid format for yarn users. The substring `'socket-patch apply'` is present.
-
----
-
-### 4f. node_modules/.bin/socket-patch apply (direct path)
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "node_modules/.bin/socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Valid format using direct path. The substring `'socket-patch apply'` is present.
-
----
-
-### 4g. socket apply (main Socket CLI - DIFFERENT command)
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "socket apply"
- }
-}
-```
-
-**Behavior:** ⚠️ NOT recognized as configured, adds socket-patch
-
-**Output:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply && socket apply"
- }
-}
-```
-
-**Rationale:** `socket apply` is a DIFFERENT command from the main Socket CLI. The substring `'socket-patch apply'` is NOT present. Socket-patch should be added separately.
-
----
-
-### 4h. socket-patch list (wrong subcommand)
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "socket-patch list"
- }
-}
-```
-
-**Behavior:** ⚠️ NOT recognized as configured, adds socket-patch apply
-
-**Output:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply && socket-patch list"
- }
-}
-```
-
-**Rationale:** `socket-patch list` is a different subcommand. The substring `'socket-patch apply'` is NOT present (missing "apply"). Socket-patch apply should be added.
-
----
-
-### 4i. socket-patch apply with flags
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "npx @socketsecurity/socket-patch apply --silent"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Valid format with flags. The substring `'socket-patch apply'` is present.
-
----
-
-### 4j. socket-patch apply in middle of script chain
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "echo start && socket-patch apply && echo done"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Socket-patch is already in the chain. The substring `'socket-patch apply'` is present.
-
----
-
-### 4k. socket-patch apply at end of chain
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": "npm run prepare && socket-patch apply"
- }
-}
-```
-
-**Behavior:** ✅ Recognized as configured, no changes
-
-**Rationale:** Socket-patch is already present. The substring `'socket-patch apply'` is present.
-
-**Note:** While this is recognized, it's not ideal since patches won't be applied before the prepare script runs. However, we don't modify it to avoid breaking existing setups.
-
----
-
-### 5. Postinstall with invalid data types
-
-#### 5a. Number instead of string
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": 123
- }
-}
-```
-
-**Behavior:** ✅ Treated as not configured, adds socket-patch
-
-**Rationale:** Invalid type is coerced or ignored. Setup adds proper string command.
-
-#### 5b. Array instead of string
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": ["echo", "hello"]
- }
-}
-```
-
-**Behavior:** ✅ Treated as not configured, adds socket-patch
-
-**Rationale:** Invalid type. Setup adds proper string command.
-
-#### 5c. Object instead of string
-
-**Input:**
-```json
-{
- "scripts": {
- "postinstall": { "command": "echo hello" }
- }
-}
-```
-
-**Behavior:** ✅ Treated as not configured, adds socket-patch
-
-**Rationale:** Invalid type. Setup adds proper string command.
-
----
-
-### 6. Malformed JSON
-
-**Input:**
-```
-{ name: "test", invalid json }
-```
-
-**Behavior:** ❌ Throws error: "Invalid package.json: failed to parse JSON"
-
-**Rationale:** Cannot process malformed JSON. User must fix the JSON first.
-
----
-
-## Summary Table
-
-| Scenario | Contains `'socket-patch apply'`? | Behavior |
-|----------|----------------------------------|----------|
-| No scripts field | ❌ | Add scripts + postinstall |
-| Scripts exists, no postinstall | ❌ | Add postinstall |
-| Postinstall is null/undefined/empty | ❌ | Add socket-patch command |
-| Postinstall has other command | ❌ | Prepend socket-patch |
-| `socket-patch apply` | ✅ | Skip (already configured) |
-| `npx socket-patch apply` | ✅ | Skip (already configured) |
-| `npx @socketsecurity/socket-patch apply` | ✅ | Skip (already configured) |
-| `pnpm/yarn socket-patch apply` | ✅ | Skip (already configured) |
-| `node_modules/.bin/socket-patch apply` | ✅ | Skip (already configured) |
-| `socket-patch apply --flags` | ✅ | Skip (already configured) |
-| In script chain with `socket-patch apply` | ✅ | Skip (already configured) |
-| `socket apply` (main CLI) | ❌ | Add socket-patch apply |
-| `socket-patch list` (wrong subcommand) | ❌ | Add socket-patch apply |
-| Invalid data types | ❌ | Add socket-patch command |
-| Malformed JSON | N/A | Throw error |
-
-## Testing
-
-All edge cases are tested in:
-- **Unit tests:** `submodules/socket-patch/src/package-json/detect.test.ts`
-- **E2E tests:** `workspaces/api-v0/e2e-tests/tests/59_socket-patch-setup.js`
-
-Run tests:
-```bash
-# Unit tests
-cd submodules/socket-patch
-npm test
-
-# E2E tests
-pnpm --filter @socketsecurity/api-v0 run test e2e-tests/tests/59_socket-patch-setup.js
-```
diff --git a/README.md b/README.md
index 5f41bdd..995c37e 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,43 @@ Apply security patches to npm dependencies without waiting for upstream fixes.
## Installation
+### One-line install (recommended)
+
+```bash
+curl -fsSL https://raw.githubusercontent.com/SocketDev/socket-patch/main/scripts/install.sh | sh
+```
+
+Detects your platform (macOS/Linux, x64/ARM64), downloads the latest binary, and installs to `/usr/local/bin` or `~/.local/bin`. Use `sudo sh` instead of `sh` if `/usr/local/bin` requires root.
+
+<details>
+<summary>Manual download</summary>
+
+Download a prebuilt binary from the [latest release](https://github.com/SocketDev/socket-patch/releases/latest):
+
+```bash
+# macOS (Apple Silicon)
+curl -fsSL https://github.com/SocketDev/socket-patch/releases/latest/download/socket-patch-aarch64-apple-darwin.tar.gz | tar xz
+
+# macOS (Intel)
+curl -fsSL https://github.com/SocketDev/socket-patch/releases/latest/download/socket-patch-x86_64-apple-darwin.tar.gz | tar xz
+
+# Linux (x86_64)
+curl -fsSL https://github.com/SocketDev/socket-patch/releases/latest/download/socket-patch-x86_64-unknown-linux-musl.tar.gz | tar xz
+
+# Linux (ARM64)
+curl -fsSL https://github.com/SocketDev/socket-patch/releases/latest/download/socket-patch-aarch64-unknown-linux-gnu.tar.gz | tar xz
+```
+
+Then move the binary onto your `PATH`:
+
+```bash
+sudo mv socket-patch /usr/local/bin/
+```
+
+</details>
+
+### npm
+
```bash
npx @socketsecurity/socket-patch
```
@@ -14,6 +51,12 @@ Or install globally:
npm install -g @socketsecurity/socket-patch
```
+### Cargo
+
+```bash
+cargo install socket-patch-cli
+```
+
## Commands
### `apply`
diff --git a/biome.json b/biome.json
deleted file mode 100644
index f6b3f5a..0000000
--- a/biome.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
- "files": {
- "includes": ["**", "!.git", "!dist", "!node_modules"]
- },
- "formatter": {
- "enabled": true,
- "formatWithErrors": false,
- "indentStyle": "space",
- "indentWidth": 2,
- "lineEnding": "lf",
- "lineWidth": 80
- },
- "linter": {
- "enabled": false,
- "rules": {
- "style": {
- "noParameterAssign": "error",
- "useAsConstAssertion": "error",
- "useDefaultParameterLast": "error",
- "useEnumInitializers": "error",
- "useSelfClosingElements": "error",
- "useSingleVarDeclarator": "error",
- "noUnusedTemplateLiteral": "error",
- "useNumberNamespace": "error",
- "noInferrableTypes": "error",
- "noUselessElse": "error"
- }
- }
- },
- "javascript": {
- "formatter": {
- "arrowParentheses": "asNeeded",
- "semicolons": "asNeeded",
- "quoteStyle": "single",
- "jsxQuoteStyle": "single",
- "trailingCommas": "all"
- }
- },
- "json": {
- "formatter": {
- "trailingCommas": "none",
- "indentStyle": "space",
- "indentWidth": 2
- }
- }
-}
diff --git a/crates/socket-patch-cli/Cargo.toml b/crates/socket-patch-cli/Cargo.toml
new file mode 100644
index 0000000..917946e
--- /dev/null
+++ b/crates/socket-patch-cli/Cargo.toml
@@ -0,0 +1,27 @@
+[package]
+name = "socket-patch-cli"
+description = "CLI binary for socket-patch: apply, rollback, get, scan security patches"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+repository.workspace = true
+
+[[bin]]
+name = "socket-patch"
+path = "src/main.rs"
+
+[dependencies]
+socket-patch-core = { workspace = true }
+clap = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+tokio = { workspace = true }
+dialoguer = { workspace = true }
+indicatif = { workspace = true }
+uuid = { workspace = true }
+regex = { workspace = true }
+tempfile = { workspace = true }
+
+[dev-dependencies]
+sha2 = { workspace = true }
+hex = { workspace = true }
diff --git a/crates/socket-patch-cli/src/commands/apply.rs b/crates/socket-patch-cli/src/commands/apply.rs
new file mode 100644
index 0000000..24aeb7d
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/apply.rs
@@ -0,0 +1,378 @@
+use clap::Args;
+use socket_patch_core::api::blob_fetcher::{
+ fetch_missing_blobs, format_fetch_result, get_missing_blobs,
+};
+use socket_patch_core::api::client::get_api_client_from_env;
+use socket_patch_core::constants::DEFAULT_PATCH_MANIFEST_PATH;
+use socket_patch_core::crawlers::{CrawlerOptions, NpmCrawler, PythonCrawler};
+use socket_patch_core::manifest::operations::read_manifest;
+use socket_patch_core::patch::apply::{apply_package_patch, verify_file_patch, ApplyResult};
+use socket_patch_core::utils::cleanup_blobs::{cleanup_unused_blobs, format_cleanup_result};
+use socket_patch_core::utils::purl::{is_npm_purl, is_pypi_purl, strip_purl_qualifiers};
+use socket_patch_core::utils::telemetry::{track_patch_applied, track_patch_apply_failed};
+use std::collections::{HashMap, HashSet};
+use std::path::{Path, PathBuf};
+
+#[derive(Args)]
+pub struct ApplyArgs {
+ /// Working directory
+ #[arg(long, default_value = ".")]
+ pub cwd: PathBuf,
+
+ /// Verify patches can be applied without modifying files
+ #[arg(short = 'd', long = "dry-run", default_value_t = false)]
+ pub dry_run: bool,
+
+ /// Only output errors
+ #[arg(short = 's', long, default_value_t = false)]
+ pub silent: bool,
+
+ /// Path to patch manifest file
+ #[arg(short = 'm', long = "manifest-path", default_value = DEFAULT_PATCH_MANIFEST_PATH)]
+ pub manifest_path: String,
+
+ /// Do not download missing blobs, fail if any are missing
+ #[arg(long, default_value_t = false)]
+ pub offline: bool,
+
+ /// Apply patches to globally installed npm packages
+ #[arg(short = 'g', long, default_value_t = false)]
+ pub global: bool,
+
+ /// Custom path to global node_modules
+ #[arg(long = "global-prefix")]
+ pub global_prefix: Option<PathBuf>,
+
+ /// Restrict patching to specific ecosystems
+ #[arg(long, value_delimiter = ',')]
+ pub ecosystems: Option<Vec<String>>,
+}
+
+pub async fn run(args: ApplyArgs) -> i32 {
+ let api_token = std::env::var("SOCKET_API_TOKEN").ok();
+ let org_slug = std::env::var("SOCKET_ORG_SLUG").ok();
+
+ let manifest_path = if Path::new(&args.manifest_path).is_absolute() {
+ PathBuf::from(&args.manifest_path)
+ } else {
+ args.cwd.join(&args.manifest_path)
+ };
+
+ // Check if manifest exists - exit successfully if no .socket folder is set up
+ if tokio::fs::metadata(&manifest_path).await.is_err() {
+ if !args.silent {
+ println!("No .socket folder found, skipping patch application.");
+ }
+ return 0;
+ }
+
+ match apply_patches_inner(&args, &manifest_path).await {
+ Ok((success, results)) => {
+ // Print results
+ if !args.silent && !results.is_empty() {
+ let patched: Vec<_> = results.iter().filter(|r| r.success).collect();
+ let already_patched: Vec<_> = results
+ .iter()
+ .filter(|r| {
+ r.files_verified
+ .iter()
+ .all(|f| f.status == socket_patch_core::patch::apply::VerifyStatus::AlreadyPatched)
+ })
+ .collect();
+
+ if args.dry_run {
+ println!("\nPatch verification complete:");
+ println!(" {} package(s) can be patched", patched.len());
+ if !already_patched.is_empty() {
+ println!(" {} package(s) already patched", already_patched.len());
+ }
+ } else {
+ println!("\nPatched packages:");
+ for result in &patched {
+ if !result.files_patched.is_empty() {
+ println!(" {}", result.package_key);
+ } else if result.files_verified.iter().all(|f| {
+ f.status == socket_patch_core::patch::apply::VerifyStatus::AlreadyPatched
+ }) {
+ println!(" {} (already patched)", result.package_key);
+ }
+ }
+ }
+ }
+
+ // Track telemetry
+ let patched_count = results
+ .iter()
+ .filter(|r| r.success && !r.files_patched.is_empty())
+ .count();
+ if success {
+ track_patch_applied(patched_count, args.dry_run, api_token.as_deref(), org_slug.as_deref()).await;
+ } else {
+ track_patch_apply_failed("One or more patches failed to apply", args.dry_run, api_token.as_deref(), org_slug.as_deref()).await;
+ }
+
+ if success { 0 } else { 1 }
+ }
+ Err(e) => {
+ track_patch_apply_failed(&e, args.dry_run, api_token.as_deref(), org_slug.as_deref()).await;
+ if !args.silent {
+ eprintln!("Error: {e}");
+ }
+ 1
+ }
+ }
+}
+
+async fn apply_patches_inner(
+ args: &ApplyArgs,
+ manifest_path: &Path,
+) -> Result<(bool, Vec<ApplyResult>), String> {
+ let manifest = read_manifest(manifest_path)
+ .await
+ .map_err(|e| e.to_string())?
+ .ok_or_else(|| "Invalid manifest".to_string())?;
+
+ let socket_dir = manifest_path.parent().unwrap();
+ let blobs_path = socket_dir.join("blobs");
+ tokio::fs::create_dir_all(&blobs_path)
+ .await
+ .map_err(|e| e.to_string())?;
+
+ // Check for and download missing blobs
+ let missing_blobs = get_missing_blobs(&manifest, &blobs_path).await;
+ if !missing_blobs.is_empty() {
+ if args.offline {
+ if !args.silent {
+ eprintln!(
+ "Error: {} blob(s) are missing and --offline mode is enabled.",
+ missing_blobs.len()
+ );
+ eprintln!("Run \"socket-patch repair\" to download missing blobs.");
+ }
+ return Ok((false, Vec::new()));
+ }
+
+ if !args.silent {
+ println!("Downloading {} missing blob(s)...", missing_blobs.len());
+ }
+
+ let (client, _) = get_api_client_from_env(None);
+ let fetch_result = fetch_missing_blobs(&manifest, &blobs_path, &client, None).await;
+
+ if !args.silent {
+ println!("{}", format_fetch_result(&fetch_result));
+ }
+
+ if fetch_result.failed > 0 {
+ if !args.silent {
+ eprintln!("Some blobs could not be downloaded. Cannot apply patches.");
+ }
+ return Ok((false, Vec::new()));
+ }
+ }
+
+ // Partition manifest PURLs by ecosystem
+ let manifest_purls: Vec<String> = manifest.patches.keys().cloned().collect();
+ let mut npm_purls: Vec<String> = manifest_purls.iter().filter(|p| is_npm_purl(p)).cloned().collect();
+ let mut pypi_purls: Vec<String> = manifest_purls.iter().filter(|p| is_pypi_purl(p)).cloned().collect();
+
+ // Filter by ecosystem if specified
+ if let Some(ref ecosystems) = args.ecosystems {
+ if !ecosystems.iter().any(|e| e == "npm") {
+ npm_purls.clear();
+ }
+ if !ecosystems.iter().any(|e| e == "pypi") {
+ pypi_purls.clear();
+ }
+ }
+
+ let crawler_options = CrawlerOptions {
+ cwd: args.cwd.clone(),
+ global: args.global,
+ global_prefix: args.global_prefix.clone(),
+ batch_size: 100,
+ };
+
+ let mut all_packages: HashMap<String, PathBuf> = HashMap::new();
+
+ // Find npm packages
+ if !npm_purls.is_empty() {
+ let npm_crawler = NpmCrawler;
+ match npm_crawler.get_node_modules_paths(&crawler_options).await {
+ Ok(nm_paths) => {
+ if (args.global || args.global_prefix.is_some()) && !args.silent {
+ if let Some(first) = nm_paths.first() {
+ println!("Using global npm packages at: {}", first.display());
+ }
+ }
+ for nm_path in &nm_paths {
+ if let Ok(packages) = npm_crawler.find_by_purls(nm_path, &npm_purls).await {
+ for (purl, pkg) in packages {
+ all_packages.entry(purl).or_insert(pkg.path);
+ }
+ }
+ }
+ }
+ Err(e) => {
+ if !args.silent {
+ eprintln!("Failed to find npm packages: {e}");
+ }
+ }
+ }
+ }
+
+ // Find Python packages
+ if !pypi_purls.is_empty() {
+ let python_crawler = PythonCrawler;
+ let base_pypi_purls: Vec<String> = pypi_purls
+ .iter()
+ .map(|p| strip_purl_qualifiers(p).to_string())
+ .collect::<HashSet<_>>()
+ .into_iter()
+ .collect();
+
+ match python_crawler.get_site_packages_paths(&crawler_options).await {
+ Ok(sp_paths) => {
+ for sp_path in &sp_paths {
+ if let Ok(packages) = python_crawler.find_by_purls(sp_path, &base_pypi_purls).await {
+ for (purl, pkg) in packages {
+ all_packages.entry(purl).or_insert(pkg.path);
+ }
+ }
+ }
+ }
+ Err(e) => {
+ if !args.silent {
+ eprintln!("Failed to find Python packages: {e}");
+ }
+ }
+ }
+ }
+
+ if all_packages.is_empty() && npm_purls.is_empty() && pypi_purls.is_empty() {
+ if !args.silent {
+ if args.global || args.global_prefix.is_some() {
+ eprintln!("No global packages found");
+ } else {
+ eprintln!("No package directories found");
+ }
+ }
+ return Ok((false, Vec::new()));
+ }
+
+ if all_packages.is_empty() {
+ if !args.silent {
+ println!("No packages found that match available patches");
+ }
+ return Ok((true, Vec::new()));
+ }
+
+ // Apply patches
+ let mut results: Vec<ApplyResult> = Vec::new();
+ let mut has_errors = false;
+
+ // Group pypi PURLs by base
+ let mut pypi_qualified_groups: HashMap<String, Vec<String>> = HashMap::new();
+ for purl in &pypi_purls {
+ let base = strip_purl_qualifiers(purl).to_string();
+ pypi_qualified_groups
+ .entry(base)
+ .or_default()
+ .push(purl.clone());
+ }
+
+ let mut applied_base_purls: HashSet<String> = HashSet::new();
+
+ for (purl, pkg_path) in &all_packages {
+ if is_pypi_purl(purl) {
+ let base_purl = strip_purl_qualifiers(purl).to_string();
+ if applied_base_purls.contains(&base_purl) {
+ continue;
+ }
+
+ let variants = pypi_qualified_groups
+ .get(&base_purl)
+ .cloned()
+ .unwrap_or_else(|| vec![base_purl.clone()]);
+ let mut applied = false;
+
+ for variant_purl in &variants {
+ let patch = match manifest.patches.get(variant_purl) {
+ Some(p) => p,
+ None => continue,
+ };
+
+ // Check first file hash match
+ if let Some((file_name, file_info)) = patch.files.iter().next() {
+ let verify = verify_file_patch(pkg_path, file_name, file_info).await;
+ if verify.status == socket_patch_core::patch::apply::VerifyStatus::HashMismatch {
+ continue;
+ }
+ }
+
+ let result = apply_package_patch(
+ variant_purl,
+ pkg_path,
+ &patch.files,
+ &blobs_path,
+ args.dry_run,
+ )
+ .await;
+
+ if result.success {
+ applied = true;
+ applied_base_purls.insert(base_purl.clone());
+ results.push(result);
+ break;
+ } else {
+ results.push(result);
+ }
+ }
+
+ if !applied {
+ has_errors = true;
+ if !args.silent {
+ eprintln!("Failed to patch {base_purl}: no matching variant found");
+ }
+ }
+ } else {
+ // npm PURLs: direct lookup
+ let patch = match manifest.patches.get(purl) {
+ Some(p) => p,
+ None => continue,
+ };
+
+ let result = apply_package_patch(
+ purl,
+ pkg_path,
+ &patch.files,
+ &blobs_path,
+ args.dry_run,
+ )
+ .await;
+
+ if !result.success {
+ has_errors = true;
+ if !args.silent {
+ eprintln!(
+ "Failed to patch {}: {}",
+ purl,
+ result.error.as_deref().unwrap_or("unknown error")
+ );
+ }
+ }
+ results.push(result);
+ }
+ }
+
+ // Clean up unused blobs
+ if !args.silent {
+ if let Ok(cleanup_result) = cleanup_unused_blobs(&manifest, &blobs_path, args.dry_run).await {
+ if cleanup_result.blobs_removed > 0 {
+ println!("\n{}", format_cleanup_result(&cleanup_result, args.dry_run));
+ }
+ }
+ }
+
+ Ok((!has_errors, results))
+}
diff --git a/crates/socket-patch-cli/src/commands/get.rs b/crates/socket-patch-cli/src/commands/get.rs
new file mode 100644
index 0000000..30987be
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/get.rs
@@ -0,0 +1,685 @@
+use clap::Args;
+use regex::Regex;
+use socket_patch_core::api::client::get_api_client_from_env;
+use socket_patch_core::api::types::{PatchSearchResult, SearchResponse};
+use socket_patch_core::crawlers::{CrawlerOptions, NpmCrawler, PythonCrawler};
+use socket_patch_core::manifest::operations::{read_manifest, write_manifest};
+use socket_patch_core::manifest::schema::{
+ PatchFileInfo, PatchManifest, PatchRecord, VulnerabilityInfo,
+};
+use socket_patch_core::utils::fuzzy_match::fuzzy_match_packages;
+use socket_patch_core::utils::purl::is_purl;
+use std::collections::HashMap;
+use std::io::{self, Write};
+use std::path::PathBuf;
+
+#[derive(Args)]
+pub struct GetArgs {
+ /// Patch identifier (UUID, CVE ID, GHSA ID, PURL, or package name)
+ pub identifier: String,
+
+ /// Organization slug
+ #[arg(long)]
+ pub org: Option,
+
+ /// Working directory
+ #[arg(long, default_value = ".")]
+ pub cwd: PathBuf,
+
+ /// Force identifier to be treated as a patch UUID
+ #[arg(long, default_value_t = false)]
+ pub id: bool,
+
+ /// Force identifier to be treated as a CVE ID
+ #[arg(long, default_value_t = false)]
+ pub cve: bool,
+
+ /// Force identifier to be treated as a GHSA ID
+ #[arg(long, default_value_t = false)]
+ pub ghsa: bool,
+
+ /// Force identifier to be treated as a package name
+ #[arg(short = 'p', long = "package", default_value_t = false)]
+ pub package: bool,
+
+ /// Skip confirmation prompt for multiple patches
+ #[arg(short = 'y', long, default_value_t = false)]
+ pub yes: bool,
+
+ /// Socket API URL (overrides SOCKET_API_URL env var)
+ #[arg(long = "api-url")]
+ pub api_url: Option,
+
+ /// Socket API token (overrides SOCKET_API_TOKEN env var)
+ #[arg(long = "api-token")]
+ pub api_token: Option,
+
+ /// Download patch without applying it
+ #[arg(long = "no-apply", default_value_t = false)]
+ pub no_apply: bool,
+
+ /// Apply patch to globally installed npm packages
+ #[arg(short = 'g', long, default_value_t = false)]
+ pub global: bool,
+
+ /// Custom path to global node_modules
+ #[arg(long = "global-prefix")]
+ pub global_prefix: Option,
+
+ /// Apply patch immediately without saving to .socket folder
+ #[arg(long = "one-off", default_value_t = false)]
+ pub one_off: bool,
+}
+
/// How the user-supplied identifier is interpreted when searching for
/// patches — either forced via a CLI flag or auto-detected by pattern
/// (see `detect_identifier_type`).
#[derive(Debug, PartialEq)]
enum IdentifierType {
    Uuid,    // patch UUID (8-4-4-4-12 hex)
    Cve,     // CVE-YYYY-NNNN
    Ghsa,    // GHSA-xxxx-xxxx-xxxx
    Purl,    // package URL ("pkg:...")
    Package, // free-text package name search
}
+
+fn detect_identifier_type(identifier: &str) -> Option {
+ let uuid_re = Regex::new(r"(?i)^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$").unwrap();
+ let cve_re = Regex::new(r"(?i)^CVE-\d{4}-\d+$").unwrap();
+ let ghsa_re = Regex::new(r"(?i)^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$").unwrap();
+
+ if uuid_re.is_match(identifier) {
+ Some(IdentifierType::Uuid)
+ } else if cve_re.is_match(identifier) {
+ Some(IdentifierType::Cve)
+ } else if ghsa_re.is_match(identifier) {
+ Some(IdentifierType::Ghsa)
+ } else if is_purl(identifier) {
+ Some(IdentifierType::Purl)
+ } else {
+ None
+ }
+}
+
+pub async fn run(args: GetArgs) -> i32 {
+ // Validate flags
+ let type_flags = [args.id, args.cve, args.ghsa, args.package]
+ .iter()
+ .filter(|&&f| f)
+ .count();
+ if type_flags > 1 {
+ eprintln!("Error: Only one of --id, --cve, --ghsa, or --package can be specified");
+ return 1;
+ }
+ if args.one_off && args.no_apply {
+ eprintln!("Error: --one-off and --no-apply cannot be used together");
+ return 1;
+ }
+
+ // Override env vars
+ if let Some(ref url) = args.api_url {
+ std::env::set_var("SOCKET_API_URL", url);
+ }
+ if let Some(ref token) = args.api_token {
+ std::env::set_var("SOCKET_API_TOKEN", token);
+ }
+
+ let (api_client, use_public_proxy) = get_api_client_from_env(args.org.as_deref());
+
+ if !use_public_proxy && args.org.is_none() {
+ eprintln!("Error: --org is required when using SOCKET_API_TOKEN. Provide an organization slug.");
+ return 1;
+ }
+
+ let effective_org_slug = if use_public_proxy {
+ None
+ } else {
+ args.org.as_deref()
+ };
+
+ // Determine identifier type
+ let id_type = if args.id {
+ IdentifierType::Uuid
+ } else if args.cve {
+ IdentifierType::Cve
+ } else if args.ghsa {
+ IdentifierType::Ghsa
+ } else if args.package {
+ IdentifierType::Package
+ } else {
+ match detect_identifier_type(&args.identifier) {
+ Some(t) => {
+ println!("Detected identifier type: {:?}", t);
+ t
+ }
+ None => {
+ println!("Treating \"{}\" as a package name search", args.identifier);
+ IdentifierType::Package
+ }
+ }
+ };
+
+ // Handle UUID: fetch and download directly
+ if id_type == IdentifierType::Uuid {
+ println!("Fetching patch by UUID: {}", args.identifier);
+ match api_client
+ .fetch_patch(effective_org_slug, &args.identifier)
+ .await
+ {
+ Ok(Some(patch)) => {
+ if patch.tier == "paid" && use_public_proxy {
+ println!("\n\x1b[33mThis patch requires a paid subscription to download.\x1b[0m");
+ println!("\n Patch: {}", patch.purl);
+ println!(" Tier: \x1b[33mpaid\x1b[0m");
+ println!("\n Upgrade at: \x1b[36mhttps://socket.dev/pricing\x1b[0m\n");
+ return 0;
+ }
+
+ // Save to manifest
+ return save_and_apply_patch(&args, &patch.purl, &patch.uuid, effective_org_slug)
+ .await;
+ }
+ Ok(None) => {
+ println!("No patch found with UUID: {}", args.identifier);
+ return 0;
+ }
+ Err(e) => {
+ eprintln!("Error: {e}");
+ return 1;
+ }
+ }
+ }
+
+ // For CVE/GHSA/PURL/package, search first
+ let search_response: SearchResponse = match id_type {
+ IdentifierType::Cve => {
+ println!("Searching patches for CVE: {}", args.identifier);
+ match api_client
+ .search_patches_by_cve(effective_org_slug, &args.identifier)
+ .await
+ {
+ Ok(r) => r,
+ Err(e) => {
+ eprintln!("Error: {e}");
+ return 1;
+ }
+ }
+ }
+ IdentifierType::Ghsa => {
+ println!("Searching patches for GHSA: {}", args.identifier);
+ match api_client
+ .search_patches_by_ghsa(effective_org_slug, &args.identifier)
+ .await
+ {
+ Ok(r) => r,
+ Err(e) => {
+ eprintln!("Error: {e}");
+ return 1;
+ }
+ }
+ }
+ IdentifierType::Purl => {
+ println!("Searching patches for PURL: {}", args.identifier);
+ match api_client
+ .search_patches_by_package(effective_org_slug, &args.identifier)
+ .await
+ {
+ Ok(r) => r,
+ Err(e) => {
+ eprintln!("Error: {e}");
+ return 1;
+ }
+ }
+ }
+ IdentifierType::Package => {
+ println!("Enumerating packages...");
+ let crawler_options = CrawlerOptions {
+ cwd: args.cwd.clone(),
+ global: args.global,
+ global_prefix: args.global_prefix.clone(),
+ batch_size: 100,
+ };
+ let npm_crawler = NpmCrawler;
+ let python_crawler = PythonCrawler;
+ let npm_packages = npm_crawler.crawl_all(&crawler_options).await;
+ let python_packages = python_crawler.crawl_all(&crawler_options).await;
+ let mut all_packages = npm_packages;
+ all_packages.extend(python_packages);
+
+ if all_packages.is_empty() {
+ if args.global {
+ println!("No global packages found.");
+ } else {
+ println!("No packages found. Run npm/yarn/pnpm/pip install first.");
+ }
+ return 0;
+ }
+
+ println!("Found {} packages", all_packages.len());
+
+ let matches = fuzzy_match_packages(&args.identifier, &all_packages, 20);
+
+ if matches.is_empty() {
+ println!("No packages matching \"{}\" found.", args.identifier);
+ return 0;
+ }
+
+ println!(
+ "Found {} matching package(s), checking for available patches...",
+ matches.len()
+ );
+
+ // Search for patches for the best match
+ let best_match = &matches[0];
+ match api_client
+ .search_patches_by_package(effective_org_slug, &best_match.purl)
+ .await
+ {
+ Ok(r) => r,
+ Err(e) => {
+ eprintln!("Error: {e}");
+ return 1;
+ }
+ }
+ }
+ _ => unreachable!(),
+ };
+
+ if search_response.patches.is_empty() {
+ println!(
+ "No patches found for {:?}: {}",
+ id_type, args.identifier
+ );
+ return 0;
+ }
+
+ // Display results
+ display_search_results(&search_response.patches, search_response.can_access_paid_patches);
+
+ // Filter accessible patches
+ let accessible: Vec<_> = search_response
+ .patches
+ .iter()
+ .filter(|p| p.tier == "free" || search_response.can_access_paid_patches)
+ .collect();
+
+ if accessible.is_empty() {
+ println!("\n\x1b[33mAll available patches require a paid subscription.\x1b[0m");
+ println!("\n Upgrade at: \x1b[36mhttps://socket.dev/pricing\x1b[0m\n");
+ return 0;
+ }
+
+ // Prompt for confirmation
+ if accessible.len() > 1 && !args.yes {
+ print!("Download {} patch(es)? [y/N] ", accessible.len());
+ io::stdout().flush().unwrap();
+ let mut answer = String::new();
+ io::stdin().read_line(&mut answer).unwrap();
+ let answer = answer.trim().to_lowercase();
+ if answer != "y" && answer != "yes" {
+ println!("Download cancelled.");
+ return 0;
+ }
+ }
+
+ // Download and save patches
+ let socket_dir = args.cwd.join(".socket");
+ let blobs_dir = socket_dir.join("blobs");
+ let manifest_path = socket_dir.join("manifest.json");
+
+ tokio::fs::create_dir_all(&socket_dir).await.ok();
+ tokio::fs::create_dir_all(&blobs_dir).await.ok();
+
+ let mut manifest = match read_manifest(&manifest_path).await {
+ Ok(Some(m)) => m,
+ _ => PatchManifest::new(),
+ };
+
+ println!("\nDownloading {} patch(es)...", accessible.len());
+
+ let mut patches_added = 0;
+ let mut patches_skipped = 0;
+ let mut patches_failed = 0;
+
+ for search_result in &accessible {
+ match api_client
+ .fetch_patch(effective_org_slug, &search_result.uuid)
+ .await
+ {
+ Ok(Some(patch)) => {
+ // Check if already in manifest
+ if manifest
+ .patches
+ .get(&patch.purl)
+ .is_some_and(|p| p.uuid == patch.uuid)
+ {
+ println!(" [skip] {} (already in manifest)", patch.purl);
+ patches_skipped += 1;
+ continue;
+ }
+
+ // Save blob contents (afterHash only)
+ let mut files = HashMap::new();
+ for (file_path, file_info) in &patch.files {
+ if let (Some(ref before), Some(ref after)) =
+ (&file_info.before_hash, &file_info.after_hash)
+ {
+ files.insert(
+ file_path.clone(),
+ PatchFileInfo {
+ before_hash: before.clone(),
+ after_hash: after.clone(),
+ },
+ );
+ }
+
+ // Save after blob content
+ if let (Some(ref blob_content), Some(ref after_hash)) =
+ (&file_info.blob_content, &file_info.after_hash)
+ {
+ if let Ok(decoded) =
+ base64_decode(blob_content)
+ {
+ let blob_path = blobs_dir.join(after_hash);
+ tokio::fs::write(&blob_path, &decoded).await.ok();
+ }
+ }
+ }
+
+ // Build vulnerabilities
+ let vulnerabilities: HashMap = patch
+ .vulnerabilities
+ .iter()
+ .map(|(id, v)| {
+ (
+ id.clone(),
+ VulnerabilityInfo {
+ cves: v.cves.clone(),
+ summary: v.summary.clone(),
+ severity: v.severity.clone(),
+ description: v.description.clone(),
+ },
+ )
+ })
+ .collect();
+
+ manifest.patches.insert(
+ patch.purl.clone(),
+ PatchRecord {
+ uuid: patch.uuid.clone(),
+ exported_at: patch.published_at.clone(),
+ files,
+ vulnerabilities,
+ description: patch.description.clone(),
+ license: patch.license.clone(),
+ tier: patch.tier.clone(),
+ },
+ );
+
+ println!(" [add] {}", patch.purl);
+ patches_added += 1;
+ }
+ Ok(None) => {
+ println!(" [fail] {} (could not fetch details)", search_result.purl);
+ patches_failed += 1;
+ }
+ Err(e) => {
+ println!(" [fail] {} ({e})", search_result.purl);
+ patches_failed += 1;
+ }
+ }
+ }
+
+ // Write manifest
+ if let Err(e) = write_manifest(&manifest_path, &manifest).await {
+ eprintln!("Error writing manifest: {e}");
+ return 1;
+ }
+
+ println!("\nPatches saved to {}", manifest_path.display());
+ println!(" Added: {patches_added}");
+ if patches_skipped > 0 {
+ println!(" Skipped: {patches_skipped}");
+ }
+ if patches_failed > 0 {
+ println!(" Failed: {patches_failed}");
+ }
+
+ // Auto-apply unless --no-apply
+ if !args.no_apply && patches_added > 0 {
+ println!("\nApplying patches...");
+ let apply_args = super::apply::ApplyArgs {
+ cwd: args.cwd.clone(),
+ dry_run: false,
+ silent: false,
+ manifest_path: manifest_path.display().to_string(),
+ offline: false,
+ global: args.global,
+ global_prefix: args.global_prefix.clone(),
+ ecosystems: None,
+ };
+ let code = super::apply::run(apply_args).await;
+ if code != 0 {
+ eprintln!("\nSome patches could not be applied.");
+ }
+ }
+
+ 0
+}
+
+fn display_search_results(patches: &[PatchSearchResult], can_access_paid: bool) {
+ println!("\nFound patches:\n");
+
+ for (i, patch) in patches.iter().enumerate() {
+ let tier_label = if patch.tier == "paid" {
+ " [PAID]"
+ } else {
+ " [FREE]"
+ };
+ let access_label = if patch.tier == "paid" && !can_access_paid {
+ " (no access)"
+ } else {
+ ""
+ };
+
+ println!(" {}. {}{}{}", i + 1, patch.purl, tier_label, access_label);
+ println!(" UUID: {}", patch.uuid);
+ if !patch.description.is_empty() {
+ let desc = if patch.description.len() > 80 {
+ format!("{}...", &patch.description[..77])
+ } else {
+ patch.description.clone()
+ };
+ println!(" Description: {desc}");
+ }
+
+ let vuln_ids: Vec<_> = patch.vulnerabilities.keys().collect();
+ if !vuln_ids.is_empty() {
+ let vuln_summary: Vec = patch
+ .vulnerabilities
+ .iter()
+ .map(|(id, vuln)| {
+ let cves = if vuln.cves.is_empty() {
+ id.to_string()
+ } else {
+ vuln.cves.join(", ")
+ };
+ format!("{cves} ({})", vuln.severity)
+ })
+ .collect();
+ println!(" Fixes: {}", vuln_summary.join(", "));
+ }
+ println!();
+ }
+}
+
+async fn save_and_apply_patch(
+ args: &GetArgs,
+ _purl: &str,
+ uuid: &str,
+ _org_slug: Option<&str>,
+) -> i32 {
+ // For UUID mode, fetch and save
+ let (api_client, _) = get_api_client_from_env(args.org.as_deref());
+ let effective_org = if args.org.is_some() {
+ args.org.as_deref()
+ } else {
+ None
+ };
+
+ let patch = match api_client.fetch_patch(effective_org, uuid).await {
+ Ok(Some(p)) => p,
+ Ok(None) => {
+ println!("No patch found with UUID: {uuid}");
+ return 0;
+ }
+ Err(e) => {
+ eprintln!("Error: {e}");
+ return 1;
+ }
+ };
+
+ let socket_dir = args.cwd.join(".socket");
+ let blobs_dir = socket_dir.join("blobs");
+ let manifest_path = socket_dir.join("manifest.json");
+
+ tokio::fs::create_dir_all(&blobs_dir).await.ok();
+
+ let mut manifest = match read_manifest(&manifest_path).await {
+ Ok(Some(m)) => m,
+ _ => PatchManifest::new(),
+ };
+
+ // Build and save patch record
+ let mut files = HashMap::new();
+ for (file_path, file_info) in &patch.files {
+ if let Some(ref after) = file_info.after_hash {
+ files.insert(
+ file_path.clone(),
+ PatchFileInfo {
+ before_hash: file_info
+ .before_hash
+ .clone()
+ .unwrap_or_default(),
+ after_hash: after.clone(),
+ },
+ );
+ }
+ if let (Some(ref blob_content), Some(ref after_hash)) =
+ (&file_info.blob_content, &file_info.after_hash)
+ {
+ if let Ok(decoded) = base64_decode(blob_content) {
+ tokio::fs::write(blobs_dir.join(after_hash), &decoded)
+ .await
+ .ok();
+ }
+ }
+ // Also store beforeHash blob if present (needed for rollback)
+ if let (Some(ref before_blob), Some(ref before_hash)) =
+ (&file_info.before_blob_content, &file_info.before_hash)
+ {
+ if let Ok(decoded) = base64_decode(before_blob) {
+ tokio::fs::write(blobs_dir.join(before_hash), &decoded)
+ .await
+ .ok();
+ }
+ }
+ }
+
+ let vulnerabilities: HashMap = patch
+ .vulnerabilities
+ .iter()
+ .map(|(id, v)| {
+ (
+ id.clone(),
+ VulnerabilityInfo {
+ cves: v.cves.clone(),
+ summary: v.summary.clone(),
+ severity: v.severity.clone(),
+ description: v.description.clone(),
+ },
+ )
+ })
+ .collect();
+
+ let added = manifest
+ .patches
+ .get(&patch.purl)
+ .is_none_or(|p| p.uuid != patch.uuid);
+
+ manifest.patches.insert(
+ patch.purl.clone(),
+ PatchRecord {
+ uuid: patch.uuid.clone(),
+ exported_at: patch.published_at.clone(),
+ files,
+ vulnerabilities,
+ description: patch.description.clone(),
+ license: patch.license.clone(),
+ tier: patch.tier.clone(),
+ },
+ );
+
+ if let Err(e) = write_manifest(&manifest_path, &manifest).await {
+ eprintln!("Error writing manifest: {e}");
+ return 1;
+ }
+
+ println!("\nPatch saved to {}", manifest_path.display());
+ if added {
+ println!(" Added: 1");
+ } else {
+ println!(" Skipped: 1 (already exists)");
+ }
+
+ if !args.no_apply {
+ println!("\nApplying patches...");
+ let apply_args = super::apply::ApplyArgs {
+ cwd: args.cwd.clone(),
+ dry_run: false,
+ silent: false,
+ manifest_path: manifest_path.display().to_string(),
+ offline: false,
+ global: args.global,
+ global_prefix: args.global_prefix.clone(),
+ ecosystems: None,
+ };
+ let code = super::apply::run(apply_args).await;
+ if code != 0 {
+ eprintln!("\nSome patches could not be applied.");
+ }
+ }
+
+ 0
+}
+
/// Decode a standard-alphabet base64 string into raw bytes.
///
/// Padding (`=`) and CR/LF are skipped; any other character outside the
/// base64 alphabet is an error. Streams 6 bits per symbol through a small
/// accumulator, emitting a byte whenever 8+ bits are buffered.
///
/// Fix: the return type lost its parameter during extraction; restored as
/// `Result<Vec<u8>, String>`.
fn base64_decode(input: &str) -> Result<Vec<u8>, String> {
    // Simple base64 decoder: map each alphabet byte to its 6-bit value;
    // 255 marks "not a base64 character".
    let chars = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let mut table = [255u8; 256];
    for (i, &c) in chars.iter().enumerate() {
        table[c as usize] = i as u8;
    }

    let input = input.as_bytes();
    let mut output = Vec::with_capacity(input.len() * 3 / 4);

    let mut buf = 0u32;  // bit accumulator
    let mut bits = 0u32; // number of valid bits currently in `buf`

    for &b in input {
        if b == b'=' || b == b'\n' || b == b'\r' {
            continue;
        }
        let val = table[b as usize];
        if val == 255 {
            return Err(format!("Invalid base64 character: {}", b as char));
        }
        buf = (buf << 6) | val as u32;
        bits += 6;
        if bits >= 8 {
            bits -= 8;
            output.push((buf >> bits) as u8);
            buf &= (1 << bits) - 1;
        }
    }

    Ok(output)
}
diff --git a/crates/socket-patch-cli/src/commands/list.rs b/crates/socket-patch-cli/src/commands/list.rs
new file mode 100644
index 0000000..d678fca
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/list.rs
@@ -0,0 +1,141 @@
+use clap::Args;
+use socket_patch_core::constants::DEFAULT_PATCH_MANIFEST_PATH;
+use socket_patch_core::manifest::operations::read_manifest;
+use std::path::{Path, PathBuf};
+
/// Arguments for the `list` command: show the patches recorded in the
/// local manifest, either human-readable or as JSON.
#[derive(Args)]
pub struct ListArgs {
    /// Working directory
    #[arg(long, default_value = ".")]
    pub cwd: PathBuf,

    /// Path to patch manifest file
    // Relative paths are resolved against `cwd` in `run`.
    #[arg(short = 'm', long = "manifest-path", default_value = DEFAULT_PATCH_MANIFEST_PATH)]
    pub manifest_path: String,

    /// Output as JSON
    #[arg(long, default_value_t = false)]
    pub json: bool,
}
+
+pub async fn run(args: ListArgs) -> i32 {
+ let manifest_path = if Path::new(&args.manifest_path).is_absolute() {
+ PathBuf::from(&args.manifest_path)
+ } else {
+ args.cwd.join(&args.manifest_path)
+ };
+
+ // Check if manifest exists
+ if tokio::fs::metadata(&manifest_path).await.is_err() {
+ if args.json {
+ println!(
+ "{}",
+ serde_json::json!({
+ "error": "Manifest not found",
+ "path": manifest_path.display().to_string()
+ })
+ );
+ } else {
+ eprintln!("Manifest not found at {}", manifest_path.display());
+ }
+ return 1;
+ }
+
+ match read_manifest(&manifest_path).await {
+ Ok(Some(manifest)) => {
+ let patch_entries: Vec<_> = manifest.patches.iter().collect();
+
+ if patch_entries.is_empty() {
+ if args.json {
+ println!("{}", serde_json::to_string_pretty(&serde_json::json!({ "patches": [] })).unwrap());
+ } else {
+ println!("No patches found in manifest.");
+ }
+ return 0;
+ }
+
+ if args.json {
+ let json_output = serde_json::json!({
+ "patches": patch_entries.iter().map(|(purl, patch)| {
+ serde_json::json!({
+ "purl": purl,
+ "uuid": patch.uuid,
+ "exportedAt": patch.exported_at,
+ "tier": patch.tier,
+ "license": patch.license,
+ "description": patch.description,
+ "files": patch.files.keys().collect::>(),
+ "vulnerabilities": patch.vulnerabilities.iter().map(|(id, vuln)| {
+ serde_json::json!({
+ "id": id,
+ "cves": vuln.cves,
+ "summary": vuln.summary,
+ "severity": vuln.severity,
+ "description": vuln.description,
+ })
+ }).collect::>(),
+ })
+ }).collect::>()
+ });
+ println!("{}", serde_json::to_string_pretty(&json_output).unwrap());
+ } else {
+ println!("Found {} patch(es):\n", patch_entries.len());
+
+ for (purl, patch) in &patch_entries {
+ println!("Package: {purl}");
+ println!(" UUID: {}", patch.uuid);
+ println!(" Tier: {}", patch.tier);
+ println!(" License: {}", patch.license);
+ println!(" Exported: {}", patch.exported_at);
+
+ if !patch.description.is_empty() {
+ println!(" Description: {}", patch.description);
+ }
+
+ let vuln_entries: Vec<_> = patch.vulnerabilities.iter().collect();
+ if !vuln_entries.is_empty() {
+ println!(" Vulnerabilities ({}):", vuln_entries.len());
+ for (id, vuln) in &vuln_entries {
+ let cve_list = if vuln.cves.is_empty() {
+ String::new()
+ } else {
+ format!(" ({})", vuln.cves.join(", "))
+ };
+ println!(" - {id}{cve_list}");
+ println!(" Severity: {}", vuln.severity);
+ println!(" Summary: {}", vuln.summary);
+ }
+ }
+
+ let file_list: Vec<_> = patch.files.keys().collect();
+ if !file_list.is_empty() {
+ println!(" Files patched ({}):", file_list.len());
+ for file_path in &file_list {
+ println!(" - {file_path}");
+ }
+ }
+
+ println!();
+ }
+ }
+
+ 0
+ }
+ Ok(None) => {
+ if args.json {
+ println!("{}", serde_json::json!({ "error": "Invalid manifest" }));
+ } else {
+ eprintln!("Error: Invalid manifest at {}", manifest_path.display());
+ }
+ 1
+ }
+ Err(e) => {
+ if args.json {
+ println!("{}", serde_json::json!({ "error": e.to_string() }));
+ } else {
+ eprintln!("Error: {e}");
+ }
+ 1
+ }
+ }
+}
diff --git a/crates/socket-patch-cli/src/commands/mod.rs b/crates/socket-patch-cli/src/commands/mod.rs
new file mode 100644
index 0000000..499366f
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/mod.rs
@@ -0,0 +1,8 @@
// CLI subcommand implementations. Each module exposes an `*Args` struct
// (clap derive) and an async `run` entry point returning an exit code.
pub mod apply;
pub mod get;
pub mod list;
pub mod remove;
pub mod repair;
pub mod rollback;
pub mod scan;
pub mod setup;
diff --git a/crates/socket-patch-cli/src/commands/remove.rs b/crates/socket-patch-cli/src/commands/remove.rs
new file mode 100644
index 0000000..f05379a
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/remove.rs
@@ -0,0 +1,195 @@
+use clap::Args;
+use socket_patch_core::constants::DEFAULT_PATCH_MANIFEST_PATH;
+use socket_patch_core::manifest::operations::{read_manifest, write_manifest};
+use socket_patch_core::manifest::schema::PatchManifest;
+use socket_patch_core::utils::cleanup_blobs::{cleanup_unused_blobs, format_cleanup_result};
+use socket_patch_core::utils::telemetry::{track_patch_removed, track_patch_remove_failed};
+use std::path::{Path, PathBuf};
+
+use super::rollback::rollback_patches;
+
+#[derive(Args)]
+pub struct RemoveArgs {
+ /// Package PURL or patch UUID
+ pub identifier: String,
+
+ /// Working directory
+ #[arg(long, default_value = ".")]
+ pub cwd: PathBuf,
+
+ /// Path to patch manifest file
+ #[arg(short = 'm', long = "manifest-path", default_value = DEFAULT_PATCH_MANIFEST_PATH)]
+ pub manifest_path: String,
+
+ /// Skip rolling back files before removing (only update manifest)
+ #[arg(long = "skip-rollback", default_value_t = false)]
+ pub skip_rollback: bool,
+
+ /// Remove patches from globally installed npm packages
+ #[arg(short = 'g', long, default_value_t = false)]
+ pub global: bool,
+
+ /// Custom path to global node_modules
+ #[arg(long = "global-prefix")]
+ pub global_prefix: Option,
+}
+
/// Entry point for `remove`: first roll back the patch's file changes
/// (unless `--skip-rollback`), then delete it from the manifest and prune
/// orphaned blobs. Ordering matters — rollback needs the manifest entry to
/// know which files/hashes to restore, so removal happens second.
/// Returns a process exit code (0 = success, 1 = error).
pub async fn run(args: RemoveArgs) -> i32 {
    // Credentials are only used for telemetry calls below.
    let api_token = std::env::var("SOCKET_API_TOKEN").ok();
    let org_slug = std::env::var("SOCKET_ORG_SLUG").ok();

    // Resolve a relative manifest path against the working directory.
    let manifest_path = if Path::new(&args.manifest_path).is_absolute() {
        PathBuf::from(&args.manifest_path)
    } else {
        args.cwd.join(&args.manifest_path)
    };

    if tokio::fs::metadata(&manifest_path).await.is_err() {
        eprintln!("Manifest not found at {}", manifest_path.display());
        return 1;
    }

    // First, rollback the patch if not skipped
    if !args.skip_rollback {
        println!("Rolling back patch before removal...");
        // NOTE(review): the three `false` arguments are positional flags of
        // `rollback_patches` (signature defined in rollback.rs) — presumably
        // dry_run/silent/one-off; confirm against that definition.
        match rollback_patches(
            &args.cwd,
            &manifest_path,
            Some(&args.identifier),
            false,
            false,
            false,
            args.global,
            args.global_prefix.clone(),
            None,
        )
        .await
        {
            Ok((success, results)) => {
                if !success {
                    track_patch_remove_failed(
                        "Rollback failed during patch removal",
                        api_token.as_deref(),
                        org_slug.as_deref(),
                    )
                    .await;
                    eprintln!("\nRollback failed. Use --skip-rollback to remove from manifest without restoring files.");
                    return 1;
                }

                // Partition the per-package results for the summary lines:
                // packages whose files were actually restored...
                let rolled_back = results
                    .iter()
                    .filter(|r| r.success && !r.files_rolled_back.is_empty())
                    .count();
                // ...vs packages where every file was already in its
                // original state (nothing to restore).
                let already_original = results
                    .iter()
                    .filter(|r| {
                        r.success
                            && r.files_verified.iter().all(|f| {
                                f.status
                                    == socket_patch_core::patch::rollback::VerifyRollbackStatus::AlreadyOriginal
                            })
                    })
                    .count();

                if rolled_back > 0 {
                    println!("Rolled back {rolled_back} package(s)");
                }
                if already_original > 0 {
                    println!("{already_original} package(s) already in original state");
                }
                if results.is_empty() {
                    println!("No packages found to rollback (not installed)");
                }
                println!();
            }
            Err(e) => {
                track_patch_remove_failed(&e, api_token.as_deref(), org_slug.as_deref()).await;
                eprintln!("Error during rollback: {e}");
                eprintln!("\nRollback failed. Use --skip-rollback to remove from manifest without restoring files.");
                return 1;
            }
        }
    }

    // Now remove from manifest
    match remove_patch_from_manifest(&args.identifier, &manifest_path).await {
        Ok((removed, manifest)) => {
            if removed.is_empty() {
                track_patch_remove_failed(
                    &format!("No patch found matching identifier: {}", args.identifier),
                    api_token.as_deref(),
                    org_slug.as_deref(),
                )
                .await;
                eprintln!(
                    "No patch found matching identifier: {}",
                    args.identifier
                );
                return 1;
            }

            println!("Removed {} patch(es) from manifest:", removed.len());
            for purl in &removed {
                println!(" - {purl}");
            }

            println!("\nManifest updated at {}", manifest_path.display());

            // Clean up unused blobs (best-effort; errors are ignored).
            let socket_dir = manifest_path.parent().unwrap();
            let blobs_path = socket_dir.join("blobs");
            if let Ok(cleanup_result) = cleanup_unused_blobs(&manifest, &blobs_path, false).await {
                if cleanup_result.blobs_removed > 0 {
                    println!("\n{}", format_cleanup_result(&cleanup_result, false));
                }
            }

            track_patch_removed(removed.len(), api_token.as_deref(), org_slug.as_deref()).await;
            0
        }
        Err(e) => {
            track_patch_remove_failed(&e, api_token.as_deref(), org_slug.as_deref()).await;
            eprintln!("Error: {e}");
            1
        }
    }
}
+
+async fn remove_patch_from_manifest(
+ identifier: &str,
+ manifest_path: &Path,
+) -> Result<(Vec, PatchManifest), String> {
+ let mut manifest = read_manifest(manifest_path)
+ .await
+ .map_err(|e| e.to_string())?
+ .ok_or_else(|| "Invalid manifest".to_string())?;
+
+ let mut removed = Vec::new();
+
+ if identifier.starts_with("pkg:") {
+ if manifest.patches.remove(identifier).is_some() {
+ removed.push(identifier.to_string());
+ }
+ } else {
+ let purls_to_remove: Vec = manifest
+ .patches
+ .iter()
+ .filter(|(_, patch)| patch.uuid == identifier)
+ .map(|(purl, _)| purl.clone())
+ .collect();
+
+ for purl in purls_to_remove {
+ manifest.patches.remove(&purl);
+ removed.push(purl);
+ }
+ }
+
+ if !removed.is_empty() {
+ write_manifest(manifest_path, &manifest)
+ .await
+ .map_err(|e| e.to_string())?;
+ }
+
+ Ok((removed, manifest))
+}
diff --git a/crates/socket-patch-cli/src/commands/repair.rs b/crates/socket-patch-cli/src/commands/repair.rs
new file mode 100644
index 0000000..581b608
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/repair.rs
@@ -0,0 +1,133 @@
+use clap::Args;
+use socket_patch_core::api::blob_fetcher::{
+ fetch_missing_blobs, format_fetch_result, get_missing_blobs,
+};
+use socket_patch_core::api::client::get_api_client_from_env;
+use socket_patch_core::constants::DEFAULT_PATCH_MANIFEST_PATH;
+use socket_patch_core::manifest::operations::read_manifest;
+use socket_patch_core::utils::cleanup_blobs::{cleanup_unused_blobs, format_cleanup_result};
+use std::path::{Path, PathBuf};
+
/// Arguments for the `repair` command: download blobs the manifest
/// references but that are missing locally, and prune blobs no patch
/// references anymore.
#[derive(Args)]
pub struct RepairArgs {
    /// Working directory
    #[arg(long, default_value = ".")]
    pub cwd: PathBuf,

    /// Path to patch manifest file
    // Relative paths are resolved against `cwd` in `run`.
    #[arg(short = 'm', long = "manifest-path", default_value = DEFAULT_PATCH_MANIFEST_PATH)]
    pub manifest_path: String,

    /// Show what would be done without actually doing it
    #[arg(short = 'd', long = "dry-run", default_value_t = false)]
    pub dry_run: bool,

    /// Skip network operations (cleanup only)
    #[arg(long, default_value_t = false)]
    pub offline: bool,

    /// Only download missing blobs, do not clean up
    #[arg(long = "download-only", default_value_t = false)]
    pub download_only: bool,
}
+
+pub async fn run(args: RepairArgs) -> i32 {
+ let manifest_path = if Path::new(&args.manifest_path).is_absolute() {
+ PathBuf::from(&args.manifest_path)
+ } else {
+ args.cwd.join(&args.manifest_path)
+ };
+
+ if tokio::fs::metadata(&manifest_path).await.is_err() {
+ eprintln!("Manifest not found at {}", manifest_path.display());
+ return 1;
+ }
+
+ match repair_inner(&args, &manifest_path).await {
+ Ok(()) => 0,
+ Err(e) => {
+ eprintln!("Error: {e}");
+ 1
+ }
+ }
+}
+
+async fn repair_inner(args: &RepairArgs, manifest_path: &Path) -> Result<(), String> {
+ let manifest = read_manifest(manifest_path)
+ .await
+ .map_err(|e| e.to_string())?
+ .ok_or_else(|| "Invalid manifest".to_string())?;
+
+ let socket_dir = manifest_path.parent().unwrap();
+ let blobs_path = socket_dir.join("blobs");
+
+ // Step 1: Check for and download missing blobs
+ if !args.offline {
+ let missing_blobs = get_missing_blobs(&manifest, &blobs_path).await;
+
+ if !missing_blobs.is_empty() {
+ println!("Found {} missing blob(s)", missing_blobs.len());
+
+ if args.dry_run {
+ println!("\nDry run - would download:");
+ for hash in missing_blobs.iter().take(10) {
+ println!(" - {}...", &hash[..12.min(hash.len())]);
+ }
+ if missing_blobs.len() > 10 {
+ println!(" ... and {} more", missing_blobs.len() - 10);
+ }
+ } else {
+ println!("\nDownloading missing blobs...");
+ let (client, _) = get_api_client_from_env(None);
+ let fetch_result = fetch_missing_blobs(&manifest, &blobs_path, &client, None).await;
+ println!("{}", format_fetch_result(&fetch_result));
+ }
+ } else {
+ println!("All blobs are present locally.");
+ }
+ } else {
+ let missing_blobs = get_missing_blobs(&manifest, &blobs_path).await;
+ if !missing_blobs.is_empty() {
+ println!(
+ "Warning: {} blob(s) are missing (offline mode - not downloading)",
+ missing_blobs.len()
+ );
+ for hash in missing_blobs.iter().take(5) {
+ println!(" - {}...", &hash[..12.min(hash.len())]);
+ }
+ if missing_blobs.len() > 5 {
+ println!(" ... and {} more", missing_blobs.len() - 5);
+ }
+ } else {
+ println!("All blobs are present locally.");
+ }
+ }
+
+ // Step 2: Clean up unused blobs
+ if !args.download_only {
+ println!();
+ match cleanup_unused_blobs(&manifest, &blobs_path, args.dry_run).await {
+ Ok(cleanup_result) => {
+ if cleanup_result.blobs_checked == 0 {
+ println!("No blobs directory found, nothing to clean up.");
+ } else if cleanup_result.blobs_removed == 0 {
+ println!(
+ "Checked {} blob(s), all are in use.",
+ cleanup_result.blobs_checked
+ );
+ } else {
+ println!("{}", format_cleanup_result(&cleanup_result, args.dry_run));
+ }
+ }
+ Err(e) => {
+ eprintln!("Warning: cleanup failed: {e}");
+ }
+ }
+ }
+
+ if !args.dry_run {
+ println!("\nRepair complete.");
+ }
+
+ Ok(())
+}
diff --git a/crates/socket-patch-cli/src/commands/rollback.rs b/crates/socket-patch-cli/src/commands/rollback.rs
new file mode 100644
index 0000000..c09f161
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/rollback.rs
@@ -0,0 +1,493 @@
+use clap::Args;
+use socket_patch_core::api::blob_fetcher::{
+ fetch_blobs_by_hash, format_fetch_result,
+};
+use socket_patch_core::api::client::get_api_client_from_env;
+use socket_patch_core::constants::DEFAULT_PATCH_MANIFEST_PATH;
+use socket_patch_core::crawlers::{CrawlerOptions, NpmCrawler, PythonCrawler};
+use socket_patch_core::manifest::operations::read_manifest;
+use socket_patch_core::manifest::schema::{PatchManifest, PatchRecord};
+use socket_patch_core::patch::rollback::{rollback_package_patch, RollbackResult};
+use socket_patch_core::utils::global_packages::get_global_prefix;
+use socket_patch_core::utils::purl::{is_pypi_purl, strip_purl_qualifiers};
+use socket_patch_core::utils::telemetry::{track_patch_rolled_back, track_patch_rollback_failed};
+use std::collections::{HashMap, HashSet};
+use std::path::{Path, PathBuf};
+
+#[derive(Args)]
+pub struct RollbackArgs {
+ /// Package PURL or patch UUID to rollback. Omit to rollback all patches.
+ pub identifier: Option,
+
+ /// Working directory
+ #[arg(long, default_value = ".")]
+ pub cwd: PathBuf,
+
+ /// Verify rollback can be performed without modifying files
+ #[arg(short = 'd', long = "dry-run", default_value_t = false)]
+ pub dry_run: bool,
+
+ /// Only output errors
+ #[arg(short = 's', long, default_value_t = false)]
+ pub silent: bool,
+
+ /// Path to patch manifest file
+ #[arg(short = 'm', long = "manifest-path", default_value = DEFAULT_PATCH_MANIFEST_PATH)]
+ pub manifest_path: String,
+
+ /// Do not download missing blobs, fail if any are missing
+ #[arg(long, default_value_t = false)]
+ pub offline: bool,
+
+ /// Rollback patches from globally installed npm packages
+ #[arg(short = 'g', long, default_value_t = false)]
+ pub global: bool,
+
+ /// Custom path to global node_modules
+ #[arg(long = "global-prefix")]
+ pub global_prefix: Option,
+
+ /// Rollback a patch by fetching beforeHash blobs from API (no manifest required)
+ #[arg(long = "one-off", default_value_t = false)]
+ pub one_off: bool,
+
+ /// Organization slug
+ #[arg(long)]
+ pub org: Option,
+
+ /// Socket API URL (overrides SOCKET_API_URL env var)
+ #[arg(long = "api-url")]
+ pub api_url: Option,
+
+ /// Socket API token (overrides SOCKET_API_TOKEN env var)
+ #[arg(long = "api-token")]
+ pub api_token: Option,
+
+ /// Restrict rollback to specific ecosystems
+ #[arg(long, value_delimiter = ',')]
+ pub ecosystems: Option>,
+}
+
/// A single (purl, patch record) pair selected for rollback.
struct PatchToRollback {
    // Package URL key exactly as stored in the manifest's `patches` map.
    purl: String,
    // The manifest record describing the patch to undo.
    patch: PatchRecord,
}
+
+fn find_patches_to_rollback(
+ manifest: &PatchManifest,
+ identifier: Option<&str>,
+) -> Vec {
+ match identifier {
+ None => manifest
+ .patches
+ .iter()
+ .map(|(purl, patch)| PatchToRollback {
+ purl: purl.clone(),
+ patch: patch.clone(),
+ })
+ .collect(),
+ Some(id) => {
+ let mut patches = Vec::new();
+ if id.starts_with("pkg:") {
+ if let Some(patch) = manifest.patches.get(id) {
+ patches.push(PatchToRollback {
+ purl: id.to_string(),
+ patch: patch.clone(),
+ });
+ }
+ } else {
+ for (purl, patch) in &manifest.patches {
+ if patch.uuid == id {
+ patches.push(PatchToRollback {
+ purl: purl.clone(),
+ patch: patch.clone(),
+ });
+ }
+ }
+ }
+ patches
+ }
+ }
+}
+
+fn get_before_hash_blobs(manifest: &PatchManifest) -> HashSet {
+ let mut blobs = HashSet::new();
+ for patch in manifest.patches.values() {
+ for file_info in patch.files.values() {
+ blobs.insert(file_info.before_hash.clone());
+ }
+ }
+ blobs
+}
+
+async fn get_missing_before_blobs(
+ manifest: &PatchManifest,
+ blobs_path: &Path,
+) -> HashSet {
+ let before_blobs = get_before_hash_blobs(manifest);
+ let mut missing = HashSet::new();
+ for hash in before_blobs {
+ let blob_path = blobs_path.join(&hash);
+ if tokio::fs::metadata(&blob_path).await.is_err() {
+ missing.insert(hash);
+ }
+ }
+ missing
+}
+
/// CLI entry point for `socket-patch rollback`.
///
/// Resolves the manifest path, delegates to `rollback_patches_inner`,
/// prints a human-readable summary (unless `--silent`), emits telemetry,
/// and returns a process exit code (0 = success, 1 = failure).
pub async fn run(args: RollbackArgs) -> i32 {
    // Telemetry credentials: CLI flags win over environment variables.
    let api_token = args
        .api_token
        .clone()
        .or_else(|| std::env::var("SOCKET_API_TOKEN").ok());
    let org_slug = args
        .org
        .clone()
        .or_else(|| std::env::var("SOCKET_ORG_SLUG").ok());

    // Validate one-off requires identifier
    if args.one_off && args.identifier.is_none() {
        eprintln!("Error: --one-off requires an identifier (UUID or PURL)");
        return 1;
    }

    // Override env vars if CLI options provided
    // NOTE(review): std::env::set_var is process-global and racy under a
    // multithreaded tokio runtime — confirm this is acceptable here.
    if let Some(ref url) = args.api_url {
        std::env::set_var("SOCKET_API_URL", url);
    }
    if let Some(ref token) = args.api_token {
        std::env::set_var("SOCKET_API_TOKEN", token);
    }

    // Handle one-off mode
    if args.one_off {
        // One-off mode not fully implemented yet - placeholder
        eprintln!("One-off rollback mode: fetching patch data...");
        // TODO: implement one-off rollback
        return 1;
    }

    // Relative manifest paths are resolved against --cwd.
    let manifest_path = if Path::new(&args.manifest_path).is_absolute() {
        PathBuf::from(&args.manifest_path)
    } else {
        args.cwd.join(&args.manifest_path)
    };

    if tokio::fs::metadata(&manifest_path).await.is_err() {
        if !args.silent {
            eprintln!("Manifest not found at {}", manifest_path.display());
        }
        return 1;
    }

    match rollback_patches_inner(&args, &manifest_path).await {
        Ok((success, results)) => {
            if !args.silent && !results.is_empty() {
                // Partition results for reporting: actually rolled back,
                // already in original state, and failed.
                let rolled_back: Vec<_> = results
                    .iter()
                    .filter(|r| r.success && !r.files_rolled_back.is_empty())
                    .collect();
                let already_original: Vec<_> = results
                    .iter()
                    .filter(|r| {
                        r.success
                            && r.files_verified.iter().all(|f| {
                                f.status
                                    == socket_patch_core::patch::rollback::VerifyRollbackStatus::AlreadyOriginal
                            })
                    })
                    .collect();
                let failed: Vec<_> = results.iter().filter(|r| !r.success).collect();

                if args.dry_run {
                    println!("\nRollback verification complete:");
                    let can_rollback = results.iter().filter(|r| r.success).count();
                    println!(" {can_rollback} package(s) can be rolled back");
                    if !already_original.is_empty() {
                        println!(
                            " {} package(s) already in original state",
                            already_original.len()
                        );
                    }
                    if !failed.is_empty() {
                        println!(" {} package(s) cannot be rolled back", failed.len());
                    }
                } else {
                    if !rolled_back.is_empty() || !already_original.is_empty() {
                        println!("\nRolled back packages:");
                        for result in &rolled_back {
                            println!(" {}", result.package_key);
                        }
                        for result in &already_original {
                            println!(" {} (already original)", result.package_key);
                        }
                    }
                    if !failed.is_empty() {
                        println!("\nFailed to rollback:");
                        for result in &failed {
                            println!(
                                " {}: {}",
                                result.package_key,
                                result.error.as_deref().unwrap_or("unknown error")
                            );
                        }
                    }
                }
            }

            // Telemetry counts only packages whose files actually changed.
            let rolled_back_count = results
                .iter()
                .filter(|r| r.success && !r.files_rolled_back.is_empty())
                .count();
            if success {
                track_patch_rolled_back(rolled_back_count, api_token.as_deref(), org_slug.as_deref()).await;
            } else {
                track_patch_rollback_failed("One or more rollbacks failed", api_token.as_deref(), org_slug.as_deref()).await;
            }

            if success { 0 } else { 1 }
        }
        Err(e) => {
            track_patch_rollback_failed(&e, api_token.as_deref(), org_slug.as_deref()).await;
            if !args.silent {
                eprintln!("Error: {e}");
            }
            1
        }
    }
}
+
+async fn rollback_patches_inner(
+ args: &RollbackArgs,
+ manifest_path: &Path,
+) -> Result<(bool, Vec), String> {
+ let manifest = read_manifest(manifest_path)
+ .await
+ .map_err(|e| e.to_string())?
+ .ok_or_else(|| "Invalid manifest".to_string())?;
+
+ let socket_dir = manifest_path.parent().unwrap();
+ let blobs_path = socket_dir.join("blobs");
+ tokio::fs::create_dir_all(&blobs_path)
+ .await
+ .map_err(|e| e.to_string())?;
+
+ let patches_to_rollback =
+ find_patches_to_rollback(&manifest, args.identifier.as_deref());
+
+ if patches_to_rollback.is_empty() {
+ if args.identifier.is_some() {
+ return Err(format!(
+ "No patch found matching identifier: {}",
+ args.identifier.as_deref().unwrap()
+ ));
+ }
+ if !args.silent {
+ println!("No patches found in manifest");
+ }
+ return Ok((true, Vec::new()));
+ }
+
+ // Create filtered manifest
+ let filtered_manifest = PatchManifest {
+ patches: patches_to_rollback
+ .iter()
+ .map(|p| (p.purl.clone(), p.patch.clone()))
+ .collect(),
+ };
+
+ // Check for missing beforeHash blobs
+ let missing_blobs = get_missing_before_blobs(&filtered_manifest, &blobs_path).await;
+ if !missing_blobs.is_empty() {
+ if args.offline {
+ if !args.silent {
+ eprintln!(
+ "Error: {} blob(s) are missing and --offline mode is enabled.",
+ missing_blobs.len()
+ );
+ eprintln!("Run \"socket-patch repair\" to download missing blobs.");
+ }
+ return Ok((false, Vec::new()));
+ }
+
+ if !args.silent {
+ println!("Downloading {} missing blob(s)...", missing_blobs.len());
+ }
+
+ let (client, _) = get_api_client_from_env(None);
+ let fetch_result = fetch_blobs_by_hash(&missing_blobs, &blobs_path, &client, None).await;
+
+ if !args.silent {
+ println!("{}", format_fetch_result(&fetch_result));
+ }
+
+ let still_missing = get_missing_before_blobs(&filtered_manifest, &blobs_path).await;
+ if !still_missing.is_empty() {
+ if !args.silent {
+ eprintln!(
+ "{} blob(s) could not be downloaded. Cannot rollback.",
+ still_missing.len()
+ );
+ }
+ return Ok((false, Vec::new()));
+ }
+ }
+
+ // Partition PURLs by ecosystem
+ let rollback_purls: Vec = patches_to_rollback.iter().map(|p| p.purl.clone()).collect();
+ let mut npm_purls: Vec = rollback_purls.iter().filter(|p| !is_pypi_purl(p)).cloned().collect();
+ let mut pypi_purls: Vec = rollback_purls.iter().filter(|p| is_pypi_purl(p)).cloned().collect();
+
+ if let Some(ref ecosystems) = args.ecosystems {
+ if !ecosystems.iter().any(|e| e == "npm") {
+ npm_purls.clear();
+ }
+ if !ecosystems.iter().any(|e| e == "pypi") {
+ pypi_purls.clear();
+ }
+ }
+
+ let crawler_options = CrawlerOptions {
+ cwd: args.cwd.clone(),
+ global: args.global,
+ global_prefix: args.global_prefix.clone(),
+ batch_size: 100,
+ };
+
+ let mut all_packages: HashMap = HashMap::new();
+
+ // Find npm packages
+ if !npm_purls.is_empty() {
+ if args.global || args.global_prefix.is_some() {
+ match get_global_prefix(args.global_prefix.as_ref().map(|p| p.to_str().unwrap_or(""))) {
+ Ok(prefix) => {
+ if !args.silent {
+ println!("Using global npm packages at: {prefix}");
+ }
+ let npm_crawler = NpmCrawler;
+ if let Ok(packages) = npm_crawler.find_by_purls(Path::new(&prefix), &npm_purls).await {
+ for (purl, pkg) in packages {
+ all_packages.entry(purl).or_insert(pkg.path);
+ }
+ }
+ }
+ Err(e) => {
+ if !args.silent {
+ eprintln!("Failed to find global npm packages: {e}");
+ }
+ return Ok((false, Vec::new()));
+ }
+ }
+ } else {
+ let npm_crawler = NpmCrawler;
+ if let Ok(nm_paths) = npm_crawler.get_node_modules_paths(&crawler_options).await {
+ for nm_path in &nm_paths {
+ if let Ok(packages) = npm_crawler.find_by_purls(nm_path, &npm_purls).await {
+ for (purl, pkg) in packages {
+ all_packages.entry(purl).or_insert(pkg.path);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Find Python packages
+ if !pypi_purls.is_empty() {
+ let python_crawler = PythonCrawler;
+ let base_pypi_purls: Vec = pypi_purls
+ .iter()
+ .map(|p| strip_purl_qualifiers(p).to_string())
+ .collect::>()
+ .into_iter()
+ .collect();
+
+ if let Ok(sp_paths) = python_crawler.get_site_packages_paths(&crawler_options).await {
+ for sp_path in &sp_paths {
+ if let Ok(packages) = python_crawler.find_by_purls(sp_path, &base_pypi_purls).await {
+ for (base_purl, pkg) in packages {
+ for qualified_purl in &pypi_purls {
+ if strip_purl_qualifiers(qualified_purl) == base_purl
+ && !all_packages.contains_key(qualified_purl)
+ {
+ all_packages.insert(qualified_purl.clone(), pkg.path.clone());
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if all_packages.is_empty() {
+ if !args.silent {
+ println!("No packages found that match patches to rollback");
+ }
+ return Ok((true, Vec::new()));
+ }
+
+ // Rollback patches
+ let mut results: Vec = Vec::new();
+ let mut has_errors = false;
+
+ for (purl, pkg_path) in &all_packages {
+ let patch = match filtered_manifest.patches.get(purl) {
+ Some(p) => p,
+ None => continue,
+ };
+
+ let result = rollback_package_patch(
+ purl,
+ pkg_path,
+ &patch.files,
+ &blobs_path,
+ args.dry_run,
+ )
+ .await;
+
+ if !result.success {
+ has_errors = true;
+ if !args.silent {
+ eprintln!(
+ "Failed to rollback {}: {}",
+ purl,
+ result.error.as_deref().unwrap_or("unknown error")
+ );
+ }
+ }
+ results.push(result);
+ }
+
+ Ok((!has_errors, results))
+}
+
+// Export for use by remove command
+#[allow(clippy::too_many_arguments)]
+pub async fn rollback_patches(
+ cwd: &Path,
+ manifest_path: &Path,
+ identifier: Option<&str>,
+ dry_run: bool,
+ silent: bool,
+ offline: bool,
+ global: bool,
+ global_prefix: Option,
+ ecosystems: Option>,
+) -> Result<(bool, Vec), String> {
+ let args = RollbackArgs {
+ identifier: identifier.map(String::from),
+ cwd: cwd.to_path_buf(),
+ dry_run,
+ silent,
+ manifest_path: manifest_path.display().to_string(),
+ offline,
+ global,
+ global_prefix,
+ one_off: false,
+ org: None,
+ api_url: None,
+ api_token: None,
+ ecosystems,
+ };
+ rollback_patches_inner(&args, manifest_path).await
+}
diff --git a/crates/socket-patch-cli/src/commands/scan.rs b/crates/socket-patch-cli/src/commands/scan.rs
new file mode 100644
index 0000000..81478aa
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/scan.rs
@@ -0,0 +1,360 @@
+use clap::Args;
+use socket_patch_core::api::client::get_api_client_from_env;
+use socket_patch_core::api::types::BatchPackagePatches;
+use socket_patch_core::crawlers::{CrawlerOptions, NpmCrawler, PythonCrawler};
+use std::collections::HashSet;
+use std::path::PathBuf;
+
+const DEFAULT_BATCH_SIZE: usize = 100;
+
+#[derive(Args)]
+pub struct ScanArgs {
+ /// Working directory
+ #[arg(long, default_value = ".")]
+ pub cwd: PathBuf,
+
+ /// Organization slug
+ #[arg(long)]
+ pub org: Option,
+
+ /// Output results as JSON
+ #[arg(long, default_value_t = false)]
+ pub json: bool,
+
+ /// Scan globally installed npm packages
+ #[arg(short = 'g', long, default_value_t = false)]
+ pub global: bool,
+
+ /// Custom path to global node_modules
+ #[arg(long = "global-prefix")]
+ pub global_prefix: Option,
+
+ /// Number of packages to query per API request
+ #[arg(long = "batch-size", default_value_t = DEFAULT_BATCH_SIZE)]
+ pub batch_size: usize,
+
+ /// Socket API URL (overrides SOCKET_API_URL env var)
+ #[arg(long = "api-url")]
+ pub api_url: Option,
+
+ /// Socket API token (overrides SOCKET_API_TOKEN env var)
+ #[arg(long = "api-token")]
+ pub api_token: Option,
+}
+
+pub async fn run(args: ScanArgs) -> i32 {
+ // Override env vars if CLI options provided
+ if let Some(ref url) = args.api_url {
+ std::env::set_var("SOCKET_API_URL", url);
+ }
+ if let Some(ref token) = args.api_token {
+ std::env::set_var("SOCKET_API_TOKEN", token);
+ }
+
+ let (api_client, use_public_proxy) = get_api_client_from_env(args.org.as_deref());
+
+ if !use_public_proxy && args.org.is_none() {
+ eprintln!("Error: --org is required when using SOCKET_API_TOKEN. Provide an organization slug.");
+ return 1;
+ }
+
+ let effective_org_slug = if use_public_proxy {
+ None
+ } else {
+ args.org.as_deref()
+ };
+
+ let crawler_options = CrawlerOptions {
+ cwd: args.cwd.clone(),
+ global: args.global,
+ global_prefix: args.global_prefix.clone(),
+ batch_size: args.batch_size,
+ };
+
+ let scan_target = if args.global || args.global_prefix.is_some() {
+ "global packages"
+ } else {
+ "packages"
+ };
+
+ if !args.json {
+ eprint!("Scanning {scan_target}...");
+ }
+
+ // Crawl packages
+ let npm_crawler = NpmCrawler;
+ let python_crawler = PythonCrawler;
+
+ let npm_packages = npm_crawler.crawl_all(&crawler_options).await;
+ let python_packages = python_crawler.crawl_all(&crawler_options).await;
+
+ let mut all_purls: Vec = Vec::new();
+ for pkg in &npm_packages {
+ all_purls.push(pkg.purl.clone());
+ }
+ for pkg in &python_packages {
+ all_purls.push(pkg.purl.clone());
+ }
+
+ let package_count = all_purls.len();
+ let npm_count = npm_packages.len();
+ let python_count = python_packages.len();
+
+ if package_count == 0 {
+ if !args.json {
+ eprintln!();
+ }
+ if args.json {
+ println!(
+ "{}",
+ serde_json::to_string_pretty(&serde_json::json!({
+ "scannedPackages": 0,
+ "packagesWithPatches": 0,
+ "totalPatches": 0,
+ "freePatches": 0,
+ "paidPatches": 0,
+ "canAccessPaidPatches": false,
+ "packages": [],
+ }))
+ .unwrap()
+ );
+ } else if args.global || args.global_prefix.is_some() {
+ println!("No global packages found.");
+ } else {
+ println!("No packages found. Run npm/yarn/pnpm/pip install first.");
+ }
+ return 0;
+ }
+
+ // Build ecosystem summary
+ let mut eco_parts = Vec::new();
+ if npm_count > 0 {
+ eco_parts.push(format!("{npm_count} npm"));
+ }
+ if python_count > 0 {
+ eco_parts.push(format!("{python_count} python"));
+ }
+ let eco_summary = if eco_parts.is_empty() {
+ String::new()
+ } else {
+ format!(" ({})", eco_parts.join(", "))
+ };
+
+ if !args.json {
+ eprintln!("\rFound {package_count} packages{eco_summary}");
+ }
+
+ // Query API in batches
+ let mut all_packages_with_patches: Vec = Vec::new();
+ let mut can_access_paid_patches = false;
+ let total_batches = all_purls.len().div_ceil(args.batch_size);
+
+ if !args.json {
+ eprint!("Querying API for patches... (batch 1/{total_batches})");
+ }
+
+ for (batch_idx, chunk) in all_purls.chunks(args.batch_size).enumerate() {
+ if !args.json {
+ eprint!(
+ "\rQuerying API for patches... (batch {}/{})",
+ batch_idx + 1,
+ total_batches
+ );
+ }
+
+ let purls: Vec = chunk.to_vec();
+ match api_client
+ .search_patches_batch(effective_org_slug, &purls)
+ .await
+ {
+ Ok(response) => {
+ if response.can_access_paid_patches {
+ can_access_paid_patches = true;
+ }
+ for pkg in response.packages {
+ if !pkg.patches.is_empty() {
+ all_packages_with_patches.push(pkg);
+ }
+ }
+ }
+ Err(e) => {
+ if !args.json {
+ eprintln!("\nError querying batch {}: {e}", batch_idx + 1);
+ }
+ }
+ }
+ }
+
+ let total_patches_found: usize = all_packages_with_patches
+ .iter()
+ .map(|p| p.patches.len())
+ .sum();
+
+ if !args.json {
+ if total_patches_found > 0 {
+ eprintln!(
+ "\rFound {total_patches_found} patches for {} packages",
+ all_packages_with_patches.len()
+ );
+ } else {
+ eprintln!("\rAPI query complete");
+ }
+ }
+
+ // Calculate patch counts
+ let mut free_patches = 0usize;
+ let mut paid_patches = 0usize;
+ for pkg in &all_packages_with_patches {
+ for patch in &pkg.patches {
+ if patch.tier == "free" {
+ free_patches += 1;
+ } else {
+ paid_patches += 1;
+ }
+ }
+ }
+ let total_patches = free_patches + paid_patches;
+
+ if args.json {
+ let result = serde_json::json!({
+ "scannedPackages": package_count,
+ "packagesWithPatches": all_packages_with_patches.len(),
+ "totalPatches": total_patches,
+ "freePatches": free_patches,
+ "paidPatches": paid_patches,
+ "canAccessPaidPatches": can_access_paid_patches,
+ "packages": all_packages_with_patches,
+ });
+ println!("{}", serde_json::to_string_pretty(&result).unwrap());
+ return 0;
+ }
+
+ if all_packages_with_patches.is_empty() {
+ println!("\nNo patches available for installed packages.");
+ return 0;
+ }
+
+ // Print table
+ println!("\n{}", "=".repeat(100));
+ println!(
+ "{} {} {} VULNERABILITIES",
+ "PACKAGE".to_string() + &" ".repeat(33),
+ "PATCHES".to_string() + " ",
+ "SEVERITY".to_string() + &" ".repeat(8),
+ );
+ println!("{}", "=".repeat(100));
+
+ for pkg in &all_packages_with_patches {
+ let max_purl_len = 40;
+ let display_purl = if pkg.purl.len() > max_purl_len {
+ format!("{}...", &pkg.purl[..max_purl_len - 3])
+ } else {
+ pkg.purl.clone()
+ };
+
+ let pkg_free = pkg.patches.iter().filter(|p| p.tier == "free").count();
+ let pkg_paid = pkg.patches.iter().filter(|p| p.tier == "paid").count();
+
+ let count_str = if pkg_paid > 0 {
+ if can_access_paid_patches {
+ format!("{}+{}", pkg_free, pkg_paid)
+ } else {
+ format!("{}\x1b[33m+{}\x1b[0m", pkg_free, pkg_paid)
+ }
+ } else {
+ format!("{}", pkg_free)
+ };
+
+ // Get highest severity
+ let severity = pkg
+ .patches
+ .iter()
+ .filter_map(|p| p.severity.as_deref())
+ .min_by_key(|s| severity_order(s))
+ .unwrap_or("unknown");
+
+ // Collect vuln IDs
+ let mut all_cves = HashSet::new();
+ let mut all_ghsas = HashSet::new();
+ for patch in &pkg.patches {
+ for cve in &patch.cve_ids {
+ all_cves.insert(cve.clone());
+ }
+ for ghsa in &patch.ghsa_ids {
+ all_ghsas.insert(ghsa.clone());
+ }
+ }
+ let vuln_ids: Vec<_> = all_cves.into_iter().chain(all_ghsas).collect();
+ let vuln_str = if vuln_ids.len() > 2 {
+ format!(
+ "{} (+{})",
+ vuln_ids[..2].join(", "),
+ vuln_ids.len() - 2
+ )
+ } else if vuln_ids.is_empty() {
+ "-".to_string()
+ } else {
+ vuln_ids.join(", ")
+ };
+
+ println!(
+ "{:<40} {:>8} {:<16} {}",
+ display_purl,
+ count_str,
+ format_severity(severity),
+ vuln_str,
+ );
+ }
+
+ println!("{}", "=".repeat(100));
+
+ // Summary
+ if can_access_paid_patches {
+ println!(
+ "\nSummary: {} package(s) with {} available patch(es)",
+ all_packages_with_patches.len(),
+ total_patches,
+ );
+ } else {
+ println!(
+ "\nSummary: {} package(s) with {} free patch(es)",
+ all_packages_with_patches.len(),
+ free_patches,
+ );
+ if paid_patches > 0 {
+ println!(
+ "\x1b[33m + {} additional patch(es) available with paid subscription\x1b[0m",
+ paid_patches,
+ );
+ println!(
+ "\nUpgrade to Socket's paid plan to access all patches: https://socket.dev/pricing"
+ );
+ }
+ }
+
+ println!("\nTo apply a patch, run:");
+ println!(" socket-patch get ");
+ println!(" socket-patch get ");
+
+ 0
+}
+
/// Rank a severity label for ordering: lower values sort first (more
/// severe). Matching is case-insensitive; unrecognized labels rank last (4).
fn severity_order(s: &str) -> u8 {
    const RANKED: [&str; 4] = ["critical", "high", "medium", "low"];
    let lowered = s.to_lowercase();
    RANKED
        .iter()
        .position(|&name| name == lowered)
        .map_or(4, |idx| idx as u8)
}
+
/// Wrap a known severity label in an ANSI color escape for terminal output.
/// Matching is case-insensitive; an unrecognized label is returned in its
/// lowercased form (matching the string used for comparison), uncolored.
fn format_severity(s: &str) -> String {
    let lowered = s.to_lowercase();
    let colored = match lowered.as_str() {
        "critical" => "\x1b[31mcritical\x1b[0m",
        "high" => "\x1b[91mhigh\x1b[0m",
        "medium" => "\x1b[33mmedium\x1b[0m",
        "low" => "\x1b[36mlow\x1b[0m",
        _ => return lowered,
    };
    colored.to_string()
}
diff --git a/crates/socket-patch-cli/src/commands/setup.rs b/crates/socket-patch-cli/src/commands/setup.rs
new file mode 100644
index 0000000..c72a092
--- /dev/null
+++ b/crates/socket-patch-cli/src/commands/setup.rs
@@ -0,0 +1,152 @@
+use clap::Args;
+use socket_patch_core::package_json::find::find_package_json_files;
+use socket_patch_core::package_json::update::{update_package_json, UpdateStatus};
+use std::io::{self, Write};
+use std::path::{Path, PathBuf};
+
// Arguments for `socket-patch setup`. Plain `//` comment so clap's help
// output (derived from `///` doc comments) is unchanged.
#[derive(Args)]
pub struct SetupArgs {
    /// Working directory
    #[arg(long, default_value = ".")]
    pub cwd: PathBuf,

    /// Preview changes without modifying files
    #[arg(short = 'd', long = "dry-run", default_value_t = false)]
    pub dry_run: bool,

    /// Skip confirmation prompt
    #[arg(short = 'y', long, default_value_t = false)]
    pub yes: bool,
}
+
/// CLI entry point for `socket-patch setup`.
///
/// Finds package.json files under `--cwd`, previews which ones need a
/// socket-patch postinstall script, then (unless `--dry-run`) prompts for
/// confirmation on stdin and applies the updates in a second pass. Returns
/// a process exit code: non-zero only when the apply pass reported errors.
pub async fn run(args: SetupArgs) -> i32 {
    println!("Searching for package.json files...");

    let package_json_files = find_package_json_files(&args.cwd).await;

    if package_json_files.is_empty() {
        println!("No package.json files found");
        return 0;
    }

    println!("Found {} package.json file(s)", package_json_files.len());

    // Preview changes (always preview first)
    // dry_run=true: compute what would change without writing anything.
    let mut preview_results = Vec::new();
    for loc in &package_json_files {
        let result = update_package_json(&loc.path, true).await;
        preview_results.push(result);
    }

    // Display preview, partitioned by outcome.
    let to_update: Vec<_> = preview_results
        .iter()
        .filter(|r| r.status == UpdateStatus::Updated)
        .collect();
    let already_configured: Vec<_> = preview_results
        .iter()
        .filter(|r| r.status == UpdateStatus::AlreadyConfigured)
        .collect();
    let errors: Vec<_> = preview_results
        .iter()
        .filter(|r| r.status == UpdateStatus::Error)
        .collect();

    println!("\nPackage.json files to be updated:\n");

    if !to_update.is_empty() {
        println!("Will update:");
        for result in &to_update {
            // Paths are shown relative to --cwd when possible.
            let rel_path = pathdiff(&result.path, &args.cwd);
            println!(" + {rel_path}");
            if result.old_script.is_empty() {
                println!(" Current: (no postinstall script)");
            } else {
                println!(" Current: \"{}\"", result.old_script);
            }
            println!(" New: \"{}\"", result.new_script);
        }
        println!();
    }

    if !already_configured.is_empty() {
        println!("Already configured (will skip):");
        for result in &already_configured {
            let rel_path = pathdiff(&result.path, &args.cwd);
            println!(" = {rel_path}");
        }
        println!();
    }

    if !errors.is_empty() {
        println!("Errors:");
        for result in &errors {
            let rel_path = pathdiff(&result.path, &args.cwd);
            println!(
                " ! {}: {}",
                rel_path,
                result.error.as_deref().unwrap_or("unknown error")
            );
        }
        println!();
    }

    if to_update.is_empty() {
        println!("All package.json files are already configured with socket-patch!");
        return 0;
    }

    // If not dry-run, ask for confirmation (skipped with --yes).
    if !args.dry_run {
        if !args.yes {
            print!("Proceed with these changes? (y/N): ");
            io::stdout().flush().unwrap();
            let mut answer = String::new();
            io::stdin().read_line(&mut answer).unwrap();
            let answer = answer.trim().to_lowercase();
            // Default answer is "no": anything but y/yes aborts cleanly.
            if answer != "y" && answer != "yes" {
                println!("Aborted");
                return 0;
            }
        }

        println!("\nApplying changes...");
        // Second pass actually writes the files (dry_run=false).
        let mut results = Vec::new();
        for loc in &package_json_files {
            let result = update_package_json(&loc.path, false).await;
            results.push(result);
        }

        let updated = results.iter().filter(|r| r.status == UpdateStatus::Updated).count();
        let already = results.iter().filter(|r| r.status == UpdateStatus::AlreadyConfigured).count();
        let errs = results.iter().filter(|r| r.status == UpdateStatus::Error).count();

        println!("\nSummary:");
        println!(" {updated} file(s) updated");
        println!(" {already} file(s) already configured");
        if errs > 0 {
            println!(" {errs} error(s)");
        }

        if errs > 0 { 1 } else { 0 }
    } else {
        // Dry-run: report counts from the preview pass only; never fails.
        let updated = preview_results.iter().filter(|r| r.status == UpdateStatus::Updated).count();
        let already = preview_results.iter().filter(|r| r.status == UpdateStatus::AlreadyConfigured).count();
        let errs = preview_results.iter().filter(|r| r.status == UpdateStatus::Error).count();

        println!("\nSummary:");
        println!(" {updated} file(s) would be updated");
        println!(" {already} file(s) already configured");
        if errs > 0 {
            println!(" {errs} error(s)");
        }
        0
    }
}
+
/// Render `path` relative to `base` when `base` is a prefix of it;
/// otherwise return `path` unchanged.
fn pathdiff(path: &str, base: &Path) -> String {
    match Path::new(path).strip_prefix(base) {
        Ok(relative) => relative.display().to_string(),
        Err(_) => path.to_string(),
    }
}
diff --git a/crates/socket-patch-cli/src/main.rs b/crates/socket-patch-cli/src/main.rs
new file mode 100644
index 0000000..e04f9ce
--- /dev/null
+++ b/crates/socket-patch-cli/src/main.rs
@@ -0,0 +1,62 @@
+mod commands;
+
+use clap::{Parser, Subcommand};
+
// Root CLI parser for the `socket-patch` binary. Plain `//` comment so
// clap's about/long_about (set explicitly below) is unchanged.
#[derive(Parser)]
#[command(
    name = "socket-patch",
    about = "CLI tool for applying security patches to dependencies",
    version,
    propagate_version = true
)]
struct Cli {
    // The selected subcommand; see `Commands`.
    #[command(subcommand)]
    command: Commands,
}
+
// All subcommands. Each variant's `///` doc comment is the user-visible
// help text, so those lines are left untouched.
#[derive(Subcommand)]
enum Commands {
    /// Apply security patches to dependencies
    Apply(commands::apply::ApplyArgs),

    /// Rollback patches to restore original files
    Rollback(commands::rollback::RollbackArgs),

    /// Get security patches from Socket API and apply them
    #[command(visible_alias = "download")]
    Get(commands::get::GetArgs),

    /// Scan installed packages for available security patches
    Scan(commands::scan::ScanArgs),

    /// List all patches in the local manifest
    List(commands::list::ListArgs),

    /// Remove a patch from the manifest by PURL or UUID (rolls back files first)
    Remove(commands::remove::RemoveArgs),

    /// Configure package.json postinstall scripts to apply patches
    Setup(commands::setup::SetupArgs),

    /// Download missing blobs and clean up unused blobs
    #[command(visible_alias = "gc")]
    Repair(commands::repair::RepairArgs),
}
+
/// Binary entry point: parse CLI arguments, dispatch to the selected
/// subcommand, and exit with the code the command returns.
#[tokio::main]
async fn main() {
    let cli = Cli::parse();

    // Every command returns an i32 exit code instead of panicking, so
    // dispatch is a plain match.
    let exit_code = match cli.command {
        Commands::Apply(args) => commands::apply::run(args).await,
        Commands::Rollback(args) => commands::rollback::run(args).await,
        Commands::Get(args) => commands::get::run(args).await,
        Commands::Scan(args) => commands::scan::run(args).await,
        Commands::List(args) => commands::list::run(args).await,
        Commands::Remove(args) => commands::remove::run(args).await,
        Commands::Setup(args) => commands::setup::run(args).await,
        Commands::Repair(args) => commands::repair::run(args).await,
    };

    // Explicit exit so the code propagates to the shell.
    std::process::exit(exit_code);
}
diff --git a/crates/socket-patch-cli/tests/e2e_npm.rs b/crates/socket-patch-cli/tests/e2e_npm.rs
new file mode 100644
index 0000000..03208af
--- /dev/null
+++ b/crates/socket-patch-cli/tests/e2e_npm.rs
@@ -0,0 +1,268 @@
+//! End-to-end tests for the npm patch lifecycle.
+//!
+//! These tests exercise the full CLI against the real Socket API, using the
+//! **minimist@1.2.2** patch (UUID `80630680-4da6-45f9-bba8-b888e0ffd58c`),
+//! which fixes CVE-2021-44906 (Prototype Pollution).
+//!
+//! # Prerequisites
+//! - `npm` on PATH
+//! - Network access to `patches-api.socket.dev` and `registry.npmjs.org`
+//!
+//! # Running
+//! ```sh
+//! cargo test -p socket-patch-cli --test e2e_npm -- --ignored
+//! ```
+
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+
+use sha2::{Digest, Sha256};
+
+// ---------------------------------------------------------------------------
+// Constants
+// ---------------------------------------------------------------------------
+
+const NPM_UUID: &str = "80630680-4da6-45f9-bba8-b888e0ffd58c";
+const NPM_PURL: &str = "pkg:npm/minimist@1.2.2";
+
+/// Git SHA-256 of the *unpatched* `index.js` shipped with minimist 1.2.2.
+const BEFORE_HASH: &str = "311f1e893e6eac502693fad8617dcf5353a043ccc0f7b4ba9fe385e838b67a10";
+
+/// Git SHA-256 of the *patched* `index.js` after the security fix.
+const AFTER_HASH: &str = "043f04d19e884aa5f8371428718d2a3f27a0d231afe77a2620ac6312f80aaa28";
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/// Path to the compiled `socket-patch` binary, injected by Cargo at
+/// integration-test build time via `CARGO_BIN_EXE_*`.
+fn binary() -> PathBuf {
+    PathBuf::from(env!("CARGO_BIN_EXE_socket-patch"))
+}
+
+/// Return true when `cmd` is available on PATH.
+///
+/// The command must both spawn AND exit successfully for `--version`
+/// (mirrors `has_python3` in e2e_pypi.rs). The previous `.is_ok()` check
+/// only verified the spawn, so a present-but-broken installation counted
+/// as available.
+fn has_command(cmd: &str) -> bool {
+    Command::new(cmd)
+        .arg("--version")
+        .stdout(std::process::Stdio::null())
+        .stderr(std::process::Stdio::null())
+        .status()
+        .map(|s| s.success())
+        .unwrap_or(false)
+}
+
+/// Compute Git SHA-256: `SHA256("blob \0" ++ content)`.
+fn git_sha256(content: &[u8]) -> String {
+    // Git object hashing: a "blob <len>\0" header precedes the raw bytes.
+    let mut digest = Sha256::new();
+    digest.update(format!("blob {}\0", content.len()).as_bytes());
+    digest.update(content);
+    hex::encode(digest.finalize())
+}
+
+/// Git SHA-256 of a file's contents; panics with the path on read failure.
+fn git_sha256_file(path: &Path) -> String {
+    match std::fs::read(path) {
+        Ok(bytes) => git_sha256(&bytes),
+        Err(e) => panic!("read {}: {e}", path.display()),
+    }
+}
+
+/// Run the CLI binary with the given args, setting `cwd` as the working dir.
+/// Returns `(exit_code, stdout, stderr)`.
+fn run(cwd: &Path, args: &[&str]) -> (i32, String, String) {
+    let output: Output = Command::new(binary())
+        .args(args)
+        .current_dir(cwd)
+        .env_remove("SOCKET_API_TOKEN") // force public proxy (free-tier)
+        .output()
+        .expect("failed to execute socket-patch binary");
+
+    (
+        // A missing exit code (killed by signal) is reported as -1.
+        output.status.code().unwrap_or(-1),
+        String::from_utf8_lossy(&output.stdout).into_owned(),
+        String::from_utf8_lossy(&output.stderr).into_owned(),
+    )
+}
+
+/// Run the CLI in `cwd` and require exit code 0; the panic message on
+/// failure embeds the captured stdout/stderr so CI logs are self-contained.
+/// Returns `(stdout, stderr)` on success.
+fn assert_run_ok(cwd: &Path, args: &[&str], context: &str) -> (String, String) {
+ let (code, stdout, stderr) = run(cwd, args);
+ assert_eq!(
+ code, 0,
+ "{context} failed (exit {code}).\nstdout:\n{stdout}\nstderr:\n{stderr}"
+ );
+ (stdout, stderr)
+}
+
+/// Run `npm` with `args` in `cwd`, panicking with the full captured
+/// output if the command fails.
+fn npm_run(cwd: &Path, args: &[&str]) {
+ let out = Command::new("npm")
+ .args(args)
+ .current_dir(cwd)
+ .output()
+ .expect("failed to run npm");
+ assert!(
+ out.status.success(),
+ "npm {args:?} failed (exit {:?}).\nstdout:\n{}\nstderr:\n{}",
+ out.status.code(),
+ String::from_utf8_lossy(&out.stdout),
+ String::from_utf8_lossy(&out.stderr),
+ );
+}
+
+/// Write a minimal package.json (avoids `npm init -y` which rejects temp dir
+/// names that start with `.` or contain invalid characters).
+fn write_package_json(cwd: &Path) {
+    let manifest = r#"{"name":"e2e-test","version":"0.0.0","private":true}"#;
+    std::fs::write(cwd.join("package.json"), manifest).expect("write package.json");
+}
+
+// ---------------------------------------------------------------------------
+// Tests
+// ---------------------------------------------------------------------------
+
+/// Full lifecycle: get → verify → list → rollback → apply → remove.
+#[test]
+#[ignore]
+fn test_npm_full_lifecycle() {
+ if !has_command("npm") {
+ eprintln!("SKIP: npm not found on PATH");
+ return;
+ }
+
+ let dir = tempfile::tempdir().unwrap();
+ let cwd = dir.path();
+
+ // -- Setup: create a project and install minimist@1.2.2 ----------------
+ write_package_json(cwd);
+ npm_run(cwd, &["install", "minimist@1.2.2"]);
+
+ let index_js = cwd.join("node_modules/minimist/index.js");
+ assert!(index_js.exists(), "minimist/index.js must exist after npm install");
+
+ // Confirm the original file matches the expected before-hash.
+ assert_eq!(
+ git_sha256_file(&index_js),
+ BEFORE_HASH,
+ "freshly installed index.js should have the expected beforeHash"
+ );
+
+ // -- GET: download + apply patch ---------------------------------------
+ // NOTE(review): from here on the test talks to the live Socket API;
+ // it requires network access and a still-published patch for NPM_UUID.
+ assert_run_ok(cwd, &["get", NPM_UUID], "get");
+
+ // Manifest should exist and contain the patch.
+ let manifest_path = cwd.join(".socket/manifest.json");
+ assert!(manifest_path.exists(), ".socket/manifest.json should exist after get");
+
+ let manifest: serde_json::Value =
+ serde_json::from_str(&std::fs::read_to_string(&manifest_path).unwrap()).unwrap();
+ let patch = &manifest["patches"][NPM_PURL];
+ assert!(patch.is_object(), "manifest should contain {NPM_PURL}");
+ assert_eq!(patch["uuid"].as_str().unwrap(), NPM_UUID);
+
+ // The file should now be patched.
+ assert_eq!(
+ git_sha256_file(&index_js),
+ AFTER_HASH,
+ "index.js should match afterHash after get"
+ );
+
+ // -- LIST: verify JSON output ------------------------------------------
+ let (stdout, _) = assert_run_ok(cwd, &["list", "--json"], "list --json");
+ let list: serde_json::Value = serde_json::from_str(&stdout).unwrap();
+ let patches = list["patches"].as_array().expect("patches should be an array");
+ assert_eq!(patches.len(), 1);
+ assert_eq!(patches[0]["uuid"].as_str().unwrap(), NPM_UUID);
+ assert_eq!(patches[0]["purl"].as_str().unwrap(), NPM_PURL);
+
+ let vulns = patches[0]["vulnerabilities"]
+ .as_array()
+ .expect("vulnerabilities array");
+ assert!(!vulns.is_empty(), "patch should report at least one vulnerability");
+
+ // Verify the vulnerability details match CVE-2021-44906
+ let has_cve = vulns.iter().any(|v| {
+ v["cves"]
+ .as_array()
+ .map_or(false, |cves| cves.iter().any(|c| c == "CVE-2021-44906"))
+ });
+ assert!(has_cve, "vulnerability list should include CVE-2021-44906");
+
+ // -- ROLLBACK: restore original file -----------------------------------
+ assert_run_ok(cwd, &["rollback"], "rollback");
+
+ assert_eq!(
+ git_sha256_file(&index_js),
+ BEFORE_HASH,
+ "index.js should match beforeHash after rollback"
+ );
+
+ // -- APPLY: re-apply from manifest ------------------------------------
+ assert_run_ok(cwd, &["apply"], "apply");
+
+ assert_eq!(
+ git_sha256_file(&index_js),
+ AFTER_HASH,
+ "index.js should match afterHash after re-apply"
+ );
+
+ // -- REMOVE: rollback + remove from manifest ---------------------------
+ assert_run_ok(cwd, &["remove", NPM_UUID], "remove");
+
+ // File should be back to original.
+ assert_eq!(
+ git_sha256_file(&index_js),
+ BEFORE_HASH,
+ "index.js should match beforeHash after remove"
+ );
+
+ // Manifest should have no patches left.
+ let manifest: serde_json::Value =
+ serde_json::from_str(&std::fs::read_to_string(&manifest_path).unwrap()).unwrap();
+ assert!(
+ manifest["patches"].as_object().unwrap().is_empty(),
+ "manifest should be empty after remove"
+ );
+}
+
+/// `apply --dry-run` should not modify files on disk.
+#[test]
+#[ignore]
+fn test_npm_dry_run() {
+ if !has_command("npm") {
+ eprintln!("SKIP: npm not found on PATH");
+ return;
+ }
+
+ let dir = tempfile::tempdir().unwrap();
+ let cwd = dir.path();
+
+ write_package_json(cwd);
+ npm_run(cwd, &["install", "minimist@1.2.2"]);
+
+ let index_js = cwd.join("node_modules/minimist/index.js");
+ assert_eq!(git_sha256_file(&index_js), BEFORE_HASH);
+
+ // Download the patch *without* applying.
+ // NOTE(review): network-dependent from here (live Socket patches API).
+ assert_run_ok(cwd, &["get", NPM_UUID, "--no-apply"], "get --no-apply");
+
+ // File should still be original.
+ assert_eq!(
+ git_sha256_file(&index_js),
+ BEFORE_HASH,
+ "file should not change after get --no-apply"
+ );
+
+ // Dry-run should succeed but leave file untouched.
+ assert_run_ok(cwd, &["apply", "--dry-run"], "apply --dry-run");
+
+ assert_eq!(
+ git_sha256_file(&index_js),
+ BEFORE_HASH,
+ "file should not change after apply --dry-run"
+ );
+
+ // Real apply should work.
+ assert_run_ok(cwd, &["apply"], "apply");
+
+ assert_eq!(
+ git_sha256_file(&index_js),
+ AFTER_HASH,
+ "file should match afterHash after real apply"
+ );
+}
diff --git a/crates/socket-patch-cli/tests/e2e_pypi.rs b/crates/socket-patch-cli/tests/e2e_pypi.rs
new file mode 100644
index 0000000..7756db3
--- /dev/null
+++ b/crates/socket-patch-cli/tests/e2e_pypi.rs
@@ -0,0 +1,383 @@
+//! End-to-end tests for the PyPI patch lifecycle.
+//!
+//! These tests exercise the full CLI against the real Socket API, using the
+//! **pydantic-ai@0.0.36** patch (UUID `725a5343-52ec-4290-b7ce-e1cec55878e1`),
+//! which fixes CVE-2026-25580 (SSRF in URL Download Handling).
+//!
+//! # Prerequisites
+//! - `python3` on PATH (with `venv` and `pip` modules)
+//! - Network access to `patches-api.socket.dev` and `pypi.org`
+//!
+//! # Running
+//! ```sh
+//! cargo test -p socket-patch-cli --test e2e_pypi -- --ignored
+//! ```
+
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+
+use sha2::{Digest, Sha256};
+
+// ---------------------------------------------------------------------------
+// Constants
+// ---------------------------------------------------------------------------
+
+const PYPI_UUID: &str = "725a5343-52ec-4290-b7ce-e1cec55878e1";
+const PYPI_PURL_PREFIX: &str = "pkg:pypi/pydantic-ai@0.0.36";
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/// Path to the compiled `socket-patch` binary, injected by Cargo at
+/// integration-test build time via `CARGO_BIN_EXE_*`.
+fn binary() -> PathBuf {
+    PathBuf::from(env!("CARGO_BIN_EXE_socket-patch"))
+}
+
+/// True when `python3` is on PATH: the probe must both spawn and exit 0.
+fn has_python3() -> bool {
+    let probe = Command::new("python3")
+        .arg("--version")
+        .stdout(std::process::Stdio::null())
+        .stderr(std::process::Stdio::null())
+        .status();
+    matches!(probe, Ok(status) if status.success())
+}
+
+/// Compute Git SHA-256: `SHA256("blob \0" ++ content)`.
+fn git_sha256(content: &[u8]) -> String {
+    // Git object hashing: a "blob <len>\0" header precedes the raw bytes.
+    let mut digest = Sha256::new();
+    digest.update(format!("blob {}\0", content.len()).as_bytes());
+    digest.update(content);
+    hex::encode(digest.finalize())
+}
+
+/// Git SHA-256 of a file's contents; panics with the path on read failure.
+fn git_sha256_file(path: &Path) -> String {
+    match std::fs::read(path) {
+        Ok(bytes) => git_sha256(&bytes),
+        Err(e) => panic!("read {}: {e}", path.display()),
+    }
+}
+
+/// Run the CLI binary with the given args, setting `cwd` as the working dir.
+/// Returns `(exit_code, stdout, stderr)`.
+fn run(cwd: &Path, args: &[&str]) -> (i32, String, String) {
+    let output: Output = Command::new(binary())
+        .args(args)
+        .current_dir(cwd)
+        .env_remove("SOCKET_API_TOKEN") // force public proxy (free-tier)
+        .output()
+        .expect("failed to execute socket-patch binary");
+
+    (
+        // A missing exit code (killed by signal) is reported as -1.
+        output.status.code().unwrap_or(-1),
+        String::from_utf8_lossy(&output.stdout).into_owned(),
+        String::from_utf8_lossy(&output.stderr).into_owned(),
+    )
+}
+
+/// Run the CLI in `cwd` and require exit code 0; the panic message on
+/// failure embeds the captured stdout/stderr so CI logs are self-contained.
+/// Returns `(stdout, stderr)` on success.
+fn assert_run_ok(cwd: &Path, args: &[&str], context: &str) -> (String, String) {
+ let (code, stdout, stderr) = run(cwd, args);
+ assert_eq!(
+ code, 0,
+ "{context} failed (exit {code}).\nstdout:\n{stdout}\nstderr:\n{stderr}"
+ );
+ (stdout, stderr)
+}
+
+/// Find the `site-packages` directory inside a venv.
+///
+/// On Unix: `.venv/lib/python3.X/site-packages`
+/// On Windows: `.venv/Lib/site-packages`
+fn find_site_packages(cwd: &Path) -> PathBuf {
+    let venv = cwd.join(".venv");
+
+    if cfg!(windows) {
+        let candidate = venv.join("Lib").join("site-packages");
+        assert!(candidate.exists(), "site-packages not found at {}", candidate.display());
+        return candidate;
+    }
+
+    // Unix: the interpreter directory is version-suffixed (python3.X), so
+    // scan .venv/lib for the first entry whose name matches and which
+    // actually contains a site-packages directory.
+    let lib = venv.join("lib");
+    for entry in std::fs::read_dir(&lib).expect("read .venv/lib") {
+        let dir_entry = entry.unwrap();
+        let file_name = dir_entry.file_name();
+        if file_name.to_string_lossy().starts_with("python3.") {
+            let candidate = dir_entry.path().join("site-packages");
+            if candidate.exists() {
+                return candidate;
+            }
+        }
+    }
+    panic!("site-packages not found under {}", lib.display());
+}
+
+/// Create a venv and install pydantic-ai (without transitive deps for speed).
+fn setup_venv(cwd: &Path) {
+    // 1. Create the virtual environment.
+    let venv_status = Command::new("python3")
+        .args(["-m", "venv", ".venv"])
+        .current_dir(cwd)
+        .status()
+        .expect("failed to create venv");
+    assert!(venv_status.success(), "python3 -m venv failed");
+
+    // 2. Locate pip inside the venv (layout differs per OS).
+    let pip = cwd.join(if cfg!(windows) {
+        ".venv/Scripts/pip"
+    } else {
+        ".venv/bin/pip"
+    });
+
+    // Install both the meta-package (for dist-info that matches the PURL)
+    // and the slim package (for the actual Python source files).
+    // --no-deps keeps the install fast by skipping transitive dependencies.
+    let install = Command::new(&pip)
+        .args([
+            "install",
+            "--no-deps",
+            "--disable-pip-version-check",
+            "pydantic-ai==0.0.36",
+            "pydantic-ai-slim==0.0.36",
+        ])
+        .current_dir(cwd)
+        .output()
+        .expect("failed to run pip install");
+    assert!(
+        install.status.success(),
+        "pip install failed.\nstdout:\n{}\nstderr:\n{}",
+        String::from_utf8_lossy(&install.stdout),
+        String::from_utf8_lossy(&install.stderr),
+    );
+}
+
+/// Read the manifest and return the files map for the pydantic-ai patch.
+/// Returns `(purl, files)` where files is `{ relative_path: { beforeHash, afterHash } }`.
+fn read_patch_files(manifest_path: &Path) -> (String, serde_json::Value) {
+    let raw = std::fs::read_to_string(manifest_path).unwrap();
+    let manifest: serde_json::Value = serde_json::from_str(&raw).unwrap();
+
+    // Match by PURL prefix: the manifest key may carry qualifiers after the
+    // version, so an exact-equality lookup would be too strict.
+    let patches = manifest["patches"].as_object().expect("patches object");
+    let entry = patches
+        .iter()
+        .find(|(key, _)| key.starts_with(PYPI_PURL_PREFIX))
+        .unwrap_or_else(|| panic!("no patch matching {PYPI_PURL_PREFIX} in manifest"));
+
+    (entry.0.clone(), entry.1["files"].clone())
+}
+
+// ---------------------------------------------------------------------------
+// Tests
+// ---------------------------------------------------------------------------
+
+/// Full lifecycle: get → verify hashes → list → rollback → apply → remove.
+#[test]
+#[ignore]
+fn test_pypi_full_lifecycle() {
+ if !has_python3() {
+ eprintln!("SKIP: python3 not found on PATH");
+ return;
+ }
+
+ let dir = tempfile::tempdir().unwrap();
+ let cwd = dir.path();
+
+ // -- Setup: create venv and install pydantic-ai@0.0.36 ----------------
+ setup_venv(cwd);
+
+ let site_packages = find_site_packages(cwd);
+ assert!(
+ site_packages.join("pydantic_ai").exists(),
+ "pydantic_ai package should be installed in site-packages"
+ );
+
+ // Record original hashes of all files that will be patched.
+ // We'll compare against these after rollback.
+ let files_to_check = [
+ "pydantic_ai/messages.py",
+ "pydantic_ai/models/__init__.py",
+ "pydantic_ai/models/anthropic.py",
+ "pydantic_ai/models/gemini.py",
+ "pydantic_ai/models/openai.py",
+ ];
+ let original_hashes: Vec<(String, String)> = files_to_check
+ .iter()
+ .map(|f| {
+ let path = site_packages.join(f);
+ let hash = if path.exists() {
+ git_sha256_file(&path)
+ } else {
+ String::new() // tolerate files this install did not create
+ };
+ (f.to_string(), hash)
+ })
+ .collect();
+
+ // -- GET: download + apply patch ---------------------------------------
+ // NOTE(review): network-dependent from here (live Socket patches API).
+ assert_run_ok(cwd, &["get", PYPI_UUID], "get");
+
+ let manifest_path = cwd.join(".socket/manifest.json");
+ assert!(manifest_path.exists(), ".socket/manifest.json should exist after get");
+
+ // Parse the manifest to get file hashes from the API.
+ let (purl, files_value) = read_patch_files(&manifest_path);
+ assert!(
+ purl.starts_with(PYPI_PURL_PREFIX),
+ "purl should start with {PYPI_PURL_PREFIX}, got {purl}"
+ );
+
+ let files = files_value.as_object().expect("files should be an object");
+ assert!(!files.is_empty(), "patch should modify at least one file");
+
+ // Verify every file's hash matches the afterHash from the manifest.
+ for (rel_path, info) in files {
+ let after_hash = info["afterHash"]
+ .as_str()
+ .expect("afterHash should be a string");
+ let full_path = site_packages.join(rel_path);
+ assert!(
+ full_path.exists(),
+ "patched file should exist: {}",
+ full_path.display()
+ );
+ assert_eq!(
+ git_sha256_file(&full_path),
+ after_hash,
+ "hash mismatch for {rel_path} after get"
+ );
+ }
+
+ // -- LIST: verify JSON output ------------------------------------------
+ let (stdout, _) = assert_run_ok(cwd, &["list", "--json"], "list --json");
+ let list: serde_json::Value = serde_json::from_str(&stdout).unwrap();
+ let patches = list["patches"].as_array().expect("patches array");
+ assert_eq!(patches.len(), 1, "should have exactly one patch");
+ assert_eq!(patches[0]["uuid"].as_str().unwrap(), PYPI_UUID);
+
+ // Verify vulnerability
+ let vulns = patches[0]["vulnerabilities"]
+ .as_array()
+ .expect("vulnerabilities array");
+ assert!(!vulns.is_empty(), "should have vulnerability info");
+ let has_cve = vulns.iter().any(|v| {
+ v["cves"]
+ .as_array()
+ .map_or(false, |cves| cves.iter().any(|c| c == "CVE-2026-25580"))
+ });
+ assert!(has_cve, "vulnerability list should include CVE-2026-25580");
+
+ // -- ROLLBACK: restore original files ----------------------------------
+ assert_run_ok(cwd, &["rollback"], "rollback");
+
+ // Verify files are restored to their original state.
+ for (rel_path, info) in files {
+ let before_hash = info["beforeHash"].as_str().unwrap_or("");
+ let full_path = site_packages.join(rel_path);
+
+ if before_hash.is_empty() {
+ // New file — should be deleted after rollback.
+ assert!(
+ !full_path.exists(),
+ "new file {rel_path} should be removed after rollback"
+ );
+ } else {
+ // Existing file — hash should match beforeHash.
+ assert_eq!(
+ git_sha256_file(&full_path),
+ before_hash,
+ "{rel_path} should match beforeHash after rollback"
+ );
+ }
+ }
+
+ // Also verify against our originally recorded hashes.
+ for (rel_path, orig_hash) in &original_hashes {
+ if orig_hash.is_empty() {
+ continue; // file didn't exist before
+ }
+ let full_path = site_packages.join(rel_path);
+ if full_path.exists() {
+ assert_eq!(
+ git_sha256_file(&full_path),
+ *orig_hash,
+ "{rel_path} should match original hash after rollback"
+ );
+ }
+ }
+
+ // -- APPLY: re-apply from manifest ------------------------------------
+ assert_run_ok(cwd, &["apply"], "apply");
+
+ for (rel_path, info) in files {
+ let after_hash = info["afterHash"]
+ .as_str()
+ .expect("afterHash should be a string");
+ let full_path = site_packages.join(rel_path);
+ assert_eq!(
+ git_sha256_file(&full_path),
+ after_hash,
+ "{rel_path} should match afterHash after re-apply"
+ );
+ }
+
+ // -- REMOVE: rollback + remove from manifest ---------------------------
+ assert_run_ok(cwd, &["remove", PYPI_UUID], "remove");
+
+ // Manifest should be empty.
+ let manifest: serde_json::Value =
+ serde_json::from_str(&std::fs::read_to_string(&manifest_path).unwrap()).unwrap();
+ assert!(
+ manifest["patches"].as_object().unwrap().is_empty(),
+ "manifest should be empty after remove"
+ );
+}
+
+/// `apply --dry-run` should not modify files on disk.
+#[test]
+#[ignore]
+fn test_pypi_dry_run() {
+ if !has_python3() {
+ eprintln!("SKIP: python3 not found on PATH");
+ return;
+ }
+
+ let dir = tempfile::tempdir().unwrap();
+ let cwd = dir.path();
+
+ setup_venv(cwd);
+
+ let site_packages = find_site_packages(cwd);
+
+ // Record original hashes.
+ let messages_py = site_packages.join("pydantic_ai/messages.py");
+ assert!(messages_py.exists());
+ let original_hash = git_sha256_file(&messages_py);
+
+ // Download without applying.
+ // NOTE(review): network-dependent from here (live Socket patches API).
+ assert_run_ok(cwd, &["get", PYPI_UUID, "--no-apply"], "get --no-apply");
+
+ // File should be unchanged.
+ assert_eq!(
+ git_sha256_file(&messages_py),
+ original_hash,
+ "file should not change after get --no-apply"
+ );
+
+ // Dry-run should leave file untouched.
+ assert_run_ok(cwd, &["apply", "--dry-run"], "apply --dry-run");
+ assert_eq!(
+ git_sha256_file(&messages_py),
+ original_hash,
+ "file should not change after apply --dry-run"
+ );
+
+ // Real apply should work.
+ assert_run_ok(cwd, &["apply"], "apply");
+
+ // Read afterHash from manifest to verify.
+ let manifest_path = cwd.join(".socket/manifest.json");
+ let (_, files_value) = read_patch_files(&manifest_path);
+ let files = files_value.as_object().unwrap();
+ let after_hash = files["pydantic_ai/messages.py"]["afterHash"]
+ .as_str()
+ .unwrap();
+ assert_eq!(
+ git_sha256_file(&messages_py),
+ after_hash,
+ "file should match afterHash after real apply"
+ );
+}
diff --git a/crates/socket-patch-core/Cargo.toml b/crates/socket-patch-core/Cargo.toml
new file mode 100644
index 0000000..7930beb
--- /dev/null
+++ b/crates/socket-patch-core/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "socket-patch-core"
+description = "Core library for socket-patch: manifest, hash, crawlers, patch engine, API client"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+repository.workspace = true
+
+[dependencies]
+serde = { workspace = true }
+serde_json = { workspace = true }
+sha2 = { workspace = true }
+hex = { workspace = true }
+reqwest = { workspace = true }
+tokio = { workspace = true }
+thiserror = { workspace = true }
+walkdir = { workspace = true }
+uuid = { workspace = true }
+regex = { workspace = true }
+once_cell = { workspace = true }
+
+[dev-dependencies]
+tempfile = { workspace = true }
+tokio = { version = "1", features = ["full", "test-util"] }
diff --git a/crates/socket-patch-core/src/api/blob_fetcher.rs b/crates/socket-patch-core/src/api/blob_fetcher.rs
new file mode 100644
index 0000000..8496e9e
--- /dev/null
+++ b/crates/socket-patch-core/src/api/blob_fetcher.rs
@@ -0,0 +1,533 @@
+use std::collections::HashSet;
+use std::path::{Path, PathBuf};
+
+use crate::api::client::ApiClient;
+use crate::manifest::operations::get_after_hash_blobs;
+use crate::manifest::schema::PatchManifest;
+
+/// Result of fetching a single blob.
+#[derive(Debug, Clone)]
+pub struct BlobFetchResult {
+    /// Git SHA-256 hash identifying the blob.
+    pub hash: String,
+    /// Whether the blob was fetched (or already present) successfully.
+    pub success: bool,
+    /// Human-readable error message when `success` is false.
+    // Restored `Option<String>`: the generic parameter was stripped in
+    // transit; every assignment in this module is `Some(String)` / `None`.
+    pub error: Option<String>,
+}
+
+/// Aggregate result of a blob-fetch operation.
+#[derive(Debug, Clone)]
+pub struct FetchMissingBlobsResult {
+    /// Number of blobs considered (missing + already present).
+    pub total: usize,
+    /// Blobs successfully downloaded and written to disk.
+    pub downloaded: usize,
+    /// Blobs that could not be fetched or written.
+    pub failed: usize,
+    /// Blobs skipped because they already existed locally.
+    pub skipped: usize,
+    /// Per-blob outcome, in processing order.
+    pub results: Vec<BlobFetchResult>,
+}
+
+/// Progress callback signature.
+///
+/// Called with `(hash, one_based_index, total)` for each blob.
+// Restored the stripped generic: signature reconstructed from the doc
+// comment above and the `cb(hash, i + 1, total)` call in `download_hashes`.
+pub type OnProgress = Box<dyn Fn(&str, usize, usize) + Send + Sync>;
+
+// ── Public API ────────────────────────────────────────────────────────
+
+/// Determine which `afterHash` blobs referenced in the manifest are
+/// missing from disk.
+///
+/// Only checks `afterHash` blobs because those are the patched file
+/// contents needed for applying patches. `beforeHash` blobs are
+/// downloaded on-demand during rollback.
+pub async fn get_missing_blobs(
+    manifest: &PatchManifest,
+    blobs_path: &Path,
+) -> HashSet<String> {
+    let after_hash_blobs = get_after_hash_blobs(manifest);
+    let mut missing = HashSet::new();
+
+    for hash in after_hash_blobs {
+        // A blob is "present" iff a file named exactly by its hash exists.
+        let blob_path = blobs_path.join(&hash);
+        if tokio::fs::metadata(&blob_path).await.is_err() {
+            missing.insert(hash);
+        }
+    }
+
+    missing
+}
+
+/// Download all missing `afterHash` blobs referenced in the manifest.
+///
+/// Creates the `blobs_path` directory if it does not exist.
+///
+/// # Arguments
+///
+/// * `manifest` – Patch manifest whose `afterHash` blobs to check.
+/// * `blobs_path` – Directory where blob files are stored (one file per
+///   hash).
+/// * `client` – [`ApiClient`] used to fetch blobs from the server.
+/// * `on_progress` – Optional callback invoked before each download with
+///   `(hash, 1-based index, total)`.
+pub async fn fetch_missing_blobs(
+    manifest: &PatchManifest,
+    blobs_path: &Path,
+    client: &ApiClient,
+    on_progress: Option<&OnProgress>,
+) -> FetchMissingBlobsResult {
+    let missing = get_missing_blobs(manifest, blobs_path).await;
+
+    if missing.is_empty() {
+        return FetchMissingBlobsResult {
+            total: 0,
+            downloaded: 0,
+            failed: 0,
+            skipped: 0,
+            results: Vec::new(),
+        };
+    }
+
+    // Ensure blobs directory exists
+    if let Err(e) = tokio::fs::create_dir_all(blobs_path).await {
+        // If we cannot create the directory, every blob will fail.
+        let results: Vec<BlobFetchResult> = missing
+            .iter()
+            .map(|h| BlobFetchResult {
+                hash: h.clone(),
+                success: false,
+                error: Some(format!("Cannot create blobs directory: {}", e)),
+            })
+            .collect();
+        let failed = results.len();
+        return FetchMissingBlobsResult {
+            total: failed,
+            downloaded: 0,
+            failed,
+            skipped: 0,
+            results,
+        };
+    }
+
+    let hashes: Vec<String> = missing.into_iter().collect();
+    download_hashes(&hashes, blobs_path, client, on_progress).await
+}
+
+/// Download specific blobs identified by their hashes.
+///
+/// Useful for fetching `beforeHash` blobs during rollback, where only a
+/// subset of hashes is required.
+///
+/// Blobs that already exist on disk are skipped (counted in `skipped`).
+pub async fn fetch_blobs_by_hash(
+    hashes: &HashSet<String>,
+    blobs_path: &Path,
+    client: &ApiClient,
+    on_progress: Option<&OnProgress>,
+) -> FetchMissingBlobsResult {
+    if hashes.is_empty() {
+        return FetchMissingBlobsResult {
+            total: 0,
+            downloaded: 0,
+            failed: 0,
+            skipped: 0,
+            results: Vec::new(),
+        };
+    }
+
+    // Ensure blobs directory exists
+    if let Err(e) = tokio::fs::create_dir_all(blobs_path).await {
+        // Without the directory no blob can be written; fail them all.
+        let results: Vec<BlobFetchResult> = hashes
+            .iter()
+            .map(|h| BlobFetchResult {
+                hash: h.clone(),
+                success: false,
+                error: Some(format!("Cannot create blobs directory: {}", e)),
+            })
+            .collect();
+        let failed = results.len();
+        return FetchMissingBlobsResult {
+            total: failed,
+            downloaded: 0,
+            failed,
+            skipped: 0,
+            results,
+        };
+    }
+
+    // Filter out hashes that already exist on disk
+    let mut to_download: Vec<String> = Vec::new();
+    let mut skipped: usize = 0;
+    let mut results: Vec<BlobFetchResult> = Vec::new();
+
+    for hash in hashes {
+        let blob_path = blobs_path.join(hash);
+        if tokio::fs::metadata(&blob_path).await.is_ok() {
+            // Already present: record a success without re-downloading.
+            skipped += 1;
+            results.push(BlobFetchResult {
+                hash: hash.clone(),
+                success: true,
+                error: None,
+            });
+        } else {
+            to_download.push(hash.clone());
+        }
+    }
+
+    if to_download.is_empty() {
+        return FetchMissingBlobsResult {
+            total: hashes.len(),
+            downloaded: 0,
+            failed: 0,
+            skipped,
+            results,
+        };
+    }
+
+    let download_result =
+        download_hashes(&to_download, blobs_path, client, on_progress).await;
+
+    FetchMissingBlobsResult {
+        total: hashes.len(),
+        downloaded: download_result.downloaded,
+        failed: download_result.failed,
+        skipped,
+        results: {
+            // "Already present" entries first, then download outcomes.
+            let mut combined = results;
+            combined.extend(download_result.results);
+            combined
+        },
+    }
+}
+
+/// Format a [`FetchMissingBlobsResult`] as a human-readable string.
+pub fn format_fetch_result(result: &FetchMissingBlobsResult) -> String {
+    if result.total == 0 {
+        return "All blobs are present locally.".to_string();
+    }
+
+    let mut lines: Vec<String> = Vec::new();
+
+    if result.downloaded > 0 {
+        lines.push(format!("Downloaded {} blob(s)", result.downloaded));
+    }
+
+    if result.failed > 0 {
+        lines.push(format!("Failed to download {} blob(s)", result.failed));
+
+        let failed_results: Vec<&BlobFetchResult> =
+            result.results.iter().filter(|r| !r.success).collect();
+
+        // Show at most five failures to keep output readable.
+        for r in failed_results.iter().take(5) {
+            let short_hash = if r.hash.len() >= 12 {
+                &r.hash[..12]
+            } else {
+                &r.hash
+            };
+            let err = r.error.as_deref().unwrap_or("unknown error");
+            lines.push(format!(" - {}...: {}", short_hash, err));
+        }
+
+        if failed_results.len() > 5 {
+            lines.push(format!(" ... and {} more", failed_results.len() - 5));
+        }
+    }
+
+    lines.join("\n")
+}
+
+// ── Internal helpers ──────────────────────────────────────────────────
+
+/// Download a list of blob hashes sequentially, writing each to
+/// `blobs_path/<hash>`.
+async fn download_hashes(
+    hashes: &[String],
+    blobs_path: &Path,
+    client: &ApiClient,
+    on_progress: Option<&OnProgress>,
+) -> FetchMissingBlobsResult {
+    let total = hashes.len();
+    let mut downloaded: usize = 0;
+    let mut failed: usize = 0;
+    let mut results: Vec<BlobFetchResult> = Vec::with_capacity(total);
+
+    for (i, hash) in hashes.iter().enumerate() {
+        // Report progress before the (potentially slow) network call.
+        if let Some(ref cb) = on_progress {
+            cb(hash, i + 1, total);
+        }
+
+        match client.fetch_blob(hash).await {
+            Ok(Some(data)) => {
+                let blob_path: PathBuf = blobs_path.join(hash);
+                match tokio::fs::write(&blob_path, &data).await {
+                    Ok(()) => {
+                        results.push(BlobFetchResult {
+                            hash: hash.clone(),
+                            success: true,
+                            error: None,
+                        });
+                        downloaded += 1;
+                    }
+                    Err(e) => {
+                        results.push(BlobFetchResult {
+                            hash: hash.clone(),
+                            success: false,
+                            error: Some(format!("Failed to write blob to disk: {}", e)),
+                        });
+                        failed += 1;
+                    }
+                }
+            }
+            Ok(None) => {
+                // Server answered but has no blob for this hash.
+                results.push(BlobFetchResult {
+                    hash: hash.clone(),
+                    success: false,
+                    error: Some("Blob not found on server".to_string()),
+                });
+                failed += 1;
+            }
+            Err(e) => {
+                results.push(BlobFetchResult {
+                    hash: hash.clone(),
+                    success: false,
+                    error: Some(e.to_string()),
+                });
+                failed += 1;
+            }
+        }
+    }
+
+    FetchMissingBlobsResult {
+        total,
+        downloaded,
+        failed,
+        skipped: 0,
+        results,
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::manifest::schema::{PatchFileInfo, PatchManifest, PatchRecord};
+    use std::collections::HashMap;
+
+    /// Build a manifest with a single npm patch whose files carry the
+    /// given `afterHash` values (and synthetic 64-char `beforeHash`es).
+    fn make_manifest_with_hashes(after_hashes: &[&str]) -> PatchManifest {
+        let mut files = HashMap::new();
+        for (i, ah) in after_hashes.iter().enumerate() {
+            files.insert(
+                format!("package/file{}.js", i),
+                PatchFileInfo {
+                    before_hash: format!(
+                        "before{}{}",
+                        "0".repeat(58),
+                        format!("{:06}", i)
+                    ),
+                    after_hash: ah.to_string(),
+                },
+            );
+        }
+
+        let mut patches = HashMap::new();
+        patches.insert(
+            "pkg:npm/test@1.0.0".to_string(),
+            PatchRecord {
+                uuid: "test-uuid".to_string(),
+                exported_at: "2024-01-01T00:00:00Z".to_string(),
+                files,
+                vulnerabilities: HashMap::new(),
+                description: "test".to_string(),
+                license: "MIT".to_string(),
+                tier: "free".to_string(),
+            },
+        );
+
+        PatchManifest { patches }
+    }
+
+    #[tokio::test]
+    async fn test_get_missing_blobs_all_missing() {
+        let dir = tempfile::tempdir().unwrap();
+        let blobs_path = dir.path().join("blobs");
+        tokio::fs::create_dir_all(&blobs_path).await.unwrap();
+
+        let h1 = "a".repeat(64);
+        let h2 = "b".repeat(64);
+        let manifest = make_manifest_with_hashes(&[&h1, &h2]);
+
+        let missing = get_missing_blobs(&manifest, &blobs_path).await;
+        assert_eq!(missing.len(), 2);
+        assert!(missing.contains(&h1));
+        assert!(missing.contains(&h2));
+    }
+
+    #[tokio::test]
+    async fn test_get_missing_blobs_some_present() {
+        let dir = tempfile::tempdir().unwrap();
+        let blobs_path = dir.path().join("blobs");
+        tokio::fs::create_dir_all(&blobs_path).await.unwrap();
+
+        let h1 = "a".repeat(64);
+        let h2 = "b".repeat(64);
+
+        // Write h1 to disk so it is NOT missing
+        tokio::fs::write(blobs_path.join(&h1), b"data").await.unwrap();
+
+        let manifest = make_manifest_with_hashes(&[&h1, &h2]);
+        let missing = get_missing_blobs(&manifest, &blobs_path).await;
+        assert_eq!(missing.len(), 1);
+        assert!(missing.contains(&h2));
+        assert!(!missing.contains(&h1));
+    }
+
+    #[tokio::test]
+    async fn test_get_missing_blobs_empty_manifest() {
+        let dir = tempfile::tempdir().unwrap();
+        let blobs_path = dir.path().join("blobs");
+        tokio::fs::create_dir_all(&blobs_path).await.unwrap();
+
+        let manifest = PatchManifest::new();
+        let missing = get_missing_blobs(&manifest, &blobs_path).await;
+        assert!(missing.is_empty());
+    }
+
+    #[test]
+    fn test_format_fetch_result_all_present() {
+        let result = FetchMissingBlobsResult {
+            total: 0,
+            downloaded: 0,
+            failed: 0,
+            skipped: 0,
+            results: Vec::new(),
+        };
+        assert_eq!(format_fetch_result(&result), "All blobs are present locally.");
+    }
+
+    #[test]
+    fn test_format_fetch_result_some_downloaded() {
+        let result = FetchMissingBlobsResult {
+            total: 3,
+            downloaded: 2,
+            failed: 1,
+            skipped: 0,
+            results: vec![
+                BlobFetchResult {
+                    hash: "a".repeat(64),
+                    success: true,
+                    error: None,
+                },
+                BlobFetchResult {
+                    hash: "b".repeat(64),
+                    success: true,
+                    error: None,
+                },
+                BlobFetchResult {
+                    hash: "c".repeat(64),
+                    success: false,
+                    error: Some("Blob not found on server".to_string()),
+                },
+            ],
+        };
+        let output = format_fetch_result(&result);
+        assert!(output.contains("Downloaded 2 blob(s)"));
+        assert!(output.contains("Failed to download 1 blob(s)"));
+        assert!(output.contains("cccccccccccc..."));
+        assert!(output.contains("Blob not found on server"));
+    }
+
+    #[test]
+    fn test_format_fetch_result_truncates_at_5() {
+        // Restored `Vec<BlobFetchResult>` (generic was stripped in transit).
+        let results: Vec<BlobFetchResult> = (0..8)
+            .map(|i| BlobFetchResult {
+                hash: format!("{:0>64}", i),
+                success: false,
+                error: Some(format!("error {}", i)),
+            })
+            .collect();
+
+        let result = FetchMissingBlobsResult {
+            total: 8,
+            downloaded: 0,
+            failed: 8,
+            skipped: 0,
+            results,
+        };
+        let output = format_fetch_result(&result);
+        assert!(output.contains("... and 3 more"));
+    }
+
+    // ── Group 8: format edge cases ───────────────────────────────────
+
+    #[test]
+    fn test_format_only_downloaded() {
+        let result = FetchMissingBlobsResult {
+            total: 3,
+            downloaded: 3,
+            failed: 0,
+            skipped: 0,
+            results: vec![
+                BlobFetchResult { hash: "a".repeat(64), success: true, error: None },
+                BlobFetchResult { hash: "b".repeat(64), success: true, error: None },
+                BlobFetchResult { hash: "c".repeat(64), success: true, error: None },
+            ],
+        };
+        let output = format_fetch_result(&result);
+        assert!(output.contains("Downloaded 3 blob(s)"));
+        assert!(!output.contains("Failed"));
+    }
+
+    #[test]
+    fn test_format_short_hash() {
+        let result = FetchMissingBlobsResult {
+            total: 1,
+            downloaded: 0,
+            failed: 1,
+            skipped: 0,
+            results: vec![BlobFetchResult {
+                hash: "abc".into(),
+                success: false,
+                error: Some("not found".into()),
+            }],
+        };
+        let output = format_fetch_result(&result);
+        // Hash is < 12 chars, should show full hash
+        assert!(output.contains("abc..."));
+    }
+
+    #[test]
+    fn test_format_error_none() {
+        let result = FetchMissingBlobsResult {
+            total: 1,
+            downloaded: 0,
+            failed: 1,
+            skipped: 0,
+            results: vec![BlobFetchResult {
+                hash: "d".repeat(64),
+                success: false,
+                error: None,
+            }],
+        };
+        let output = format_fetch_result(&result);
+        assert!(output.contains("unknown error"));
+    }
+
+    #[test]
+    fn test_format_only_failed() {
+        let result = FetchMissingBlobsResult {
+            total: 2,
+            downloaded: 0,
+            failed: 2,
+            skipped: 0,
+            results: vec![
+                BlobFetchResult {
+                    hash: "a".repeat(64),
+                    success: false,
+                    error: Some("timeout".into()),
+                },
+                BlobFetchResult {
+                    hash: "b".repeat(64),
+                    success: false,
+                    error: Some("timeout".into()),
+                },
+            ],
+        };
+        let output = format_fetch_result(&result);
+        assert!(!output.contains("Downloaded"));
+        assert!(output.contains("Failed to download 2 blob(s)"));
+    }
+}
diff --git a/crates/socket-patch-core/src/api/client.rs b/crates/socket-patch-core/src/api/client.rs
new file mode 100644
index 0000000..e1757e8
--- /dev/null
+++ b/crates/socket-patch-core/src/api/client.rs
@@ -0,0 +1,938 @@
+use std::collections::HashSet;
+
+use reqwest::header::{self, HeaderMap, HeaderValue};
+use reqwest::StatusCode;
+use serde::Serialize;
+
+use crate::api::types::*;
+use crate::constants::{
+ DEFAULT_PATCH_API_PROXY_URL, DEFAULT_SOCKET_API_URL, USER_AGENT as USER_AGENT_VALUE,
+};
+
+/// Check if debug mode is enabled via the SOCKET_PATCH_DEBUG env var.
+///
+/// Returns true only when the variable is set to exactly "1" or "true";
+/// unset or any other value means disabled.
+fn is_debug_enabled() -> bool {
+    std::env::var("SOCKET_PATCH_DEBUG")
+        .map(|value| value == "1" || value == "true")
+        .unwrap_or(false)
+}
+
+/// Print a message to stderr, but only when debug mode is enabled.
+fn debug_log(message: &str) {
+    if !is_debug_enabled() {
+        return;
+    }
+    eprintln!("[socket-patch debug] {}", message);
+}
+
+/// Severity order for sorting (most severe = lowest number).
+///
+/// Comparison is case-insensitive; `None` and unrecognized severities
+/// sort last with rank 4.
+fn get_severity_order(severity: Option<&str>) -> u8 {
+    let normalized = match severity {
+        Some(s) => s.to_lowercase(),
+        None => return 4,
+    };
+    match normalized.as_str() {
+        "critical" => 0,
+        "high" => 1,
+        "medium" => 2,
+        "low" => 3,
+        _ => 4,
+    }
+}
+
+/// Options for constructing an [`ApiClient`].
+#[derive(Debug, Clone)]
+pub struct ApiClientOptions {
+    /// Base API URL; trailing slashes are stripped at construction time.
+    pub api_url: String,
+    /// Optional bearer token sent as an `Authorization` header when present.
+    pub api_token: Option<String>,
+    /// When true, the client will use the public patch API proxy
+    /// which only provides access to free patches without authentication.
+    pub use_public_proxy: bool,
+    /// Organization slug for authenticated API access.
+    /// Required when using authenticated API (not public proxy).
+    pub org_slug: Option<String>,
+}
+
+/// HTTP client for the Socket Patch API.
+///
+/// Supports both the authenticated Socket API (`api.socket.dev`) and the
+/// public proxy (`patches-api.socket.dev`) which serves free patches
+/// without authentication.
+#[derive(Debug, Clone)]
+pub struct ApiClient {
+    /// Underlying HTTP client, pre-configured with default headers.
+    client: reqwest::Client,
+    /// Base API URL with any trailing slash stripped.
+    api_url: String,
+    /// Bearer token, if authenticated access was configured.
+    api_token: Option<String>,
+    /// When true, requests target the public patch proxy (free patches only).
+    use_public_proxy: bool,
+    /// Organization slug, required for authenticated (non-proxy) access.
+    org_slug: Option<String>,
+}
+
+/// Body payload for the batch search POST endpoint.
+#[derive(Serialize)]
+struct BatchSearchBody {
+    /// One entry per package URL (purl) to look up.
+    components: Vec<BatchComponent>,
+}
+
+/// A single component entry in the batch search request body.
+#[derive(Serialize)]
+struct BatchComponent {
+    /// Package URL (purl) identifying the component.
+    purl: String,
+}
+
+impl ApiClient {
+    /// Create a new API client from the given options.
+    ///
+    /// Builds a `reqwest::Client` whose default headers carry the
+    /// User-Agent, `Accept: application/json`, and — when a token was
+    /// supplied and is a valid header value — `Authorization: Bearer …`.
+    /// Trailing slashes are stripped from the configured API URL.
+    pub fn new(options: ApiClientOptions) -> Self {
+        let ApiClientOptions { api_url, api_token, use_public_proxy, org_slug } = options;
+        let api_url = api_url.trim_end_matches('/').to_string();
+
+        let mut headers = HeaderMap::new();
+        headers.insert(header::USER_AGENT, HeaderValue::from_static(USER_AGENT_VALUE));
+        headers.insert(header::ACCEPT, HeaderValue::from_static("application/json"));
+
+        // A token that cannot be encoded as a header value is silently
+        // dropped rather than failing construction (best-effort auth).
+        if let Some(token) = api_token.as_deref() {
+            if let Ok(value) = HeaderValue::from_str(&format!("Bearer {}", token)) {
+                headers.insert(header::AUTHORIZATION, value);
+            }
+        }
+
+        let client = reqwest::Client::builder()
+            .default_headers(headers)
+            .build()
+            .expect("failed to build reqwest client");
+
+        Self { client, api_url, api_token, use_public_proxy, org_slug }
+    }
+
+ // ── Internal helpers ──────────────────────────────────────────────
+
+ /// Internal GET that deserialises JSON. Returns `Ok(None)` on 404.
+ async fn get_json(
+ &self,
+ path: &str,
+ ) -> Result