diff --git a/.github/workflows/update-dependencies-from-metadata.yml b/.github/workflows/update-dependencies-from-metadata.yml new file mode 100644 index 00000000..2d4dcba5 --- /dev/null +++ b/.github/workflows/update-dependencies-from-metadata.yml @@ -0,0 +1,378 @@ +name: Update Dependencies From Metadata (Retrieve, Metadata, Compile, Test, Create PR) + +on: + workflow_dispatch: + schedule: + - cron: '57 13 * * *' # daily at 13:57 UTC + +jobs: + retrieve: + name: Retrieve New Versions and Generate Metadata + runs-on: ubuntu-latest + outputs: + metadata-filepath: ${{ steps.retrieve.outputs.metadata-filepath }} + metadata-json: ${{ steps.retrieve.outputs.metadata-json }} + # from-source-metadata-filepath is the path to a file containing a subset + # of metadata-json entries for NON-compiled dependencies + from-source-metadata-filepath: ${{ steps.retrieve.outputs.from-source-metadata-filepath }} + # compilation-json is a subset of metadata-json entries which are missing + # a `checksum` and `uri` + compilation-json: ${{ steps.retrieve.outputs.compilation-json }} + id: ${{ steps.retrieve.outputs.id }} + length: ${{ steps.retrieve.outputs.length }} + compilation-length: ${{ steps.retrieve.outputs.compilation-length }} + steps: + - name: Check out code + uses: actions/checkout@v6 + + - name: Setup Go + uses: actions/setup-go@v6 + with: + # hashFiles returns empty string if file does not exist + go-version-file: ${{ hashFiles('dependency/retrieval/go.mod') != '' && 'dependency/retrieval/go.mod' || 'go.mod' }} + + - name: Run Retrieve + id: retrieve + working-directory: dependency + run: | + #!/usr/bin/env bash + set -euo pipefail + shopt -s inherit_errexit + + OUTPUT="/tmp/metadata.json" + + make retrieve \ + buildpackTomlPath="${{ github.workspace }}/buildpack.toml" \ + output="${OUTPUT}" + + id=$(jq -r .[0].id < "${OUTPUT}") + content=$(jq -r < "${OUTPUT}") + + length=$(echo $content | jq -r '. | length') + + compilation=$(echo $content | jq -r 'map(select(.checksum == null and .uri == null))'?) + complength=$(echo $compilation | jq -r '. | length') + echo $content | jq -r 'map(select(.checksum != null and .uri != null))'? 
> "/tmp/from-source-metadata.json" + echo "from-source-metadata-filepath=/tmp/from-source-metadata.json" >> "$GITHUB_OUTPUT" + + + delimiter="$(uuidgen)" + echo "metadata-filepath=${OUTPUT}" >> "$GITHUB_OUTPUT" + printf "metadata-json<<%s\n%s\n%s\n" "${delimiter}" "${content}" "${delimiter}" >> "$GITHUB_OUTPUT" # see https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + echo "id=$id" >> "$GITHUB_OUTPUT" + echo "length=$length" >> "$GITHUB_OUTPUT" + printf "compilation-json<<%s\n%s\n%s\n" "${delimiter}" "${compilation}" "${delimiter}" >> "$GITHUB_OUTPUT" # see https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + echo "compilation-length=$complength" >> "$GITHUB_OUTPUT" + + - name: Upload `${{ steps.retrieve.outputs.metadata-filepath }}` + uses: actions/upload-artifact@v6 + with: + name: metadata.json + path: ${{ steps.retrieve.outputs.metadata-filepath }} + + - name: Upload `${{ steps.retrieve.outputs.from-source-metadata-filepath }}` + uses: actions/upload-artifact@v6 + with: + name: from-source-metadata.json + path: ${{ steps.retrieve.outputs.from-source-metadata-filepath }} + + # Check if there is buildpack-provided compilation code and testing code + # Optional compilation code expected at: /dependency/actions/compile/ + # Optional testing code expected at: /dependency/test/ + get-compile-and-test: + name: Get Compilation and Testing Code + outputs: + should-compile: ${{ steps.compile-check.outputs.should-compile }} + should-test: ${{ steps.test-check.outputs.should-test }} + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v6 + + - name: Has Compilation Action? + id: compile-check + run: | + if test -d "dependency/actions/compile"; then + echo "Compilation action provided" + echo "should-compile=true" >> "$GITHUB_OUTPUT" + fi + + - name: Has Testing Action? 
+ id: test-check + run: | + if test -d "dependency/test"; then + echo "Testing file provided" + echo "should-test=true" >> "$GITHUB_OUTPUT" + fi + + test: + name: Test Non-Compiled Dependency + needs: + - retrieve + - get-compile-and-test + strategy: + matrix: + includes: ${{ fromJSON(needs.retrieve.outputs.metadata-json) }} + # Run job step if BOTH: + # (1) needs.get-compile-and-test.outputs.should-test = TRUE -> if there is a dependency/test directory in the buildpack + # (2) needs.get-compile-and-test.outputs.should-compile = FALSE -> if there is NOT a dependency/actions/compile directory in the buildpack + # AND: + # (3) there is at least one new version to test + if: ${{ needs.retrieve.outputs.length > 0 && needs.get-compile-and-test.outputs.should-test == 'true' && needs.get-compile-and-test.outputs.should-compile == 'false' }} + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v6 + + - name: Make Temporary Artifact Directory + id: make-outputdir + run: echo "outputdir=$(mktemp -d)" >> "$GITHUB_OUTPUT" + + # Download the tarball for testing if: + # (1) dependency testing code is present in the buildpack directory + # (2) URI in metadata.json is available + - name: Download upstream tarball (if not compiled) + if: ${{ matrix.includes.uri != '' && needs.get-compile-and-test.outputs.should-test == 'true' }} + run: | + #!/usr/bin/env bash + set -euo pipefail + shopt -s inherit_errexit + + curl ${{ matrix.includes.uri }} \ + --fail-with-body \ + --show-error \ + --silent \ + --location \ + --output ${{ steps.make-outputdir.outputs.outputdir }}/dependency.tgz + + # Test the dependency tarball if: + # (1) dependency testing code is present in the buildpack directory + - name: Test Upstream Dependency + working-directory: dependency + if: ${{ needs.get-compile-and-test.outputs.should-test == 'true' }} + run: | + make test \ + version="${{ matrix.includes.version }}" \ + tarballPath="${{ steps.make-outputdir.outputs.outputdir }}/*.tgz" + compile: + name: Compile and Test Dependency + needs: + - retrieve + - get-compile-and-test + strategy: + matrix: + includes: ${{ fromJSON(needs.retrieve.outputs.compilation-json) }} + # Run job step if: + # (1) needs.get-compile-and-test.outputs.should-compile -> if there is a dependency/actions/compile directory in the buildpack + # (2) OR needs.get-compile-and-test.outputs.should-test -> if there is a dependency/test directory in the buildpack + # AND: + # (3) there is at least one version to compile/test + if: ${{ needs.retrieve.outputs.compilation-length > 0 && (needs.get-compile-and-test.outputs.should-compile == 'true' || needs.get-compile-and-test.outputs.should-test == 'true') }} + uses: ./.github/workflows/compile-dependency.yml + with: + version: "${{ matrix.includes.version }}" + target: "${{ matrix.includes.target }}" + os: "${{ matrix.includes.os }}" + arch: "${{ matrix.includes.arch }}" + shouldCompile: ${{ matrix.includes.checksum == '' && matrix.includes.uri == '' }} + shouldTest: ${{ matrix.includes.checksum == '' && matrix.includes.uri == '' && needs.get-compile-and-test.outputs.should-test == 'true' }} + uploadArtifactName: "${{ needs.retrieve.outputs.id }}-${{ matrix.includes.version }}-${{ matrix.includes.os != '' && matrix.includes.os || 'linux' }}-${{ matrix.includes.arch != '' && matrix.includes.arch || 'amd64' }}-${{ matrix.includes.target }}" + + # Add in the checksum and URI fields to the metadata if the dependency was compiled + update-metadata: + name: Update Metadata (if compiled) + needs: + - 
retrieve + - get-compile-and-test + - compile + strategy: + matrix: + includes: ${{ fromJSON(needs.retrieve.outputs.compilation-json) }} + if: ${{ needs.retrieve.outputs.compilation-length > 0 && needs.get-compile-and-test.outputs.should-compile == 'true' }} + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v6 + + - name: Download artifact files + uses: actions/download-artifact@v7 + with: + name: "${{ needs.retrieve.outputs.id }}-${{ matrix.includes.version }}-${{ matrix.includes.os != '' && matrix.includes.os || 'linux' }}-${{ matrix.includes.arch != '' && matrix.includes.arch || 'amd64' }}-${{ matrix.includes.target }}" + + - name: Get artifact file name + id: get-file-names + run: | + #!/usr/bin/env bash + set -euo pipefail + shopt -s inherit_errexit + + echo "artifact-file=$(basename ./*.tgz)" >> "$GITHUB_OUTPUT" + echo "checksum-file=$(basename ./*.tgz.checksum)" >> "$GITHUB_OUTPUT" + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v5 + with: + aws-access-key-id: ${{ secrets.AWS_S3_DEPENDENCIES_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_S3_DEPENDENCIES_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: Upload to S3 + id: upload + uses: paketo-buildpacks/github-config/actions/dependency/upload-to-s3@main + with: + bucket-name: "paketo-buildpacks" + dependency-name: ${{ needs.retrieve.outputs.id }} + artifact-path: ${{ steps.get-file-names.outputs.artifact-file }} + + - name: Get Checksum + id: get-checksum + run: echo "checksum=$(cat ${{ steps.get-file-names.outputs.checksum-file }})" >> "$GITHUB_OUTPUT" + + - name: Download metadata.json + uses: actions/download-artifact@v7 + with: + name: metadata.json + + # Create target/version specific metadata files + # Due to limitations with the upload action, we can no longer modify/upload the same metadata file + - name: Write dependency-specific metadata to new file + id: dependency-metadata + run: | + #!/usr/bin/env bash + set -euo pipefail + shopt -s inherit_errexit + + metadata_file_name="${{ matrix.includes.target }}-${{ matrix.includes.version }}-${{ matrix.includes.os != '' && matrix.includes.os || 'linux' }}-${{ matrix.includes.arch != '' && matrix.includes.arch || 'amd64' }}-metadata-file.json" + if [[ -z "${{ matrix.includes.os }}" && -z "${{ matrix.includes.arch }}" ]]; then + cat metadata.json | jq -r ['.[] | select( .version == "${{ matrix.includes.version }}" and .target == "${{ matrix.includes.target }}")'] > $metadata_file_name + else + echo "multi-arch buildpack with os and arch specified" + cat metadata.json | jq -r ['.[] | select( .version == "${{ matrix.includes.version }}" and .target == "${{ matrix.includes.target }}" and .os == "${{ matrix.includes.os }}" and .arch == "${{ matrix.includes.arch }}")'] > $metadata_file_name + fi + echo "file=$(echo $metadata_file_name)" >> "$GITHUB_OUTPUT" + + - name: Update `checksum` and `uri` in metadata for ${{ matrix.includes.target }} ${{ matrix.includes.version }} + if: ${{ matrix.includes.checksum == '' && matrix.includes.uri == '' }} + uses: paketo-buildpacks/github-config/actions/dependency/update-metadata-json@main + with: + version: ${{ matrix.includes.version }} + target: ${{ matrix.includes.target }} + checksum: ${{ steps.get-checksum.outputs.checksum }} + uri: ${{ steps.upload.outputs.dependency-uri }} + file: ${{ steps.dependency-metadata.outputs.file }} + os: ${{ matrix.includes.os }} + arch: ${{ matrix.includes.arch }} + + - name: Upload modified metadata + uses: 
actions/upload-artifact@v6 + with: + name: ${{ steps.dependency-metadata.outputs.file }} + path: ${{ steps.dependency-metadata.outputs.file }} + + assemble: + name: Update buildpack.toml + needs: + - retrieve + - test + - compile + - update-metadata + # Update buildpack.toml only if ALL of the following conditions are met: + # (1) Retrieval step has succeeded and has found at least 1 new version + # (2) Testing step has succeeded OR been skipped + # (3) Compilation/Testing step has succeeded OR been skipped + # (4) Update metadata step has succeeded OR been skipped + if: always() && needs.retrieve.result == 'success' && needs.retrieve.outputs.length > 0 && (needs.test.result == 'success' || needs.test.result == 'skipped') && (needs.compile.result == 'success' || needs.compile.result == 'skipped') && (needs.update-metadata.result == 'success' || needs.update-metadata.result == 'skipped') + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v6 + + - name: Checkout Branch + uses: paketo-buildpacks/github-config/actions/pull-request/checkout-branch@main + with: + branch: automation/dependencies/update-from-metadata + + - name: Make Temporary Artifact Directory + id: make-outputdir + run: echo "outputdir=$(mktemp -d)" >> "$GITHUB_OUTPUT" + + + # Metadata file for the non-compiled dependencies, if there are any + - name: Download metadata.json file + uses: actions/download-artifact@v7 + with: + path: "${{ steps.make-outputdir.outputs.outputdir }}/metadata-files" + pattern: "from-source-metadata.json" + merge-multiple: true + + # If we compiled the dependency, and updated the metadata: + # Download each metadata file, and combine them into one + - name: Download individual metadata-file.json file(s) + if: ${{ needs.update-metadata.result == 'success' }} + uses: actions/download-artifact@v7 + with: + path: "${{ steps.make-outputdir.outputs.outputdir }}/metadata-files" + pattern: "*metadata-file.json" + merge-multiple: true + - name: Display Metadata Files + run: ls "${{ steps.make-outputdir.outputs.outputdir }}/metadata-files" + - name: Combine Metadata Files + run: | + #!/usr/bin/env bash + set -euo pipefail + shopt -s inherit_errexit + + jq -s 'add' ${{ steps.make-outputdir.outputs.outputdir }}/metadata-files/* > "${{ steps.make-outputdir.outputs.outputdir }}/metadata.json" + + - name: Update dependencies from metadata.json + id: update + uses: paketo-buildpacks/github-config/actions/dependency/update-from-metadata@main + with: + buildpack_toml_path: "${{ github.workspace }}/buildpack.toml" + metadata_file_path: "${{ steps.make-outputdir.outputs.outputdir }}/metadata.json" + + - name: Show git diff + run: | + git diff + + - name: Commit + id: commit + uses: paketo-buildpacks/github-config/actions/pull-request/create-commit@main + with: + message: "Updating buildpack.toml with new versions ${{ steps.update.outputs.new-versions }}" + pathspec: "." 
+ keyid: ${{ secrets.PAKETO_BOT_GPG_SIGNING_KEY_ID }} + key: ${{ secrets.PAKETO_BOT_GPG_SIGNING_KEY }} + + - name: Push Branch 'automation/dependencies/update-from-metadata' + if: ${{ steps.commit.outputs.commit_sha != '' }} + uses: paketo-buildpacks/github-config/actions/pull-request/push-branch@main + with: + branch: automation/dependencies/update-from-metadata + + - name: Open Pull Request + if: ${{ steps.commit.outputs.commit_sha != '' }} + uses: paketo-buildpacks/github-config/actions/pull-request/open@main + with: + token: ${{ secrets.PAKETO_BOT_GITHUB_TOKEN }} + title: "Updates buildpack.toml with ${{ steps.update.outputs.new-versions }}" + branch: automation/dependencies/update-from-metadata + + failure: + name: Alert on Failure + runs-on: ubuntu-24.04 + needs: [ retrieve, get-compile-and-test, test, compile, update-metadata, assemble ] + if: ${{ always() && needs.retrieve.result == 'failure' || needs.get-compile-and-test.result == 'failure' || needs.test.result == 'failure' || needs.compile.result == 'failure' || needs.update-metadata.result == 'failure' || needs.assemble.result == 'failure' }} + steps: + - name: File Failure Alert Issue + uses: paketo-buildpacks/github-config/actions/issue/file@main + with: + token: ${{ secrets.GITHUB_TOKEN }} + repo: ${{ github.repository }} + label: "failure:update-dependencies" + comment_if_exists: true + issue_title: "Failure: Update Dependencies workflow" + issue_body: | + Update Dependencies From Metadata workflow [failed](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}). + comment_body: | + Another failure occurred: https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} diff --git a/REUSE.toml b/REUSE.toml index 777b0336..d43e6bc3 100644 --- a/REUSE.toml +++ b/REUSE.toml @@ -12,7 +12,7 @@ SPDX-FileCopyrightText = "© 2025 Idiap Research Institute " SPDX-License-Identifier = "Apache-2.0" [[annotations]] -path = "scripts/.util/tools.json" +path = "scripts/**" precedence = "override" SPDX-FileCopyrightText = "Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved." SPDX-License-Identifier = "Apache-2.0" diff --git a/build.go b/build.go index 2d098374..5d3bc507 100644 --- a/build.go +++ b/build.go @@ -13,6 +13,7 @@ import ( pipinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pip" pipenvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pipenv" poetryinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/poetry" + uvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" pythonpackagers "github.com/paketo-buildpacks/python-packagers/pkg/packagers/common" ) @@ -107,6 +108,21 @@ func Build( } else { return packit.BuildResult{}, packit.Fail.WithMessage("missing plan for: %s", entry.Name) } + case uvinstall.UvEnvPlanEntry: + if parameters, ok := buildParameters[uvinstall.UvEnvPlanEntry]; ok { + uvResult, err := uvinstall.Build( + parameters.(uvinstall.UvBuildParameters), + commonBuildParameters, + )(context) + + if err != nil { + return packit.BuildResult{}, err + } + + layers = append(layers, uvResult.Layers...) 
+ } else { + return packit.BuildResult{}, packit.Fail.WithMessage("missing plan for: %s", entry.Name) + } default: return packit.BuildResult{}, packit.Fail.WithMessage("unknown plan: %s", entry.Name) } diff --git a/build_test.go b/build_test.go index b838d6b0..d9fdd8a6 100644 --- a/build_test.go +++ b/build_test.go @@ -26,6 +26,8 @@ import ( pipenvfakes "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pipenv/fakes" poetryinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/poetry" poetryfakes "github.com/paketo-buildpacks/python-packagers/pkg/packagers/poetry/fakes" + uvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" + uvfakes "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv/fakes" "github.com/sclevine/spec" @@ -65,6 +67,9 @@ func testBuild(t *testing.T, context spec.G, it spec.S) { poetryInstallProcess *poetryfakes.InstallProcess poetryPythonPathProcess *poetryfakes.PythonPathLookupProcess + // uv + uvRunner *uvfakes.Runner + buildParameters pkgcommon.CommonBuildParameters plans []packit.BuildpackPlan @@ -105,6 +110,9 @@ func testBuild(t *testing.T, context spec.G, it spec.S) { poetryPythonPathProcess = &poetryfakes.PythonPathLookupProcess{} poetryPythonPathProcess.ExecuteCall.Returns.String = "some-python-path" + // uv + uvRunner = &uvfakes.Runner{} + buildParameters = pkgcommon.CommonBuildParameters{ SbomGenerator: pkgcommon.Generator{}, Clock: chronos.DefaultClock, @@ -129,6 +137,9 @@ func testBuild(t *testing.T, context spec.G, it spec.S) { InstallProcess: poetryInstallProcess, PythonPathLookupProcess: poetryPythonPathProcess, }, + uvinstall.UvEnvPlanEntry: uvinstall.UvBuildParameters{ + Runner: uvRunner, + }, } build = pythonpackagers.Build(logger, buildParameters, packagerParameters) @@ -162,6 +173,9 @@ func testBuild(t *testing.T, context spec.G, it spec.S) { { Name: poetryinstall.PoetryVenv, }, + { + Name: uvinstall.UvEnvPlanEntry, + }, }, }, packit.BuildpackPlan{ @@ -194,6 +208,13 @@ func testBuild(t *testing.T, context spec.G, it spec.S) { }, }, }, + packit.BuildpackPlan{ + Entries: []packit.BuildpackPlanEntry{ + { + Name: uvinstall.UvEnvPlanEntry, + }, + }, + }, } }) diff --git a/detect.go b/detect.go index 8127ecd8..15ee6339 100644 --- a/detect.go +++ b/detect.go @@ -13,6 +13,7 @@ import ( pipinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pip" pipenvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pipenv" poetryinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/poetry" + uvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" ) // Detect will return a packit.DetectFunc that will be invoked during the @@ -22,64 +23,69 @@ import ( // it will pass detection. 
func Detect(logger scribe.Emitter) packit.DetectFunc { return func(context packit.DetectContext) (packit.DetectResult, error) { - plans := []packit.BuildPlan{} - + logger.Title("Checking for pip") pipResult, err := pipinstall.Detect()(context) if err == nil { - plans = append(plans, pipResult.Plan) + // plans = append(plans, pipResult.Plan) + return packit.DetectResult{ + Plan: pipResult.Plan, + }, nil } else { logger.Detail("%s", err) } + logger.Title("Checking for conda") condaResult, err := conda.Detect()(context) if err == nil { - plans = append(plans, condaResult.Plan) + // plans = append(plans, condaResult.Plan) + return packit.DetectResult{ + Plan: condaResult.Plan, + }, nil } else { logger.Detail("%s", err) } + logger.Title("Checking for pipenv") pipenvResult, err := pipenvinstall.Detect( pipenvinstall.NewPipfileParser(), pipenvinstall.NewPipfileLockParser(), )(context) if err == nil { - plans = append(plans, pipenvResult.Plan) + // plans = append(plans, pipenvResult.Plan) + return packit.DetectResult{ + Plan: pipenvResult.Plan, + }, nil } else { logger.Detail("%s", err) } - poetryResult, err := poetryinstall.Detect()(context) + logger.Title("Checking for uv") + uvResult, err := uvinstall.Detect()(context) if err == nil { - plans = append(plans, poetryResult.Plan) + // plans = append(plans, uvResult.Plan) + return packit.DetectResult{ + Plan: uvResult.Plan, + }, nil } else { logger.Detail("%s", err) } - if len(plans) == 0 { - return packit.DetectResult{}, packit.Fail.WithMessage("No python packager manager related files found") - } - - return packit.DetectResult{ - Plan: or(plans...), - }, nil - } -} - -func or(plans ...packit.BuildPlan) packit.BuildPlan { - if len(plans) < 1 { - return packit.BuildPlan{} - } - combinedPlan := plans[0] + logger.Title("Checking for poetry") + poetryResult, err := poetryinstall.Detect()(context) - for i := range plans { - if i == 0 { - continue + if err == nil { + // plans = append(plans, poetryResult.Plan) + return packit.DetectResult{ + Plan: poetryResult.Plan, + }, nil + } else { + logger.Detail("%s", err) } - combinedPlan.Or = append(combinedPlan.Or, plans[i]) + + return packit.DetectResult{}, packit.Fail.WithMessage("No python packager manager related files found") } - return combinedPlan } diff --git a/detect_test.go b/detect_test.go index f11dd90a..b44218f9 100644 --- a/detect_test.go +++ b/detect_test.go @@ -18,6 +18,7 @@ import ( pip "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pip" pipenv "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pipenv" poetry "github.com/paketo-buildpacks/python-packagers/pkg/packagers/poetry" + uv "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" "github.com/sclevine/spec" @@ -232,6 +233,65 @@ func testDetect(t *testing.T, context spec.G, it spec.S) { }) }) + context("When only a uv.lock file is present", func() { + it.Before(func() { + Expect(os.RemoveAll(filepath.Join(workingDir, "x.py"))).To(Succeed()) + Expect(os.WriteFile(filepath.Join(workingDir, "uv.lock"), []byte{}, os.ModePerm)).To(Succeed()) + }) + + it("passes detection", func() { + result, err := detect(packit.DetectContext{ + WorkingDir: workingDir, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(result.Plan).To(Equal(packit.BuildPlan{ + Provides: []packit.BuildPlanProvision{ + { + Name: uv.UvEnvPlanEntry, + }, + }, + Requires: []packit.BuildPlanRequirement{ + { + Name: uv.UvPlanEntry, + Metadata: map[string]interface{}{ + "build": true, + }, + }, + }, + })) + }) + }) + + context("When a uv.lock and 
pyproject.toml file is present", func() { + it.Before(func() { + Expect(os.RemoveAll(filepath.Join(workingDir, "x.py"))).To(Succeed()) + Expect(os.WriteFile(filepath.Join(workingDir, "pyproject.toml"), []byte{}, os.ModePerm)).To(Succeed()) + Expect(os.WriteFile(filepath.Join(workingDir, "uv.lock"), []byte{}, os.ModePerm)).To(Succeed()) + }) + + it("passes detection", func() { + result, err := detect(packit.DetectContext{ + WorkingDir: workingDir, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(result.Plan).To(Equal(packit.BuildPlan{ + Provides: []packit.BuildPlanProvision{ + { + Name: uv.UvEnvPlanEntry, + }, + }, + Requires: []packit.BuildPlanRequirement{ + { + Name: uv.UvPlanEntry, + Metadata: map[string]interface{}{ + "build": true, + }, + }, + }, + })) + }) + }) + context("When no python related files are present", func() { it.Before(func() { Expect(os.RemoveAll(filepath.Join(workingDir, "x.py"))).To(Succeed()) diff --git a/integration.json b/integration.json index 047ba620..38f3fa3d 100644 --- a/integration.json +++ b/integration.json @@ -4,10 +4,7 @@ "index.docker.io/paketobuildpacks/builder-jammy-buildpackless-base:latest", "index.docker.io/paketobuildpacks/ubuntu-noble-builder-buildpackless:latest" ], - "miniconda": "index.docker.io/paketobuildpacks/miniconda", "cpython": "index.docker.io/paketobuildpacks/cpython", - "pip": "index.docker.io/paketobuildpacks/pip", - "pipenv": "index.docker.io/paketobuildpacks/pipenv", - "poetry": "index.docker.io/paketobuildpacks/poetry", + "python-installers": "github.com/idiap/python-installers", "build-plan": "index.docker.io/paketocommunity/build-plan" } diff --git a/integration/helpers.go b/integration/helpers.go index 4d0cb3a9..5067cc8e 100644 --- a/integration/helpers.go +++ b/integration/helpers.go @@ -27,25 +27,14 @@ type BuildpackInfo struct { type TestSettings struct { Buildpacks struct { // Dependency buildpacks - Miniconda struct { - Online string - Offline string - } CPython struct { Online string Offline string } - Pip struct { - Online string - Offline string - } - Pipenv struct { + PythonInstallers struct { Online string Offline string } - Poetry struct { - Online string - } BuildPlan struct { Online string } @@ -57,11 +46,8 @@ type TestSettings struct { } Config struct { - Miniconda string `json:"miniconda"` - CPython string `json:"cpython"` - Pip string `json:"pip"` - Pipenv string `json:"pipenv"` - Poetry string `json:"poetry"` - BuildPlan string `json:"build-plan"` + CPython string `json:"cpython"` + PythonInstallers string `json:"python-installers"` + BuildPlan string `json:"build-plan"` } } diff --git a/integration/packagers/conda_default_test.go b/integration/packagers/conda_default_test.go index 3146e020..4b4db1e2 100644 --- a/integration/packagers/conda_default_test.go +++ b/integration/packagers/conda_default_test.go @@ -62,7 +62,7 @@ func condaTestDefault(t *testing.T, context spec.G, it spec.S) { image, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -106,7 +106,7 @@ func condaTestDefault(t *testing.T, context spec.G, it spec.S) { image, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). 
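Aside (not part of the patch): the rewritten detect.go above replaces the old `or(plans...)` combination with strict first-match precedence — pip, then conda, pipenv, uv, and finally poetry — so exactly one packager's plan is ever returned. A minimal Go sketch of that pattern follows; `candidate` and `firstMatch` are hypothetical names introduced here for illustration, and the packit v2 / scribe import paths are assumed to match the ones the buildpack already uses.

```go
package detectsketch

import (
	"fmt"

	"github.com/paketo-buildpacks/packit/v2"
	"github.com/paketo-buildpacks/packit/v2/scribe"
)

// candidate pairs a packager name with its packit detect function.
type candidate struct {
	name   string
	detect packit.DetectFunc
}

// firstMatch tries each candidate in order and returns the plan of the first
// one whose detection passes; packagers later in the list are never consulted.
func firstMatch(logger scribe.Emitter, candidates []candidate) packit.DetectFunc {
	return func(ctx packit.DetectContext) (packit.DetectResult, error) {
		for _, c := range candidates {
			logger.Title(fmt.Sprintf("Checking for %s", c.name))
			result, err := c.detect(ctx)
			if err == nil {
				return result, nil
			}
			logger.Detail("%s", err)
		}
		return packit.DetectResult{}, packit.Fail.WithMessage("No python package manager related files found")
	}
}
```

Under this ordering, an app that ships both a requirements.txt and a uv.lock resolves to the pip plan, which is worth keeping in mind when reviewing the uv integration tests that follow.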
diff --git a/integration/packagers/conda_layer_reuse_test.go b/integration/packagers/conda_layer_reuse_test.go index cc530ac7..a0f42936 100644 --- a/integration/packagers/conda_layer_reuse_test.go +++ b/integration/packagers/conda_layer_reuse_test.go @@ -74,7 +74,7 @@ func condaTestLayerReuse(t *testing.T, context spec.G, it spec.S) { firstImage, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -98,7 +98,7 @@ func condaTestLayerReuse(t *testing.T, context spec.G, it spec.S) { secondImage, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -139,7 +139,7 @@ func condaTestLayerReuse(t *testing.T, context spec.G, it spec.S) { firstImage, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -163,7 +163,7 @@ func condaTestLayerReuse(t *testing.T, context spec.G, it spec.S) { secondImage, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/conda_lock_file_test.go b/integration/packagers/conda_lock_file_test.go index 406c4c37..3192bdb1 100644 --- a/integration/packagers/conda_lock_file_test.go +++ b/integration/packagers/conda_lock_file_test.go @@ -63,7 +63,7 @@ func condaTestLockFile(t *testing.T, context spec.G, it spec.S) { image, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/conda_logging_test.go b/integration/packagers/conda_logging_test.go index 5fd262a1..d02d59b7 100644 --- a/integration/packagers/conda_logging_test.go +++ b/integration/packagers/conda_logging_test.go @@ -60,7 +60,7 @@ func condaTestLogging(t *testing.T, context spec.G, it spec.S) { image, logs, err := pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -99,7 +99,7 @@ func condaTestLogging(t *testing.T, context spec.G, it spec.S) { image, logs, err := pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -132,7 +132,7 @@ func condaTestLogging(t *testing.T, context spec.G, it spec.S) { secondImage, logs, err := pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). 
@@ -158,7 +158,7 @@ func condaTestLogging(t *testing.T, context spec.G, it spec.S) { image, logs, err := pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/conda_offline_test.go b/integration/packagers/conda_offline_test.go index 2a0fdb4f..05747992 100644 --- a/integration/packagers/conda_offline_test.go +++ b/integration/packagers/conda_offline_test.go @@ -61,7 +61,7 @@ func condaTestOffline(t *testing.T, context spec.G, it spec.S) { image, logs, err = pack.WithNoColor().Build. WithPullPolicy("never"). WithBuildpacks( - settings.Buildpacks.Miniconda.Offline, + settings.Buildpacks.PythonInstallers.Offline, settings.Buildpacks.PythonPackagers.Offline, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/init_test.go b/integration/packagers/init_test.go index 4e1cebbf..20d4f7fd 100644 --- a/integration/packagers/init_test.go +++ b/integration/packagers/init_test.go @@ -60,15 +60,6 @@ func TestIntegration(t *testing.T) { Execute(settings.Config.BuildPlan) Expect(err).NotTo(HaveOccurred()) - settings.Buildpacks.Miniconda.Online, err = buildpackStore.Get. - Execute(settings.Config.Miniconda) - Expect(err).NotTo(HaveOccurred()) - - settings.Buildpacks.Miniconda.Offline, err = buildpackStore.Get. - WithOfflineDependencies(). - Execute(settings.Config.Miniconda) - Expect(err).NotTo(HaveOccurred()) - settings.Buildpacks.CPython.Online, err = buildpackStore.Get. Execute(settings.Config.CPython) Expect(err).NotTo(HaveOccurred()) @@ -78,26 +69,15 @@ func TestIntegration(t *testing.T) { Execute(settings.Config.CPython) Expect(err).NotTo(HaveOccurred()) - settings.Buildpacks.Pip.Online, err = buildpackStore.Get. - Execute(settings.Config.Pip) + settings.Buildpacks.PythonInstallers.Online, err = buildpackStore.Get. + WithVersion("0.1.0"). + Execute(settings.Config.PythonInstallers) Expect(err).NotTo(HaveOccurred()) - settings.Buildpacks.Pip.Offline, err = buildpackStore.Get. + settings.Buildpacks.PythonInstallers.Offline, err = buildpackStore.Get. + WithVersion("0.1.0"). WithOfflineDependencies(). - Execute(settings.Config.Pip) - Expect(err).NotTo(HaveOccurred()) - - settings.Buildpacks.Pipenv.Online, err = buildpackStore.Get. - Execute(settings.Config.Pipenv) - Expect(err).NotTo(HaveOccurred()) - - settings.Buildpacks.Pipenv.Offline, err = buildpackStore.Get. - WithOfflineDependencies(). - Execute(settings.Config.Pipenv) - Expect(err).NotTo(HaveOccurred()) - - settings.Buildpacks.Poetry.Online, err = buildpackStore.Get. - Execute(settings.Config.Poetry) + Execute(settings.Config.PythonInstallers) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.PythonPackagers.Online, err = buildpackStore.Get. @@ -138,5 +118,10 @@ func TestIntegration(t *testing.T) { // poetry suite("Poetry Default", poetryTestDefault) + // uv + suite("uv Default", uvTestDefault) + suite("uv Offline", uvTestOffline) + suite("uv Reused", uvTestReused) + suite.Run(t) } diff --git a/integration/packagers/pip_default_test.go b/integration/packagers/pip_default_test.go index 989c6e38..a01808de 100644 --- a/integration/packagers/pip_default_test.go +++ b/integration/packagers/pip_default_test.go @@ -64,7 +64,7 @@ func pipTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). 
WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -115,7 +115,7 @@ func pipTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -168,7 +168,7 @@ func pipTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/pip_offline_test.go b/integration/packagers/pip_offline_test.go index 4c4cdc8b..c7099f28 100644 --- a/integration/packagers/pip_offline_test.go +++ b/integration/packagers/pip_offline_test.go @@ -64,7 +64,7 @@ func pipTestOffline(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Offline, - settings.Buildpacks.Pip.Offline, + settings.Buildpacks.PythonInstallers.Offline, settings.Buildpacks.PythonPackagers.Offline, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/pip_reused_test.go b/integration/packagers/pip_reused_test.go index 8e09800d..351863b3 100644 --- a/integration/packagers/pip_reused_test.go +++ b/integration/packagers/pip_reused_test.go @@ -63,7 +63,7 @@ func pipTestReused(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -75,7 +75,7 @@ func pipTestReused(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/pipenv_default_test.go b/integration/packagers/pipenv_default_test.go index b30a5fe1..f711f111 100644 --- a/integration/packagers/pipenv_default_test.go +++ b/integration/packagers/pipenv_default_test.go @@ -64,8 +64,7 @@ func pipenvTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, - settings.Buildpacks.Pipenv.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -138,8 +137,7 @@ func pipenvTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, - settings.Buildpacks.Pipenv.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -188,8 +186,7 @@ func pipenvTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). 
WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, - settings.Buildpacks.Pipenv.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/pipenv_offline_test.go b/integration/packagers/pipenv_offline_test.go index 95726e5f..da0e98f5 100644 --- a/integration/packagers/pipenv_offline_test.go +++ b/integration/packagers/pipenv_offline_test.go @@ -69,8 +69,7 @@ func pipenvTestOffline(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Offline, - settings.Buildpacks.Pip.Offline, - settings.Buildpacks.Pipenv.Offline, + settings.Buildpacks.PythonInstallers.Offline, settings.Buildpacks.PythonPackagers.Offline, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/poetry_default_test.go b/integration/packagers/poetry_default_test.go index e56d918f..2c57822d 100644 --- a/integration/packagers/poetry_default_test.go +++ b/integration/packagers/poetry_default_test.go @@ -64,8 +64,7 @@ func poetryTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, - settings.Buildpacks.Poetry.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). @@ -128,8 +127,7 @@ func poetryTestDefault(t *testing.T, context spec.G, it spec.S) { WithPullPolicy("never"). WithBuildpacks( settings.Buildpacks.CPython.Online, - settings.Buildpacks.Pip.Online, - settings.Buildpacks.Poetry.Online, + settings.Buildpacks.PythonInstallers.Online, settings.Buildpacks.PythonPackagers.Online, settings.Buildpacks.BuildPlan.Online, ). diff --git a/integration/packagers/testdata/uv/REUSE.toml b/integration/packagers/testdata/uv/REUSE.toml new file mode 100644 index 00000000..10ed8e3f --- /dev/null +++ b/integration/packagers/testdata/uv/REUSE.toml @@ -0,0 +1,22 @@ +# SPDX-FileCopyrightText: 2025 Idiap Research Institute +# +# SPDX-License-Identifier: CC0-1.0 + +version = 1 + +[[annotations]] +path = [ + "default_app_vendored/vendor/**", +] +precedence = "override" +SPDX-FileCopyrightText = "Authors" +SPDX-License-Identifier = "LicenseRef-vendored" + +[[annotations]] +path = [ + "default_app/*", + "default_app_vendored/*", +] +precedence = "override" +SPDX-FileCopyrightText = "Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved." 
+SPDX-License-Identifier = "Apache-2.0" diff --git a/integration/packagers/testdata/uv/default_app/plan.toml b/integration/packagers/testdata/uv/default_app/plan.toml new file mode 100644 index 00000000..be7dc7d6 --- /dev/null +++ b/integration/packagers/testdata/uv/default_app/plan.toml @@ -0,0 +1,4 @@ +[[requires]] +name = "uv-environment" + [requires.metadata] + launch = true diff --git a/integration/packagers/testdata/uv/default_app/pyproject.toml b/integration/packagers/testdata/uv/default_app/pyproject.toml new file mode 100644 index 00000000..bdd49e16 --- /dev/null +++ b/integration/packagers/testdata/uv/default_app/pyproject.toml @@ -0,0 +1,8 @@ +[project] +name = "integration-test" +version = "0.0.0" +requires-python = "==3.10.*" +dependencies = [ + "Flask==3.0.0", + "gunicorn==20.1.0" +] diff --git a/integration/packagers/testdata/uv/default_app/server.py b/integration/packagers/testdata/uv/default_app/server.py new file mode 100644 index 00000000..9b009f7c --- /dev/null +++ b/integration/packagers/testdata/uv/default_app/server.py @@ -0,0 +1,19 @@ +import os +import sys + +from flask import Flask + +app = Flask(__name__) + + +@app.route('/') +def root(): + python_version = sys.version + return "Hello, world!\nUsing python: " + python_version + "\n" + + +if __name__ == '__main__': + # Get port from environment variable or choose 9099 as local default + port = int(os.getenv("PORT", 8080)) + # Run the app, listening on all IPs with our chosen port number + app.run(host='0.0.0.0', port=port, debug=True) diff --git a/integration/packagers/testdata/uv/default_app/uv.lock b/integration/packagers/testdata/uv/default_app/uv.lock new file mode 100644 index 00000000..b0cdc358 --- /dev/null +++ b/integration/packagers/testdata/uv/default_app/uv.lock @@ -0,0 +1,137 @@ +version = 1 +revision = 3 +requires-python = "==3.10.*" + +[[package]] +name = "blinker" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "flask" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = "click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58", size = 674171, upload-time = "2023-09-30T14:36:12.918Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638", size = 99724, upload-time = "2023-09-30T14:36:10.961Z" }, +] + +[[package]] +name = "gunicorn" +version = "20.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/5b/0d1f0296485a6af03366604142ea8f19f0833894db3512a40ed07b2a56dd/gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8", size = 370601, upload-time = "2021-03-27T01:54:37.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/dd/5b190393e6066286773a67dfcc2f9492058e9b57c4867a95f1ba5caf0a83/gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e", size = 79531, upload-time = "2021-04-27T12:16:23.375Z" }, +] + +[[package]] +name = "integration-test" +version = "0.0.0" +source = { virtual = "." 
} +dependencies = [ + { name = "flask" }, + { name = "gunicorn" }, +] + +[package.metadata] +requires-dist = [ + { name = "flask", specifier = "==3.0.0" }, + { name = "gunicorn", specifier = "==20.1.0" }, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "werkzeug" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/70/1469ef1d3542ae7c2c7b72bd5e3a4e6ee69d7978fa8a3af05a38eca5becf/werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67", size = 864754, upload-time = "2026-01-08T17:49:23.247Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/e4/8d97cca767bcc1be76d16fb76951608305561c6e056811587f36cb1316a8/werkzeug-3.1.5-py3-none-any.whl", hash = 
"sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc", size = 225025, upload-time = "2026-01-08T17:49:21.859Z" }, +] diff --git a/integration/packagers/testdata/uv/default_app_vendored/plan.toml b/integration/packagers/testdata/uv/default_app_vendored/plan.toml new file mode 100644 index 00000000..57269f92 --- /dev/null +++ b/integration/packagers/testdata/uv/default_app_vendored/plan.toml @@ -0,0 +1,9 @@ +[[requires]] +name = "uv" + [requires.metadata] + launch = true + +[[requires]] +name = "uv-environment" + [requires.metadata] + launch = true diff --git a/integration/packagers/testdata/uv/default_app_vendored/pyproject.toml b/integration/packagers/testdata/uv/default_app_vendored/pyproject.toml new file mode 100644 index 00000000..905c7998 --- /dev/null +++ b/integration/packagers/testdata/uv/default_app_vendored/pyproject.toml @@ -0,0 +1,17 @@ +# Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +# +# SPDX-License-Identifier: Apache-2.0 + +[project] +name = "integration-test" +version = "0.0.0" +requires-python = "==3.10.*" +dependencies = [ + "Flask==2.1.3", + "Jinja2==3.1.2", + "MarkupSafe==2.1.1", + "Werkzeug==2.2.1", + "gunicorn==20.1.0", + "itsdangerous==2.1.2", + "setuptools==80.9.0", +] diff --git a/integration/packagers/testdata/uv/default_app_vendored/server.py b/integration/packagers/testdata/uv/default_app_vendored/server.py new file mode 100644 index 00000000..9b009f7c --- /dev/null +++ b/integration/packagers/testdata/uv/default_app_vendored/server.py @@ -0,0 +1,19 @@ +import os +import sys + +from flask import Flask + +app = Flask(__name__) + + +@app.route('/') +def root(): + python_version = sys.version + return "Hello, world!\nUsing python: " + python_version + "\n" + + +if __name__ == '__main__': + # Get port from environment variable or choose 9099 as local default + port = int(os.getenv("PORT", 8080)) + # Run the app, listening on all IPs with our chosen port number + app.run(host='0.0.0.0', port=port, debug=True) diff --git a/integration/packagers/testdata/uv/default_app_vendored/uv.lock b/integration/packagers/testdata/uv/default_app_vendored/uv.lock new file mode 100644 index 00000000..9476b2c0 --- /dev/null +++ b/integration/packagers/testdata/uv/default_app_vendored/uv.lock @@ -0,0 +1,136 @@ +version = 1 +revision = 3 +requires-python = "==3.10.*" + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url 
= "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "flask" +version = "2.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/77/3accd62b8771954e9584beb03f080385b32ddcad30009d2a4fe4068a05d9/Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb", size = 630206, upload-time = "2022-07-13T20:56:00.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/6a/00d144ac1626fbb44c4ff36519712e258128985a5d0ae43344778ae5cbb9/Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c", size = 95556, upload-time = "2022-07-13T20:55:57.512Z" }, +] + +[[package]] +name = "gunicorn" +version = "20.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/5b/0d1f0296485a6af03366604142ea8f19f0833894db3512a40ed07b2a56dd/gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8", size = 370601, upload-time = "2021-03-27T01:54:37.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/dd/5b190393e6066286773a67dfcc2f9492058e9b57c4867a95f1ba5caf0a83/gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e", size = 79531, upload-time = "2021-04-27T12:16:23.375Z" }, +] + +[[package]] +name = "integration-test" +version = "0.0.0" +source = { virtual = "." 
} +dependencies = [ + { name = "flask" }, + { name = "gunicorn" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "markupsafe" }, + { name = "setuptools" }, + { name = "werkzeug" }, +] + +[package.metadata] +requires-dist = [ + { name = "flask", specifier = "==2.1.3" }, + { name = "gunicorn", specifier = "==20.1.0" }, + { name = "itsdangerous", specifier = "==2.1.2" }, + { name = "jinja2", specifier = "==3.1.2" }, + { name = "markupsafe", specifier = "==2.1.1" }, + { name = "setuptools", specifier = "==80.9.0" }, + { name = "werkzeug", specifier = "==2.2.1" }, +] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/a1/d3fb83e7a61fa0c0d3d08ad0a94ddbeff3731c05212617dff3a94e097f08/itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a", size = 56143, upload-time = "2022-03-24T15:12:15.102Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/5f/447e04e828f47465eeab35b5d408b7ebaaaee207f48b7136c5a7267a30ae/itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44", size = 15749, upload-time = "2022-03-24T15:12:13.2Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/ff/75c28576a1d900e87eb6335b063fab47a8ef3c8b4d88524c4bf78f670cce/Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", size = 268239, upload-time = "2022-04-28T17:21:27.579Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/c3/f068337a370801f372f2f8f6bad74a5c140f6fda3d9de154052708dd3c65/Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61", size = 133101, upload-time = "2022-04-28T17:21:25.336Z" }, +] + +[[package]] +name = "markupsafe" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/97/2288fe498044284f39ab8950703e88abbac2abbdf65524d576157af70556/MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", size = 18668, upload-time = "2022-03-15T13:23:27.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/60/94e9de017674f88a514804e2924bdede9a642aba179d2045214719d6ec76/MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", size = 17712, upload-time = "2022-03-15T13:22:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/7e608e1a942232cb8c81ca24093e71e07e2bacbeb2dad62a0f82da28ed54/MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", size = 13623, upload-time = "2022-03-15T13:22:09.066Z" }, + { url = "https://files.pythonhosted.org/packages/ff/3a/42262a3aa6415befee33b275b31afbcef4f7f8d2f4380061b226c692ee2a/MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", size = 26400, upload-time = "2022-03-15T13:22:10.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/82/2e089c6f34e77c073aa5a67040d368aac0dfb9b8ccbb46d381452c26fc33/MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", size = 25596, upload-time = "2022-03-15T13:22:12.358Z" }, + { url = "https://files.pythonhosted.org/packages/a3/47/9dcc08eff8ab94f1e50f59f9cd322b710ef5db7e8590fdd8df924406fc9c/MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", size = 25270, upload-time = "2022-03-15T13:22:15.2Z" }, + { url = "https://files.pythonhosted.org/packages/ad/fa/292a72cddad41e3c06227b446a0af53ff642a40755fc5bd695f439c35ba8/MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", size = 30413, upload-time = "2022-03-15T13:22:17.921Z" }, + { url = "https://files.pythonhosted.org/packages/5c/1a/ac3a2b2a4ef1196c15dd8a143fc28eddeb6e6871d6d1de64dc44ef7f59b6/MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", size = 29518, upload-time = "2022-03-15T13:22:19.677Z" }, + { url = "https://files.pythonhosted.org/packages/fc/e4/78c7607352dd574d524daad079f855757d406d36b919b1864a5a07978390/MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", size = 29804, upload-time = "2022-03-15T13:22:21.47Z" }, + { url = "https://files.pythonhosted.org/packages/5e/3d/0a7df21deca52e20de81f8a895ac29df68944588c0030be9aa1e6c07877c/MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", size = 16464, upload-time = "2022-03-15T13:22:23.718Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4b/15e5b9d40c4b58e97ebcb8ed5845a215fa5b7cf49a7f1cc7908f8db9cf46/MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", size = 17092, upload-time = "2022-03-15T13:22:25.312Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "werkzeug" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/19/a92cdbd9fb795928dfca1031278ae8a7f051e78a2c057c224ad2d4cdd95e/Werkzeug-2.2.1.tar.gz", hash = "sha256:4d7013ef96fd197d1cdeb03e066c6c5a491ccb44758a5b2b91137319383e5a5a", size = 838993, upload-time = "2022-07-27T20:44:35.617Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/93/d6d60870e47162ea6a1bbdd787649eea776b2a70618dd66ed87cb2238543/Werkzeug-2.2.1-py3-none-any.whl", 
hash = "sha256:7e1db6a5ba6b9a8be061e47e900456355b8714c0f238b0313f53afce1a55a79a", size = 232397, upload-time = "2022-07-27T20:44:18.267Z" }, +] diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/Flask-2.1.3-py3-none-any.whl b/integration/packagers/testdata/uv/default_app_vendored/vendor/Flask-2.1.3-py3-none-any.whl new file mode 100644 index 00000000..bd485c76 Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/Flask-2.1.3-py3-none-any.whl differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/Jinja2-3.1.2-py3-none-any.whl b/integration/packagers/testdata/uv/default_app_vendored/vendor/Jinja2-3.1.2-py3-none-any.whl new file mode 100644 index 00000000..d271ac1d Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/Jinja2-3.1.2-py3-none-any.whl differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/MarkupSafe-2.1.1.tar.gz b/integration/packagers/testdata/uv/default_app_vendored/vendor/MarkupSafe-2.1.1.tar.gz new file mode 100644 index 00000000..3bc5b50d Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/MarkupSafe-2.1.1.tar.gz differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/Werkzeug-2.2.1-py3-none-any.whl b/integration/packagers/testdata/uv/default_app_vendored/vendor/Werkzeug-2.2.1-py3-none-any.whl new file mode 100644 index 00000000..618eb779 Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/Werkzeug-2.2.1-py3-none-any.whl differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/click-8.1.3-py3-none-any.whl b/integration/packagers/testdata/uv/default_app_vendored/vendor/click-8.1.3-py3-none-any.whl new file mode 100644 index 00000000..e758ec0c Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/click-8.1.3-py3-none-any.whl differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/gunicorn-20.1.0-py3-none-any.whl b/integration/packagers/testdata/uv/default_app_vendored/vendor/gunicorn-20.1.0-py3-none-any.whl new file mode 100644 index 00000000..6b800601 Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/gunicorn-20.1.0-py3-none-any.whl differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/itsdangerous-2.1.2.tar.gz b/integration/packagers/testdata/uv/default_app_vendored/vendor/itsdangerous-2.1.2.tar.gz new file mode 100644 index 00000000..994d7b5b Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/itsdangerous-2.1.2.tar.gz differ diff --git a/integration/packagers/testdata/uv/default_app_vendored/vendor/setuptools-80.9.0-py3-none-any.whl b/integration/packagers/testdata/uv/default_app_vendored/vendor/setuptools-80.9.0-py3-none-any.whl new file mode 100644 index 00000000..2412ad4a Binary files /dev/null and b/integration/packagers/testdata/uv/default_app_vendored/vendor/setuptools-80.9.0-py3-none-any.whl differ diff --git a/integration/packagers/uv_default_test.go b/integration/packagers/uv_default_test.go new file mode 100644 index 00000000..2a5960cd --- /dev/null +++ b/integration/packagers/uv_default_test.go @@ -0,0 +1,153 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/paketo-buildpacks/occam" + "github.com/sclevine/spec" + + . "github.com/onsi/gomega" + . "github.com/paketo-buildpacks/occam/matchers" +) + +func uvTestDefault(t *testing.T, context spec.G, it spec.S) { + var ( + Expect = NewWithT(t).Expect + Eventually = NewWithT(t).Eventually + pack occam.Pack + docker occam.Docker + ) + + it.Before(func() { + pack = occam.NewPack() + docker = occam.NewDocker() + }) + + context("when building a simple app", func() { + var ( + image occam.Image + container occam.Container + name string + source string + ) + + it.Before(func() { + var err error + name, err = occam.RandomName() + Expect(err).NotTo(HaveOccurred()) + + source, err = occam.Source(filepath.Join("testdata", "uv", "default_app")) + Expect(err).NotTo(HaveOccurred()) + }) + + it.After(func() { + Expect(docker.Container.Remove.Execute(container.ID)).To(Succeed()) + Expect(docker.Image.Remove.Execute(image.ID)).To(Succeed()) + Expect(docker.Volume.Remove.Execute(occam.CacheVolumeNames(name))).To(Succeed()) + Expect(os.RemoveAll(source)).To(Succeed()) + }) + + it("builds an oci image that has the correct behavior", func() { + var err error + + var logs fmt.Stringer + image, logs, err = pack.WithNoColor().Build. + WithPullPolicy("never"). + WithBuildpacks( + settings.Buildpacks.PythonInstallers.Online, + settings.Buildpacks.PythonPackagers.Online, + settings.Buildpacks.BuildPlan.Online, + ). + Execute(name, source) + Expect(err).NotTo(HaveOccurred(), logs.String()) + + container, err = docker.Container.Run. + WithEnv(map[string]string{"PORT": "8080"}). + WithPublish("8080"). + WithPublishAll(). + WithCommand("python server.py"). + Execute(image.ID) + Expect(err).NotTo(HaveOccurred()) + + Eventually(container).Should(Serve(ContainSubstring("Hello, world!")).OnPort(8080)) + }) + + context("validating SBOM", func() { + var ( + sbomDir string + ) + + it.Before(func() { + var err error + sbomDir, err = os.MkdirTemp("", "sbom") + Expect(err).NotTo(HaveOccurred()) + Expect(os.Chmod(sbomDir, os.ModePerm)).To(Succeed()) + + source, err = occam.Source(filepath.Join("testdata", "uv", "default_app")) + Expect(err).NotTo(HaveOccurred()) + }) + + it.After(func() { + Expect(os.RemoveAll(sbomDir)).To(Succeed()) + }) + + it("writes SBOM files to the layer and label metadata", func() { + var err error + var logs fmt.Stringer + + image, logs, err = pack.WithNoColor().Build. + WithPullPolicy("never"). + WithBuildpacks( + settings.Buildpacks.PythonInstallers.Online, + settings.Buildpacks.PythonPackagers.Online, + settings.Buildpacks.BuildPlan.Online, + ). + WithEnv(map[string]string{ + "BP_LOG_LEVEL": "DEBUG", + }). + WithSBOMOutputDir(sbomDir). + Execute(name, source) + Expect(err).ToNot(HaveOccurred(), logs.String) + + container, err = docker.Container.Run. + WithCommand("python server.py"). + WithEnv(map[string]string{"PORT": "8080"}). + WithPublish("8080"). 
+ Execute(image.ID) + Expect(err).ToNot(HaveOccurred()) + + Eventually(container).Should(BeAvailable()) + Eventually(container).Should(Serve(ContainSubstring("Hello, world!")).OnPort(8080)) + + Expect(logs).To(ContainLines( + fmt.Sprintf(" Generating SBOM for /layers/%s/uv-env", strings.ReplaceAll(buildpackInfo.Buildpack.ID, "/", "_")), + MatchRegexp(` Completed in \d+(\.?\d+)*`), + )) + Expect(logs).To(ContainLines( + " Writing SBOM in the following format(s):", + " application/vnd.cyclonedx+json", + " application/spdx+json", + " application/vnd.syft+json", + )) + + // check that all required SBOM files are present + Expect(filepath.Join(sbomDir, "sbom", "launch", strings.ReplaceAll(buildpackInfo.Buildpack.ID, "/", "_"), "uv-env", "sbom.cdx.json")).To(BeARegularFile()) + Expect(filepath.Join(sbomDir, "sbom", "launch", strings.ReplaceAll(buildpackInfo.Buildpack.ID, "/", "_"), "uv-env", "sbom.spdx.json")).To(BeARegularFile()) + Expect(filepath.Join(sbomDir, "sbom", "launch", strings.ReplaceAll(buildpackInfo.Buildpack.ID, "/", "_"), "uv-env", "sbom.syft.json")).To(BeARegularFile()) + + // check an SBOM file to make sure it has an entry for a dependency from requirements.txt + contents, err := os.ReadFile(filepath.Join(sbomDir, "sbom", "launch", strings.ReplaceAll(buildpackInfo.Buildpack.ID, "/", "_"), "uv-env", "sbom.cdx.json")) + Expect(err).NotTo(HaveOccurred()) + Expect(string(contents)).To(ContainSubstring(`"name": "flask"`)) + }) + }) + }) +} diff --git a/integration/packagers/uv_offline_test.go b/integration/packagers/uv_offline_test.go new file mode 100644 index 00000000..f7e3b462 --- /dev/null +++ b/integration/packagers/uv_offline_test.go @@ -0,0 +1,84 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "fmt" + "os" + "path/filepath" + "testing" + + . "github.com/onsi/gomega" + . "github.com/paketo-buildpacks/occam/matchers" + + "github.com/paketo-buildpacks/occam" + "github.com/sclevine/spec" +) + +func uvTestOffline(t *testing.T, context spec.G, it spec.S) { + var ( + Expect = NewWithT(t).Expect + Eventually = NewWithT(t).Eventually + pack occam.Pack + docker occam.Docker + ) + + it.Before(func() { + pack = occam.NewPack().WithVerbose() + docker = occam.NewDocker() + }) + + context("when building an app with a vendor directory", func() { + var ( + image occam.Image + container occam.Container + name string + source string + ) + + it.Before(func() { + var err error + name, err = occam.RandomName() + Expect(err).NotTo(HaveOccurred()) + + source, err = occam.Source(filepath.Join("testdata", "uv", "default_app_vendored")) + Expect(err).NotTo(HaveOccurred()) + }) + + it.After(func() { + Expect(docker.Container.Remove.Execute(container.ID)).To(Succeed()) + Expect(docker.Image.Remove.Execute(image.ID)).To(Succeed()) + Expect(docker.Volume.Remove.Execute(occam.CacheVolumeNames(name))).To(Succeed()) + Expect(os.RemoveAll(source)).To(Succeed()) + }) + + it("uses the vendored dependencies for the build", func() { + var err error + + var logs fmt.Stringer + image, logs, err = pack.WithNoColor().Build. + WithPullPolicy("never"). + WithBuildpacks( + settings.Buildpacks.CPython.Offline, + settings.Buildpacks.PythonInstallers.Offline, + settings.Buildpacks.PythonPackagers.Offline, + settings.Buildpacks.BuildPlan.Online, + ). + WithNetwork("none"). 
+ Execute(name, source)
+ Expect(err).NotTo(HaveOccurred(), logs.String())
+
+ container, err = docker.Container.Run.
+ WithEnv(map[string]string{"PORT": "8080"}).
+ WithPublish("8080").
+ WithPublishAll().
+ WithCommand("python server.py").
+ Execute(image.ID)
+ Expect(err).NotTo(HaveOccurred())
+
+ Eventually(container).Should(Serve(ContainSubstring("Hello, world!")).OnPort(8080))
+ })
+ })
+}
diff --git a/integration/packagers/uv_reused_test.go b/integration/packagers/uv_reused_test.go
new file mode 100644
index 00000000..31d9d919
--- /dev/null
+++ b/integration/packagers/uv_reused_test.go
@@ -0,0 +1,92 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package integration_test
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/paketo-buildpacks/occam"
+ "github.com/sclevine/spec"
+
+ . "github.com/onsi/gomega"
+ . "github.com/paketo-buildpacks/occam/matchers"
+)
+
+func uvTestReused(t *testing.T, context spec.G, it spec.S) {
+ var (
+ Expect = NewWithT(t).Expect
+
+ pack occam.Pack
+ docker occam.Docker
+ )
+
+ it.Before(func() {
+ pack = occam.NewPack()
+ docker = occam.NewDocker()
+ })
+
+ context("when the buildpack is run with pack build", func() {
+ var (
+ images = map[string]bool{}
+ name string
+ source string
+ )
+
+ it.Before(func() {
+ var err error
+ name, err = occam.RandomName()
+ Expect(err).NotTo(HaveOccurred())
+ })
+
+ it.After(func() {
+ for id := range images {
+ Expect(docker.Image.Remove.Execute(id)).To(Succeed())
+ }
+ Expect(docker.Volume.Remove.Execute(occam.CacheVolumeNames(name))).To(Succeed())
+ Expect(os.RemoveAll(source)).To(Succeed())
+ })
+
+ it("reuses layers", func() {
+ var err error
+ var logs1 fmt.Stringer
+ var logs2 fmt.Stringer
+
+ source, err = occam.Source(filepath.Join("testdata", "uv", "default_app"))
+ Expect(err).NotTo(HaveOccurred())
+
+ image1, logs1, err := pack.WithNoColor().Build.
+ WithPullPolicy("never").
+ WithBuildpacks(
+ settings.Buildpacks.PythonInstallers.Online,
+ settings.Buildpacks.PythonPackagers.Online,
+ settings.Buildpacks.BuildPlan.Online,
+ ).
+ Execute(name, source)
+ Expect(err).ToNot(HaveOccurred(), logs1.String)
+ images[image1.ID] = true
+
+ image2, logs2, err := pack.WithNoColor().Build.
+ WithPullPolicy("never").
+ WithBuildpacks(
+ settings.Buildpacks.PythonInstallers.Online,
+ settings.Buildpacks.PythonPackagers.Online,
+ settings.Buildpacks.BuildPlan.Online,
+ ).
+ Execute(name, source)
+ Expect(err).ToNot(HaveOccurred(), logs2.String)
+ images[image2.ID] = true
+
+ Expect(logs2).To(ContainLines(
+ fmt.Sprintf(" Reusing cached layer /layers/%s/uv-env", strings.ReplaceAll(buildpackInfo.Buildpack.ID, "/", "_")),
+ ))
+
+ Expect(image2.Buildpacks[0].Layers["uv-env"].SHA).To(Equal(image1.Buildpacks[0].Layers["uv-env"].SHA))
+ })
+ })
+}
diff --git a/pkg/packagers/uv/README.md b/pkg/packagers/uv/README.md
new file mode 100644
index 00000000..947c8b48
--- /dev/null
+++ b/pkg/packagers/uv/README.md
@@ -0,0 +1,72 @@
+
+
+# Sub package for uv environment setup
+
+Original implementation adapted from `paketo-buildpacks/conda-env-update`
+
+This sub package runs the commands needed to set up a uv-managed virtual environment. It installs the environment into a
+layer which makes it available for subsequent buildpacks and in the final running container.
+
+## Behavior
+
+This sub package participates when there is a `uv.lock` file in the app
+directory (see the illustrative layout below).
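+
+For reference, a minimal app that passes detection could look like the
+following (illustrative layout, not shipped in this repository; the lockfile
+comes from running `uv lock` against the project's `pyproject.toml`):
+
+```
+app/
+├── pyproject.toml   # project metadata and dependency constraints
+├── uv.lock          # lockfile that the detect phase looks for
+├── server.py
+└── vendor/          # optional; vendored wheels/sdists switch the install to offline mode
+```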
+
+The buildpack will do the following:
+
+* At build time:
+  - Requires that `uv` has already been installed in the build container by an
+    upstream buildpack
+  - Creates a virtual environment in a layer with `uv venv` and installs the
+    application's dependencies into it with `uv pip install`
+  - If a `vendor` directory is in the app dir, the vendored packages are
+    installed in offline mode and `cpython` is additionally required at the
+    version declared in `uv.lock`
+  - Otherwise, packages are resolved from the package index and the uv cache
+    is kept in a separate cache layer
+  - Reuses the cached uv environment layer from a previous build if and only
+    if the `uv.lock` in the app dir has not changed since the previous build
+* At run time:
+  - Does nothing
+
+## Integration
+
+This sub package provides `uv-environment` as a dependency. Downstream buildpacks can require the
+uv-environment dependency by
+generating a [Build Plan TOML](https://github.com/buildpacks/spec/blob/master/buildpack.md#build-plan-toml)
+file that looks like the following:
+
+```toml
+[[requires]]
+# The name of the dependency provided by this sub package is "uv-environment".
+# This value is considered part of the public API for the buildpack and will
+# not change without a plan for deprecation.
+name = "uv-environment"
+
+# This sub package supports some non-required metadata options.
+[requires.metadata]
+
+# Setting the build flag to true will ensure that the uv environment
+# layer is available for subsequent buildpacks during their build phase.
+# If you are writing a buildpack that needs the uv environment
+# during its build process, this flag should be set to true.
+build = true
+
+# Setting the launch flag to true will ensure that the uv environment is
+# available to the running application. If you are writing an application
+# that needs to use the uv environment at runtime, this flag should be set to true.
+launch = true
+```
+
+## SBOM
+
+This buildpack can generate a Software Bill of Materials (SBOM) for the dependencies of an application.
+
+The SBOM is generated by scanning the application source directory, which
+includes the `uv.lock` file and any vendored distributions. Check out the [Paketo SBOM documentation](https://paketo.io/docs/howto/sbom/) for more information about how to access the SBOM.
diff --git a/pkg/packagers/uv/build.go b/pkg/packagers/uv/build.go
new file mode 100644
index 00000000..dd0aa756
--- /dev/null
+++ b/pkg/packagers/uv/build.go
@@ -0,0 +1,134 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall
+
+import (
+ "os"
+ "path/filepath"
+ "time"
+
+ "github.com/paketo-buildpacks/packit/v2"
+ "github.com/paketo-buildpacks/packit/v2/draft"
+ "github.com/paketo-buildpacks/packit/v2/fs"
+ "github.com/paketo-buildpacks/packit/v2/sbom"
+
+ pythonpackagers "github.com/paketo-buildpacks/python-packagers/pkg/packagers/common"
+)
+
+//go:generate faux --interface Runner --output fakes/runner.go
+//go:generate faux --interface SBOMGenerator --output fakes/sbom_generator.go
+
+// Runner defines the interface for setting up the uv environment.
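+//
+// For orientation (informal summary, not generated code): the concrete
+// implementation is UvRunner in uv_runner.go, which shells out to the uv CLI.
+// run/main.go (later in this diff) wires it up as:
+//
+//	uvinstall.NewUvRunner(pexec.NewExecutable("uv"), fs.NewChecksumCalculator(), logger)
+//
+// ShouldRun reports whether Execute needs to run for this build, based on the
+// uv.lock checksum stored in the layer metadata; Execute then performs the
+// uv venv / uv pip install work against the uv-env layer.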
+type Runner interface { + Execute(uvEnvPath string, uvCachePath string, workingDir string) error + ShouldRun(workingDir string, metadata map[string]interface{}) (bool, string, error) +} + +// UvBuildParameters encapsulates the uv specific parameters for the +// Build function +type UvBuildParameters struct { + Runner Runner +} + +// Build will return a packit.BuildFunc that will be invoked during the build +// phase of the buildpack lifecycle. +// +// Build updates the uv environment and stores the result in a layer. It may +// reuse the environment layer from a previous build, depending on conditions +// determined by the runner. +func Build( + buildParameters UvBuildParameters, + parameters pythonpackagers.CommonBuildParameters, +) packit.BuildFunc { + return func(context packit.BuildContext) (packit.BuildResult, error) { + runner := buildParameters.Runner + + sbomGenerator := parameters.SbomGenerator + clock := parameters.Clock + logger := parameters.Logger + + logger.Title("%s %s", context.BuildpackInfo.Name, context.BuildpackInfo.Version) + + uvLayer, err := context.Layers.Get(UvEnvLayer) + if err != nil { + return packit.BuildResult{}, err + } + + uvCacheLayer, err := context.Layers.Get(UvEnvCache) + if err != nil { + return packit.BuildResult{}, err + } + + run, sha, err := runner.ShouldRun(context.WorkingDir, uvLayer.Metadata) + if err != nil { + return packit.BuildResult{}, err + } + + if run { + uvLayer, err = uvLayer.Reset() + if err != nil { + return packit.BuildResult{}, err + } + + logger.Process("Executing build process") + duration, err := clock.Measure(func() error { + return runner.Execute(uvLayer.Path, uvCacheLayer.Path, context.WorkingDir) + }) + if err != nil { + return packit.BuildResult{}, err + } + + logger.Action("Completed in %s", duration.Round(time.Millisecond)) + logger.Break() + + logger.GeneratingSBOM(uvLayer.Path) + + var sbomContent sbom.SBOM + duration, err = clock.Measure(func() error { + sbomContent, err = sbomGenerator.Generate(context.WorkingDir) + return err + }) + if err != nil { + return packit.BuildResult{}, err + } + logger.Action("Completed in %s", duration.Round(time.Millisecond)) + logger.Break() + + logger.FormattingSBOM(context.BuildpackInfo.SBOMFormats...) + + uvLayer.SBOM, err = sbomContent.InFormats(context.BuildpackInfo.SBOMFormats...) + if err != nil { + return packit.BuildResult{}, err + } + + uvLayer.SharedEnv.Prepend("PATH", filepath.Join(uvLayer.Path, "venv", "bin"), string(os.PathListSeparator)) + + logger.EnvironmentVariables(uvLayer) + + uvLayer.Metadata = map[string]interface{}{ + LockfileShaName: sha, + } + } else { + logger.Process("Reusing cached layer %s", uvLayer.Path) + logger.Break() + } + + planner := draft.NewPlanner() + uvLayer.Launch, uvLayer.Build = planner.MergeLayerTypes(UvEnvPlanEntry, context.Plan.Entries) + uvLayer.Cache = uvLayer.Build + uvCacheLayer.Cache = true + + layers := []packit.Layer{uvLayer} + if _, err := os.Stat(uvCacheLayer.Path); err == nil { + if !fs.IsEmptyDir(uvCacheLayer.Path) { + layers = append(layers, uvCacheLayer) + } + } + + return packit.BuildResult{ + Layers: layers, + }, nil + } +} diff --git a/pkg/packagers/uv/build_test.go b/pkg/packagers/uv/build_test.go new file mode 100644 index 00000000..e5701e9d --- /dev/null +++ b/pkg/packagers/uv/build_test.go @@ -0,0 +1,321 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 + +package uvinstall_test + +import ( + "bytes" + "errors" + "os" + "path/filepath" + "testing" + + "github.com/paketo-buildpacks/packit/v2" + "github.com/paketo-buildpacks/packit/v2/chronos" + "github.com/paketo-buildpacks/packit/v2/sbom" + "github.com/paketo-buildpacks/packit/v2/scribe" + pythonpackagers "github.com/paketo-buildpacks/python-packagers/pkg/packagers/common" + uvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" + "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv/fakes" + "github.com/sclevine/spec" + + . "github.com/onsi/gomega" +) + +func testBuild(t *testing.T, context spec.G, it spec.S) { + var ( + Expect = NewWithT(t).Expect + + layersDir string + workingDir string + cnbDir string + + buffer *bytes.Buffer + + runner *fakes.Runner + sbomGenerator *fakes.SBOMGenerator + + build packit.BuildFunc + buildContext packit.BuildContext + ) + + it.Before(func() { + var err error + layersDir, err = os.MkdirTemp("", "layers") + Expect(err).NotTo(HaveOccurred()) + + cnbDir, err = os.MkdirTemp("", "cnb") + Expect(err).NotTo(HaveOccurred()) + + workingDir, err = os.MkdirTemp("", "working-dir") + Expect(err).NotTo(HaveOccurred()) + + runner = &fakes.Runner{} + sbomGenerator = &fakes.SBOMGenerator{} + + runner.ShouldRunCall.Returns.Bool = true + runner.ShouldRunCall.Returns.String = "some-sha" + + sbomGenerator.GenerateCall.Returns.SBOM = sbom.SBOM{} + + buffer = bytes.NewBuffer(nil) + logger := scribe.NewEmitter(buffer) + + build = uvinstall.Build( + uvinstall.UvBuildParameters{ + runner, + }, + pythonpackagers.CommonBuildParameters{ + SbomGenerator: sbomGenerator, + Clock: chronos.DefaultClock, + Logger: logger, + }, + ) + buildContext = packit.BuildContext{ + BuildpackInfo: packit.BuildpackInfo{ + Name: "Some Buildpack", + Version: "some-version", + SBOMFormats: []string{sbom.CycloneDXFormat, sbom.SPDXFormat}, + }, + WorkingDir: workingDir, + CNBPath: cnbDir, + Plan: packit.BuildpackPlan{ + Entries: []packit.BuildpackPlanEntry{ + { + Name: uvinstall.UvEnvPlanEntry, + }, + }, + }, + Platform: packit.Platform{Path: "some-platform-path"}, + Layers: packit.Layers{Path: layersDir}, + Stack: "some-stack", + } + }) + + it.After(func() { + Expect(os.RemoveAll(layersDir)).To(Succeed()) + Expect(os.RemoveAll(cnbDir)).To(Succeed()) + Expect(os.RemoveAll(workingDir)).To(Succeed()) + }) + + it("returns a result that builds correctly", func() { + result, err := build(buildContext) + Expect(err).NotTo(HaveOccurred()) + + layers := result.Layers + Expect(layers).To(HaveLen(1)) + + uvEnvLayer := layers[0] + Expect(uvEnvLayer.Name).To(Equal("uv-env")) + Expect(uvEnvLayer.Path).To(Equal(filepath.Join(layersDir, "uv-env"))) + + Expect(uvEnvLayer.Build).To(BeFalse()) + Expect(uvEnvLayer.Launch).To(BeFalse()) + Expect(uvEnvLayer.Cache).To(BeFalse()) + + Expect(uvEnvLayer.BuildEnv).To(BeEmpty()) + Expect(uvEnvLayer.LaunchEnv).To(BeEmpty()) + Expect(uvEnvLayer.ProcessLaunchEnv).To(BeEmpty()) + // Expect(uvEnvLayer.SharedEnv).ToNot(BeEmpty()) + Expect(uvEnvLayer.SharedEnv).To(HaveLen(2)) + Expect(uvEnvLayer.SharedEnv["PATH.prepend"]).To(Equal(filepath.Join(uvEnvLayer.Path, "venv", "bin"))) + Expect(uvEnvLayer.SharedEnv["PATH.delim"]).To(Equal(":")) + + Expect(uvEnvLayer.SBOM.Formats()).To(HaveLen(2)) + var actualExtensions []string + for _, format := range uvEnvLayer.SBOM.Formats() { + actualExtensions = append(actualExtensions, format.Extension) + } + Expect(actualExtensions).To(ConsistOf("cdx.json", "spdx.json")) + + 
Expect(runner.ExecuteCall.Receives.UvEnvPath).To(Equal(filepath.Join(layersDir, "uv-env"))) + Expect(runner.ExecuteCall.Receives.UvCachePath).To(Equal(filepath.Join(layersDir, "uv-env-cache"))) + Expect(runner.ExecuteCall.Receives.WorkingDir).To(Equal(workingDir)) + + Expect(sbomGenerator.GenerateCall.Receives.Dir).To(Equal(workingDir)) + }) + + context("when the runner executes outputting a non-empty cache dir", func() { + it.Before(func() { + runner.ExecuteCall.Stub = func(_, c, _ string) error { + Expect(os.Mkdir(c, os.ModePerm)).To(Succeed()) + Expect(os.WriteFile(filepath.Join(c, "some-file"), []byte{}, os.ModePerm)).To(Succeed()) + return nil + } + }) + + it.After(func() { + Expect(os.RemoveAll(filepath.Join(layersDir, "uv-env-cache"))).To(Succeed()) + }) + + it("cache layer is exported", func() { + result, err := build(buildContext) + Expect(err).NotTo(HaveOccurred()) + + layers := result.Layers + Expect(layers).To(HaveLen(2)) + + uvEnvLayer := layers[0] + Expect(uvEnvLayer.Name).To(Equal("uv-env")) + + cacheLayer := layers[1] + Expect(cacheLayer.Name).To(Equal("uv-env-cache")) + Expect(cacheLayer.Path).To(Equal(filepath.Join(layersDir, "uv-env-cache"))) + + Expect(cacheLayer.Build).To(BeFalse()) + Expect(cacheLayer.Launch).To(BeFalse()) + Expect(cacheLayer.Cache).To(BeTrue()) + }) + }) + + context("when a build plan entry requires uv-environment at launch", func() { + it.Before(func() { + buildContext.Plan.Entries[0].Metadata = map[string]interface{}{ + "launch": true, + } + }) + + it("assigns the flag to the uv env layer", func() { + result, err := build(buildContext) + Expect(err).NotTo(HaveOccurred()) + + layers := result.Layers + Expect(layers).To(HaveLen(1)) + + uvEnvLayer := layers[0] + Expect(uvEnvLayer.Name).To(Equal("uv-env")) + + Expect(uvEnvLayer.Build).To(BeFalse()) + Expect(uvEnvLayer.Launch).To(BeTrue()) + Expect(uvEnvLayer.Cache).To(BeFalse()) + }) + }) + + context("when a build plan entry requires uv-environment at build", func() { + it.Before(func() { + buildContext.Plan.Entries[0].Metadata = map[string]interface{}{ + "build": true, + } + }) + + it("assigns build and cache to the uv env layer", func() { + result, err := build(buildContext) + Expect(err).NotTo(HaveOccurred()) + + layers := result.Layers + Expect(layers).To(HaveLen(1)) + + uvEnvLayer := layers[0] + Expect(uvEnvLayer.Name).To(Equal("uv-env")) + + Expect(uvEnvLayer.Build).To(BeTrue()) + Expect(uvEnvLayer.Launch).To(BeFalse()) + Expect(uvEnvLayer.Cache).To(BeTrue()) + }) + }) + + context("cached packages should be reused", func() { + it.Before(func() { + runner.ShouldRunCall.Returns.Bool = false + runner.ShouldRunCall.Returns.String = "cached-sha" + }) + + it("reuses cached uv env layer instead of running build process", func() { + result, err := build(buildContext) + Expect(err).NotTo(HaveOccurred()) + + layers := result.Layers + Expect(layers).To(HaveLen(1)) + + uvEnvLayer := layers[0] + Expect(uvEnvLayer.Name).To(Equal("uv-env")) + + Expect(runner.ExecuteCall.CallCount).To(BeZero()) + }) + }) + + context("failure cases", func() { + context("uv layer cannot be fetched", func() { + it.Before(func() { + Expect(os.WriteFile(filepath.Join(layersDir, "uv-env.toml"), nil, 0000)).To(Succeed()) + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError(ContainSubstring("permission denied"))) + }) + }) + + context("uv cache layer cannot be fetched", func() { + it.Before(func() { + Expect(os.WriteFile(filepath.Join(layersDir, "uv-env-cache.toml"), nil, 
0000)).To(Succeed()) + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError(ContainSubstring("permission denied"))) + }) + }) + + context("runner ShouldRun fails", func() { + it.Before(func() { + runner.ShouldRunCall.Returns.Error = errors.New("some-shouldrun-error") + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError("some-shouldrun-error")) + }) + }) + + context("layer cannot be reset", func() { + it.Before(func() { + Expect(os.Chmod(layersDir, 0500)).To(Succeed()) + }) + + it.After(func() { + Expect(os.Chmod(layersDir, os.ModePerm)).To(Succeed()) + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError(ContainSubstring("error could not create directory"))) + }) + }) + + context("install process fails to execute", func() { + it.Before(func() { + runner.ShouldRunCall.Returns.Bool = true + runner.ExecuteCall.Returns.Error = errors.New("some execution error") + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError(ContainSubstring("some execution error"))) + }) + }) + + context("when generating the SBOM returns an error", func() { + it.Before(func() { + buildContext.BuildpackInfo.SBOMFormats = []string{"random-format"} + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError(`unsupported SBOM format: 'random-format'`)) + }) + }) + + context("when formatting the SBOM returns an error", func() { + it.Before(func() { + sbomGenerator.GenerateCall.Returns.Error = errors.New("failed to generate SBOM") + }) + + it("returns an error", func() { + _, err := build(buildContext) + Expect(err).To(MatchError(ContainSubstring("failed to generate SBOM"))) + }) + }) + }) +} diff --git a/pkg/packagers/uv/constants.go b/pkg/packagers/uv/constants.go new file mode 100644 index 00000000..dd60256f --- /dev/null +++ b/pkg/packagers/uv/constants.go @@ -0,0 +1,27 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 + +package uvinstall + +const ( + // UvEnvLayer is the name of the layer into which uv environment is installed. + UvEnvLayer = "uv-env" + + // UvEnvCache is the name of the layer that is used as the uv package directory. + UvEnvCache = "uv-env-cache" + + // UvEnvPlanEntry is the name of the Build Plan requirement that this buildpack provides. + UvEnvPlanEntry = "uv-environment" + + // UvPlanEntry is the name of the Build Plan requirement for the uv + // dependency that this buildpack requires. + UvPlanEntry = "uv" + + // LockfileShaName is the key in the Layer Content Metadata used to determine if layer + // can be reused. + LockfileShaName = "lockfile-sha" + + // LockfileName is the name of the export file from which the buildpack reinstalls packages + LockfileName = "uv.lock" +) diff --git a/pkg/packagers/uv/detect.go b/pkg/packagers/uv/detect.go new file mode 100644 index 00000000..00dc4a99 --- /dev/null +++ b/pkg/packagers/uv/detect.go @@ -0,0 +1,72 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 + +package uvinstall + +import ( + "path/filepath" + + "github.com/paketo-buildpacks/packit/v2" + "github.com/paketo-buildpacks/packit/v2/fs" +) + +// Detect returns a packit.DetectFunc that will be invoked during the +// detect phase of the buildpack lifecycle. 
+//
+// Detection passes when there is a uv.lock file in the app directory, and
+// will contribute a Build Plan that provides uv-environment and requires uv
+// (plus cpython, pinned to the version parsed from uv.lock, when a vendor
+// directory is present).
+func Detect() packit.DetectFunc {
+ return func(context packit.DetectContext) (packit.DetectResult, error) {
+
+ lockfilePath := filepath.Join(context.WorkingDir, LockfileName)
+ lockFile, err := fs.Exists(lockfilePath)
+ if err != nil {
+ return packit.DetectResult{}, packit.Fail.WithMessage("failed trying to stat %s: %w", LockfileName, err)
+ }
+
+ if !lockFile {
+ return packit.DetectResult{}, packit.Fail.WithMessage("no 'uv.lock' found")
+ }
+
+ vendor, err := fs.Exists(filepath.Join(context.WorkingDir, "vendor"))
+ if err != nil {
+ return packit.DetectResult{}, packit.Fail.WithMessage("failed trying to stat vendor dir: %w", err)
+ }
+
+ requires := []packit.BuildPlanRequirement{
+ {
+ Name: UvPlanEntry,
+ Metadata: map[string]interface{}{
+ "build": true,
+ },
+ },
+ }
+
+ if vendor {
+ parser := NewLockfileParser()
+ version, _ := parser.ParsePythonVersion(lockfilePath)
+
+ requires = append(requires,
+ packit.BuildPlanRequirement{
+ Name: "cpython",
+ Metadata: map[string]interface{}{
+ "build": true,
+ "launch": true,
+ "version": version,
+ },
+ },
+ )
+ }
+
+ return packit.DetectResult{
+ Plan: packit.BuildPlan{
+ Provides: []packit.BuildPlanProvision{
+ {Name: UvEnvPlanEntry},
+ },
+ Requires: requires,
+ },
+ }, nil
+ }
+}
diff --git a/pkg/packagers/uv/detect_test.go b/pkg/packagers/uv/detect_test.go
new file mode 100644
index 00000000..eeb37a3a
--- /dev/null
+++ b/pkg/packagers/uv/detect_test.go
@@ -0,0 +1,96 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall_test
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/paketo-buildpacks/packit/v2"
+ uvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv"
+ "github.com/sclevine/spec"
+
+ .
"github.com/onsi/gomega" +) + +func testDetect(t *testing.T, context spec.G, it spec.S) { + var ( + Expect = NewWithT(t).Expect + + workingDir string + detect packit.DetectFunc + ) + + it.Before(func() { + var err error + workingDir, err = os.MkdirTemp("", "working-dir") + Expect(err).NotTo(HaveOccurred()) + + detect = uvinstall.Detect() + }) + + it.After(func() { + Expect(os.RemoveAll(workingDir)).To(Succeed()) + }) + + context("when there is an uv.lock in the working dir", func() { + it.Before(func() { + Expect(os.WriteFile(filepath.Join(workingDir, uvinstall.LockfileName), nil, 0644)).To(Succeed()) + }) + + it("detects", func() { + result, err := detect(packit.DetectContext{ + WorkingDir: workingDir, + }) + Expect(err).NotTo(HaveOccurred()) + + Expect(result.Plan).To(Equal(packit.BuildPlan{ + Provides: []packit.BuildPlanProvision{ + { + Name: uvinstall.UvEnvPlanEntry, + }, + }, + Requires: []packit.BuildPlanRequirement{ + { + Name: uvinstall.UvPlanEntry, + Metadata: map[string]interface{}{ + "build": true, + }, + }, + }, + })) + }) + }) + + context("when no uv.lock is present in the working dir", func() { + it("fails to detect", func() { + _, err := detect(packit.DetectContext{ + WorkingDir: workingDir, + }) + Expect(err).To(MatchError(packit.Fail.WithMessage("no 'uv.lock' found"))) + }) + }) + + context("failure cases", func() { + context("when the file cannot be stat'd", func() { + it.Before(func() { + Expect(os.Chmod(workingDir, 0000)).To(Succeed()) + }) + + it.After(func() { + Expect(os.Chmod(workingDir, os.ModePerm)).To(Succeed()) + }) + + it("returns an error", func() { + _, err := detect(packit.DetectContext{ + WorkingDir: workingDir, + }) + Expect(err).To(MatchError(ContainSubstring("failed trying to stat uv.lock:"))) + Expect(err).To(MatchError(ContainSubstring("permission denied"))) + }) + }) + }) +} diff --git a/pkg/packagers/uv/fakes/executable.go b/pkg/packagers/uv/fakes/executable.go new file mode 100644 index 00000000..49bf278f --- /dev/null +++ b/pkg/packagers/uv/fakes/executable.go @@ -0,0 +1,36 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 + +package fakes + +import ( + "sync" + + "github.com/paketo-buildpacks/packit/v2/pexec" +) + +type Executable struct { + ExecuteCall struct { + mutex sync.Mutex + CallCount int + Receives struct { + Execution pexec.Execution + } + Returns struct { + Error error + } + Stub func(pexec.Execution) error + } +} + +func (f *Executable) Execute(param1 pexec.Execution) error { + f.ExecuteCall.mutex.Lock() + defer f.ExecuteCall.mutex.Unlock() + f.ExecuteCall.CallCount++ + f.ExecuteCall.Receives.Execution = param1 + if f.ExecuteCall.Stub != nil { + return f.ExecuteCall.Stub(param1) + } + return f.ExecuteCall.Returns.Error +} diff --git a/pkg/packagers/uv/fakes/runner.go b/pkg/packagers/uv/fakes/runner.go new file mode 100644 index 00000000..1c30a86c --- /dev/null +++ b/pkg/packagers/uv/fakes/runner.go @@ -0,0 +1,65 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 + +package fakes + +import "sync" + +type Runner struct { + ExecuteCall struct { + mutex sync.Mutex + CallCount int + Receives struct { + UvEnvPath string + UvCachePath string + WorkingDir string + } + Returns struct { + Error error + } + Stub func(string, string, string) error + } + ShouldRunCall struct { + mutex sync.Mutex + CallCount int + Receives struct { + WorkingDir string + Metadata map[string]interface { + } + } + Returns struct { + Bool bool + String string + Error error + } + Stub func(string, map[string]interface { + }) (bool, string, error) + } +} + +func (f *Runner) Execute(param1 string, param2 string, param3 string) error { + f.ExecuteCall.mutex.Lock() + defer f.ExecuteCall.mutex.Unlock() + f.ExecuteCall.CallCount++ + f.ExecuteCall.Receives.UvEnvPath = param1 + f.ExecuteCall.Receives.UvCachePath = param2 + f.ExecuteCall.Receives.WorkingDir = param3 + if f.ExecuteCall.Stub != nil { + return f.ExecuteCall.Stub(param1, param2, param3) + } + return f.ExecuteCall.Returns.Error +} + +func (f *Runner) ShouldRun(param1 string, param2 map[string]interface { +}) (bool, string, error) { + f.ShouldRunCall.mutex.Lock() + defer f.ShouldRunCall.mutex.Unlock() + f.ShouldRunCall.CallCount++ + f.ShouldRunCall.Receives.WorkingDir = param1 + f.ShouldRunCall.Receives.Metadata = param2 + if f.ShouldRunCall.Stub != nil { + return f.ShouldRunCall.Stub(param1, param2) + } + return f.ShouldRunCall.Returns.Bool, f.ShouldRunCall.Returns.String, f.ShouldRunCall.Returns.Error +} diff --git a/pkg/packagers/uv/fakes/sbom_generator.go b/pkg/packagers/uv/fakes/sbom_generator.go new file mode 100644 index 00000000..9897b0d6 --- /dev/null +++ b/pkg/packagers/uv/fakes/sbom_generator.go @@ -0,0 +1,37 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 + +package fakes + +import ( + "sync" + + "github.com/paketo-buildpacks/packit/v2/sbom" +) + +type SBOMGenerator struct { + GenerateCall struct { + mutex sync.Mutex + CallCount int + Receives struct { + Dir string + } + Returns struct { + SBOM sbom.SBOM + Error error + } + Stub func(string) (sbom.SBOM, error) + } +} + +func (f *SBOMGenerator) Generate(param1 string) (sbom.SBOM, error) { + f.GenerateCall.mutex.Lock() + defer f.GenerateCall.mutex.Unlock() + f.GenerateCall.CallCount++ + f.GenerateCall.Receives.Dir = param1 + if f.GenerateCall.Stub != nil { + return f.GenerateCall.Stub(param1) + } + return f.GenerateCall.Returns.SBOM, f.GenerateCall.Returns.Error +} diff --git a/pkg/packagers/uv/fakes/summer.go b/pkg/packagers/uv/fakes/summer.go new file mode 100644 index 00000000..31232cdd --- /dev/null +++ b/pkg/packagers/uv/fakes/summer.go @@ -0,0 +1,33 @@ +// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 + +package fakes + +import "sync" + +type Summer struct { + SumCall struct { + mutex sync.Mutex + CallCount int + Receives struct { + Arg []string + } + Returns struct { + String string + Error error + } + Stub func(...string) (string, error) + } +} + +func (f *Summer) Sum(param1 ...string) (string, error) { + f.SumCall.mutex.Lock() + defer f.SumCall.mutex.Unlock() + f.SumCall.CallCount++ + f.SumCall.Receives.Arg = param1 + if f.SumCall.Stub != nil { + return f.SumCall.Stub(param1...) 
+ }
+ return f.SumCall.Returns.String, f.SumCall.Returns.Error
+}
diff --git a/pkg/packagers/uv/init_test.go b/pkg/packagers/uv/init_test.go
new file mode 100644
index 00000000..95c52848
--- /dev/null
+++ b/pkg/packagers/uv/init_test.go
@@ -0,0 +1,21 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall_test
+
+import (
+ "testing"
+
+ "github.com/sclevine/spec"
+ "github.com/sclevine/spec/report"
+)
+
+func TestUnitUvInstall(t *testing.T) {
+ suite := spec.New("uvinstall", spec.Report(report.Terminal{}), spec.Parallel())
+ suite("Build", testBuild)
+ suite("UvRunner", testUvRunner)
+ suite("UvLockParser", testUvLockParser)
+ suite("Detect", testDetect)
+ suite.Run(t)
+}
diff --git a/pkg/packagers/uv/uv_runner.go b/pkg/packagers/uv/uv_runner.go
new file mode 100644
index 00000000..12cd48df
--- /dev/null
+++ b/pkg/packagers/uv/uv_runner.go
@@ -0,0 +1,181 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/paketo-buildpacks/packit/v2/fs"
+ "github.com/paketo-buildpacks/packit/v2/pexec"
+ "github.com/paketo-buildpacks/packit/v2/scribe"
+)
+
+//go:generate faux --interface Executable --output fakes/executable.go
+
+// Executable defines the interface for invoking an executable.
+type Executable interface {
+ Execute(pexec.Execution) error
+}
+
+// Summer defines the interface for computing a SHA256 for a set of files
+// and/or directories.
+//
+//go:generate faux --interface Summer --output fakes/summer.go
+type Summer interface {
+ Sum(arg ...string) (string, error)
+}
+
+// UvRunner implements the Runner interface.
+type UvRunner struct {
+ executable Executable
+ summer Summer
+ logger scribe.Emitter
+}
+
+// NewUvRunner creates an instance of UvRunner given an Executable, a Summer, and a Logger.
+func NewUvRunner(executable Executable, summer Summer, logger scribe.Emitter) UvRunner {
+ return UvRunner{
+ executable: executable,
+ summer: summer,
+ logger: logger,
+ }
+}
+
+// ShouldRun determines whether the uv environment setup command needs to be
+// run, given the path to the app directory and the metadata from the
+// preexisting uv-env layer. It returns true if the uv environment setup
+// command must be run during this build, the SHA256 of the uv.lock in
+// the app directory, and an error. If there is no uv.lock, the sha
+// returned is an empty string.
+func (c UvRunner) ShouldRun(workingDir string, metadata map[string]interface{}) (run bool, sha string, err error) {
+ lockfilePath := filepath.Join(workingDir, LockfileName)
+ _, err = os.Stat(lockfilePath)
+
+ if errors.Is(err, os.ErrNotExist) {
+ return true, "", nil
+ }
+
+ if err != nil {
+ return false, "", err
+ }
+
+ updatedLockfileSha, err := c.summer.Sum(lockfilePath)
+ if err != nil {
+ return false, "", err
+ }
+
+ if updatedLockfileSha == metadata[LockfileShaName] {
+ return false, updatedLockfileSha, nil
+ }
+
+ return true, updatedLockfileSha, nil
+}
+
+// Execute sets up the uv environment: it creates a virtual environment with
+// uv venv in the uv-env layer and installs the application into it with
+// uv pip install. If a vendor directory is present, the vendored packages are
+// installed in offline mode; otherwise packages are resolved from the package
+// index, with the uv-env-cache layer used as uv's cache directory.
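+//
+// Roughly, the resulting invocations look like the following (layer paths are
+// illustrative placeholders, not literal values):
+//
+//	uv venv <uv-env layer>/venv [--offline --python <cpython layer>/bin/python]
+//	uv pip install --python <uv-env layer>/venv/bin/python [--cache-dir <uv-env-cache layer>] <app dir> [--no-index --offline]
+//
+// The bracketed flags are only added on the vendored path; see onlineArgs and
+// offlineArgs below for the exact argument lists.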
+func (c UvRunner) Execute(uvLayerPath string, uvCachePath string, workingDir string) error {
+ lockfileExists, err := fs.Exists(filepath.Join(workingDir, LockfileName))
+ if err != nil {
+ return err
+ }
+
+ if !lockfileExists {
+ return errors.New("missing lock file")
+ }
+
+ vendorDir := filepath.Join(workingDir, "vendor")
+
+ exists, err := fs.Exists(vendorDir)
+ if err != nil {
+ return err
+ }
+
+ venvPath := filepath.Join(uvLayerPath, "venv")
+ args := []string{
+ "venv",
+ venvPath,
+ }
+
+ env := append(os.Environ(), fmt.Sprintf("HOME=%s", uvLayerPath))
+
+ if exists {
+ args = append(args, "--offline", "--python", "/layers/paketo-buildpacks_cpython/cpython/bin/python")
+ env = append(env, "LD_LIBRARY_PATH=/layers/paketo-buildpacks_cpython/cpython/lib")
+ }
+
+ c.logger.Subprocess("Running 'uv %s'", strings.Join(args, " "))
+
+ err = c.executable.Execute(pexec.Execution{
+ Args: args,
+ Env: env,
+ Stdout: c.logger.ActionWriter,
+ Stderr: c.logger.ActionWriter,
+ })
+
+ if err != nil {
+ return fmt.Errorf("failed to run uv command: %w", err)
+ }
+
+ userFindLinks, _ := os.LookupEnv("BP_UV_FIND_LINKS")
+ findLinks, _ := os.LookupEnv("UV_FIND_LINKS")
+
+ combinedFindLinks := []string{userFindLinks, findLinks}
+
+ if exists {
+ combinedFindLinks = append(combinedFindLinks, vendorDir)
+ args = offlineArgs(venvPath, workingDir)
+ } else {
+ args = onlineArgs(venvPath, uvCachePath, workingDir)
+ }
+
+ c.logger.Subprocess("Running 'uv %s'", strings.Join(args, " "))
+
+ err = c.executable.Execute(pexec.Execution{
+ Args: args,
+ Env: append(env,
+ fmt.Sprintf("UV_FIND_LINKS=%s", strings.TrimLeft(strings.Join(combinedFindLinks, " "), " ")),
+ ),
+
+ Stdout: c.logger.ActionWriter,
+ Stderr: c.logger.ActionWriter,
+ })
+
+ if err != nil {
+ return fmt.Errorf("failed to run uv command: %w", err)
+ }
+
+ return nil
+}
+
+func onlineArgs(venvPath string, cachePath string, workingDir string) []string {
+ return []string{
+ "pip",
+ "install",
+ "--python",
+ filepath.Join(venvPath, "bin", "python"),
+ "--cache-dir",
+ cachePath,
+ workingDir,
+ }
+}
+
+func offlineArgs(venvPath string, workingDir string) []string {
+ return []string{
+ "pip",
+ "install",
+ "--no-index",
+ "--python",
+ filepath.Join(venvPath, "bin", "python"),
+ workingDir,
+ "--offline",
+ }
+}
diff --git a/pkg/packagers/uv/uv_runner_test.go b/pkg/packagers/uv/uv_runner_test.go
new file mode 100644
index 00000000..73fe5c35
--- /dev/null
+++ b/pkg/packagers/uv/uv_runner_test.go
@@ -0,0 +1,295 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall_test
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/paketo-buildpacks/packit/v2/pexec"
+ "github.com/paketo-buildpacks/packit/v2/scribe"
+ uv "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv"
+ "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv/fakes"
+ "github.com/sclevine/spec"
+
+ . "github.com/onsi/gomega"
+ .
"github.com/paketo-buildpacks/occam/matchers" +) + +func testUvRunner(t *testing.T, context spec.G, it spec.S) { + var ( + Expect = NewWithT(t).Expect + + workingDir string + uvLayerPath string + uvCachePath string + + executable *fakes.Executable + executions []pexec.Execution + summer *fakes.Summer + runner uv.UvRunner + buffer *bytes.Buffer + logger scribe.Emitter + ) + + it.Before(func() { + workingDir = t.TempDir() + layersDir := t.TempDir() + + uvLayerPath = filepath.Join(layersDir, "a-uv-layer") + uvCachePath = filepath.Join(layersDir, "a-uv-cache-path") + + executable = &fakes.Executable{} + executions = []pexec.Execution{} + executable.ExecuteCall.Stub = func(ex pexec.Execution) error { + executions = append(executions, ex) + Expect(os.MkdirAll(filepath.Join(uvLayerPath, "uv-meta"), os.ModePerm)).To(Succeed()) + // For reasons currently unknown, the search call triggers a permission issue in the tests + Expect(os.Chmod(filepath.Join(uvLayerPath, "uv-meta"), os.ModePerm)).To(Succeed()) + Expect(os.WriteFile(filepath.Join(uvLayerPath, "uv-meta", "history"), []byte("some content"), os.ModePerm)).To(Succeed()) + _, err := fmt.Fprintln(ex.Stdout, "stdout output") + Expect(err).NotTo(HaveOccurred()) + _, err = fmt.Fprintln(ex.Stderr, "stderr output") + Expect(err).NotTo(HaveOccurred()) + return nil + } + + summer = &fakes.Summer{} + buffer = bytes.NewBuffer(nil) + logger = scribe.NewEmitter(buffer) + runner = uv.NewUvRunner(executable, summer, logger) + }) + + context("ShouldRun", func() { + it("returns true, with no sha, and no error when no lockfile is present", func() { + run, sha, err := runner.ShouldRun(workingDir, map[string]interface{}{}) + Expect(run).To(BeTrue()) + Expect(sha).To(Equal("")) + Expect(err).NotTo(HaveOccurred()) + }) + + context("when there is an error checking if a lockfile is present", func() { + it.Before(func() { + Expect(os.Chmod(workingDir, 0000)).To(Succeed()) + }) + + it.After(func() { + Expect(os.Chmod(workingDir, os.ModePerm)).To(Succeed()) + }) + + it("returns false, with no sha, and an error", func() { + run, sha, err := runner.ShouldRun(workingDir, map[string]interface{}{}) + Expect(run).To(BeFalse()) + Expect(sha).To(Equal("")) + Expect(err).To(HaveOccurred()) + }) + }) + + context("when a lockfile is present", func() { + it.Before(func() { + Expect(os.WriteFile(filepath.Join(workingDir, uv.LockfileName), nil, os.ModePerm)).To(Succeed()) + }) + context("and the lockfile sha is unchanged", func() { + it("return false, with the existing sha, and no error", func() { + summer.SumCall.Returns.String = "a-sha" + Expect(os.WriteFile(filepath.Join(workingDir, uv.LockfileName), nil, os.ModePerm)).To(Succeed()) + + metadata := map[string]interface{}{ + "lockfile-sha": "a-sha", + } + + run, sha, err := runner.ShouldRun(workingDir, metadata) + Expect(run).To(BeFalse()) + Expect(sha).To(Equal("a-sha")) + Expect(err).NotTo(HaveOccurred()) + }) + context("and there is and error summing the lock file", func() { + it.Before(func() { + summer.SumCall.Returns.Error = errors.New("summing lockfile failed") + }) + + it("returns false, with no sha, and an error", func() { + run, sha, err := runner.ShouldRun(workingDir, map[string]interface{}{}) + Expect(run).To(BeFalse()) + Expect(sha).To(Equal("")) + Expect(err).To(MatchError("summing lockfile failed")) + + }) + }) + }) + + it("returns true, with a new sha, and no error when the lockfile has changed", func() { + summer.SumCall.Returns.String = "a-new-sha" + metadata := map[string]interface{}{ + uv.LockfileShaName: 
"a-sha", + } + + run, sha, err := runner.ShouldRun(workingDir, metadata) + Expect(run).To(BeTrue()) + Expect(sha).To(Equal("a-new-sha")) + Expect(err).NotTo(HaveOccurred()) + }) + }) + }) + + context("Execute", func() { + context("when a vendor dir is present", func() { + var vendorPath string + it.Before(func() { + vendorPath = filepath.Join(workingDir, "vendor") + Expect(os.Mkdir(vendorPath, os.ModePerm)) + Expect(os.WriteFile(filepath.Join(workingDir, uv.LockfileName), nil, os.ModePerm)).To(Succeed()) + }) + + it("runs uv pip install with additional vendor args and WITHOUT cache layer args", func() { + err := runner.Execute(uvLayerPath, uvCachePath, workingDir) + Expect(err).NotTo(HaveOccurred()) + + args := []string{ + "pip", + "install", + "--no-index", + "--python", + filepath.Join(uvLayerPath, "venv", "bin", "python"), + workingDir, + "--offline", + } + + Expect(executions[1].Args).To(Equal(args)) + Expect(executable.ExecuteCall.CallCount).To(Equal(2)) + Expect(buffer.String()).To(ContainLines( + fmt.Sprintf(" Running 'uv %s'", strings.Join(args, " ")), + " stdout output", + " stderr output", + )) + }) + + context("failure cases", func() { + context("when there is an error running the uv command", func() { + it.Before(func() { + executable.ExecuteCall.Stub = func(ex pexec.Execution) error { + _, err := fmt.Fprintln(ex.Stdout, "uv error stdout") + Expect(err).NotTo(HaveOccurred()) + _, err = fmt.Fprintln(ex.Stderr, "uv error stderr") + Expect(err).NotTo(HaveOccurred()) + return errors.New("some uv failure") + } + }) + + it("returns an error with stdout/stderr output", func() { + err := runner.Execute(uvLayerPath, uvCachePath, workingDir) + Expect(err).To(MatchError("failed to run uv command: some uv failure")) + + args := []string{ + "venv", + filepath.Join(uvLayerPath, "venv"), + "--offline", + "--python", + "/layers/paketo-buildpacks_cpython/cpython/bin/python", + } + Expect(buffer.String()).To(ContainLines( + fmt.Sprintf(" Running 'uv %s'", strings.Join(args, " ")), + " uv error stdout", + " uv error stderr", + )) + }) + }) + }) + }) + + context("when a lockfile exists", func() { + it.Before(func() { + Expect(os.WriteFile(filepath.Join(workingDir, uv.LockfileName), nil, os.ModePerm)).To(Succeed()) + }) + + it.After(func() { + Expect(os.RemoveAll(filepath.Join(workingDir, uv.LockfileName))).To(Succeed()) + }) + + it("runs uv create with the cache layer available in the environment", func() { + err := runner.Execute(uvLayerPath, uvCachePath, workingDir) + Expect(err).NotTo(HaveOccurred()) + + Expect(executable.ExecuteCall.CallCount).To(Equal(2)) + + Expect(executions[0].Args).To(Equal([]string{ + "venv", + filepath.Join(uvLayerPath, "venv"), + })) + Expect(executions[0].Env).To(ContainElement(fmt.Sprintf("HOME=%s", uvLayerPath))) + + Expect(executions[1].Args).To(Equal([]string{ + "pip", + "install", + "--python", + filepath.Join(uvLayerPath, "venv", "bin", "python"), + "--cache-dir", + uvCachePath, + workingDir, + })) + Expect(executions[1].Env).To(ContainElement("UV_FIND_LINKS=")) + Expect(executable.ExecuteCall.CallCount).To(Equal(2)) + }) + + context("failure cases", func() { + context("when the uv env command fails to run", func() { + it.Before(func() { + executable.ExecuteCall.Stub = func(ex pexec.Execution) error { + _, err := fmt.Fprintln(ex.Stdout, "uv error stdout") + Expect(err).NotTo(HaveOccurred()) + _, err = fmt.Fprintln(ex.Stderr, "uv error stderr") + Expect(err).NotTo(HaveOccurred()) + return errors.New("some uv failure") + } + }) + + it("returns an error and logs 
+		context("when a lockfile exists", func() {
+			it.Before(func() {
+				Expect(os.WriteFile(filepath.Join(workingDir, uv.LockfileName), nil, os.ModePerm)).To(Succeed())
+			})
+
+			it.After(func() {
+				Expect(os.RemoveAll(filepath.Join(workingDir, uv.LockfileName))).To(Succeed())
+			})
+
+			it("runs uv venv and uv pip install with the cache layer args", func() {
+				err := runner.Execute(uvLayerPath, uvCachePath, workingDir)
+				Expect(err).NotTo(HaveOccurred())
+
+				Expect(executable.ExecuteCall.CallCount).To(Equal(2))
+
+				Expect(executions[0].Args).To(Equal([]string{
+					"venv",
+					filepath.Join(uvLayerPath, "venv"),
+				}))
+				Expect(executions[0].Env).To(ContainElement(fmt.Sprintf("HOME=%s", uvLayerPath)))
+
+				Expect(executions[1].Args).To(Equal([]string{
+					"pip",
+					"install",
+					"--python",
+					filepath.Join(uvLayerPath, "venv", "bin", "python"),
+					"--cache-dir",
+					uvCachePath,
+					workingDir,
+				}))
+				Expect(executions[1].Env).To(ContainElement("UV_FIND_LINKS="))
+				Expect(executable.ExecuteCall.CallCount).To(Equal(2))
+			})
+
+			context("failure cases", func() {
+				context("when the uv env command fails to run", func() {
+					it.Before(func() {
+						executable.ExecuteCall.Stub = func(ex pexec.Execution) error {
+							_, err := fmt.Fprintln(ex.Stdout, "uv error stdout")
+							Expect(err).NotTo(HaveOccurred())
+							_, err = fmt.Fprintln(ex.Stderr, "uv error stderr")
+							Expect(err).NotTo(HaveOccurred())
+							return errors.New("some uv failure")
+						}
+					})
+
+					it("returns an error and logs the stdout and stderr output from the command", func() {
+						err := runner.Execute(uvLayerPath, uvCachePath, workingDir)
+						Expect(err).To(MatchError("failed to run uv command: some uv failure"))
+						Expect(buffer.String()).To(ContainLines(
+							fmt.Sprintf(
+								" Running 'uv venv %s'",
+								filepath.Join(uvLayerPath, "venv"),
+							),
+							" uv error stdout",
+							" uv error stderr",
+						))
+					})
+				})
+			})
+		})
+
+		context("when no vendor dir or lockfile exists", func() {
+			context("failure cases", func() {
+				context("when no lockfile exists", func() {
+					it("returns an error", func() {
+						err := runner.Execute(uvLayerPath, uvCachePath, workingDir)
+						Expect(err).To(MatchError(ContainSubstring("missing lock file")))
+					})
+				})
+
+				context("there is an error checking for vendor directory", func() {
+					it.Before(func() {
+						Expect(os.Chmod(workingDir, 0000)).To(Succeed())
+					})
+
+					it.After(func() {
+						Expect(os.Chmod(workingDir, os.ModePerm)).To(Succeed())
+					})
+
+					it("returns an error", func() {
+						err := runner.Execute(uvLayerPath, uvCachePath, workingDir)
+						Expect(err).To(MatchError(ContainSubstring("permission denied")))
+					})
+				})
+			})
+		})
+	})
+}
diff --git a/pkg/packagers/uv/uvlock_parser.go b/pkg/packagers/uv/uvlock_parser.go
new file mode 100644
index 00000000..32247aec
--- /dev/null
+++ b/pkg/packagers/uv/uvlock_parser.go
@@ -0,0 +1,36 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall
+
+import (
+	"strings"
+
+	"github.com/BurntSushi/toml"
+)
+
+type Lockfile struct {
+	RequiresPython string `toml:"requires-python"`
+}
+
+type LockfileParser struct {
+}
+
+func NewLockfileParser() LockfileParser {
+	return LockfileParser{}
+}
+
+func (p LockfileParser) ParsePythonVersion(lockfilePath string) (string, error) {
+	var lockfile Lockfile
+
+	_, err := toml.DecodeFile(lockfilePath, &lockfile)
+	if err != nil {
+		return "", err
+	}
+
+	if lockfile.RequiresPython != "" {
+		return strings.Trim(lockfile.RequiresPython, "="), nil
+	}
+	return lockfile.RequiresPython, nil
+}
diff --git a/pkg/packagers/uv/uvlock_parser_test.go b/pkg/packagers/uv/uvlock_parser_test.go
new file mode 100644
index 00000000..553384b6
--- /dev/null
+++ b/pkg/packagers/uv/uvlock_parser_test.go
@@ -0,0 +1,69 @@
+// SPDX-FileCopyrightText: Copyright (c) 2013-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package uvinstall_test
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+
+	. "github.com/onsi/gomega"
"github.com/onsi/gomega" + "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" + "github.com/sclevine/spec" +) + +func testUvLockParser(t *testing.T, context spec.G, it spec.S) { + var ( + Expect = NewWithT(t).Expect + + workingDir string + lockfile string + + parser uvinstall.LockfileParser + ) + + const ( + version = `requires-python = "==1.2.3"` + ) + + it.Before(func() { + var err error + workingDir, err = os.MkdirTemp("", "working-dir") + Expect(err).NotTo(HaveOccurred()) + + lockfile = filepath.Join(workingDir, uvinstall.LockfileName) + + parser = uvinstall.NewLockfileParser() + }) + + it.After(func() { + Expect(os.RemoveAll(workingDir)).To(Succeed()) + }) + + context("Calling ParsePythonVersion", func() { + it("parses version", func() { + Expect(os.WriteFile(lockfile, []byte(version), 0644)).To(Succeed()) + + version, err := parser.ParsePythonVersion(lockfile) + Expect(err).NotTo(HaveOccurred()) + Expect(version).To(Equal("1.2.3")) + }) + + it("returns empty string if file does not contain requires-python", func() { + Expect(os.WriteFile(lockfile, []byte(""), 0644)).To(Succeed()) + + version, err := parser.ParsePythonVersion(lockfile) + Expect(err).NotTo(HaveOccurred()) + Expect(version).To(Equal("")) + }) + + context("error handling", func() { + it("fails if file does not exist", func() { + _, err := parser.ParsePythonVersion("not-a-valid-dir") + Expect(err).To(HaveOccurred()) + }) + }) + }) +} diff --git a/run/main.go b/run/main.go index fcb4dc32..75c49050 100644 --- a/run/main.go +++ b/run/main.go @@ -20,6 +20,7 @@ import ( pipinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pip" pipenvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/pipenv" poetryinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/poetry" + uvinstall "github.com/paketo-buildpacks/python-packagers/pkg/packagers/uv" ) func main() { @@ -49,6 +50,9 @@ func main() { InstallProcess: poetryinstall.NewPoetryInstallProcess(pexec.NewExecutable("poetry"), logger), PythonPathLookupProcess: poetryinstall.NewPythonPathProcess(), }, + uvinstall.UvEnvPlanEntry: uvinstall.UvBuildParameters{ + Runner: uvinstall.NewUvRunner(pexec.NewExecutable("uv"), fs.NewChecksumCalculator(), logger), + }, } packit.Run(