Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/how-tos/bootstrap-constraints.rst
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ production packages.

.. code-block:: console

$ fromager --constraints-file constraints.txt build-sequence ./work-dir/build-order.json
$ fromager --constraints-file constraints.txt build-sequence ./work-dir/graph.json ./work-dir/build-order.json

This will use the constraints in the ``constraints.txt`` file to build the
production packages for ``my-package``.
15 changes: 9 additions & 6 deletions docs/using.md
Original file line number Diff line number Diff line change
Expand Up @@ -187,8 +187,10 @@ individual package compilation or integration into larger build systems.

### The build-sequence command

The `build-sequence` command processes a pre-determined build order file
(typically `build-order.json`) to build wheels in dependency order.
The `build-sequence` command processes a dependency graph (`graph.json`) and a
pre-determined build order file (`build-order.json`) to build wheels in
dependency order. Build dependencies are resolved from the graph rather than
running PEP 517 discovery hooks.

The outputs are patched source distributions and built wheels for each item in
the build-order file.
Expand All @@ -198,11 +200,12 @@ for any wheels that have already been built with the current settings.

For each package in the sequence:

1. **Build Order Reading** - Loads the build order file containing:
1. **Build Order Reading** - Loads the build order and graph files:

- Package names and versions to build
- Source URLs and types (PyPI, git, prebuilt)
- Dependency relationships and constraints
- `build-order.json`: Package names, versions, source URLs and types
(PyPI, git, prebuilt) in predetermined build order
- `graph.json`: Dependency relationships used to resolve build
requirements without running PEP 517 discovery hooks

2. **Build Status Checking** - Determines if building is needed:

Expand Down
9 changes: 5 additions & 4 deletions e2e/test_build_order.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,9 @@ fromager \
--settings-dir="$SCRIPTDIR/changelog_settings" \
bootstrap "${DIST}==${VERSION}"

# Save the build order file but remove everything else.
# Save the build order and graph files but remove everything else.
cp "$OUTDIR/work-dir/build-order.json" "$OUTDIR/"
cp "$OUTDIR/work-dir/graph.json" "$OUTDIR/"

# Rebuild everything even if it already exists
log="$OUTDIR/build-logs/${DIST}-build.log"
Expand All @@ -31,7 +32,7 @@ fromager \
--sdists-repo "$OUTDIR/sdists-repo" \
--wheels-repo "$OUTDIR/wheels-repo" \
--settings-dir="$SCRIPTDIR/changelog_settings" \
build-sequence --force "$OUTDIR/build-order.json"
build-sequence --force "$OUTDIR/graph.json" "$OUTDIR/build-order.json"

find "$OUTDIR/wheels-repo/"

Expand Down Expand Up @@ -94,7 +95,7 @@ fromager \
--sdists-repo "$OUTDIR/sdists-repo" \
--wheels-repo "$OUTDIR/wheels-repo" \
--settings-dir="$SCRIPTDIR/changelog_settings" \
build-sequence "$OUTDIR/build-order.json"
build-sequence "$OUTDIR/graph.json" "$OUTDIR/build-order.json"

find "$OUTDIR/wheels-repo/"

Expand All @@ -118,7 +119,7 @@ fromager \
--work-dir "$OUTDIR/work-dir" \
--sdists-repo "$OUTDIR/sdists-repo" \
--wheels-repo "$OUTDIR/wheels-repo" \
build-sequence --cache-wheel-server-url="https://pypi.org/simple" "$OUTDIR/build-order.json"
build-sequence --cache-wheel-server-url="https://pypi.org/simple" "$OUTDIR/graph.json" "$OUTDIR/build-order.json"

find "$OUTDIR/wheels-repo/"

Expand Down
3 changes: 2 additions & 1 deletion e2e/test_build_sequence_git_url.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ ls "$OUTDIR"/work-dir/*/build.log || true

# Clean up the work directory so we can test build-sequence
mv "$OUTDIR/work-dir/build-order.json" "$OUTDIR/"
cp "$OUTDIR/work-dir/graph.json" "$OUTDIR/"
rm -rf "$OUTDIR/work-dir/wheels-repo"
rm -rf "$OUTDIR/work-dir/sdists-repo"

Expand All @@ -37,7 +38,7 @@ fromager \
--sdists-repo "$OUTDIR/sdists-repo" \
--wheels-repo "$OUTDIR/wheels-repo" \
--settings-dir="$SCRIPTDIR/changelog_settings" \
build-sequence --force "$OUTDIR/build-order.json"
build-sequence --force "$OUTDIR/graph.json" "$OUTDIR/build-order.json"

find "$OUTDIR/wheels-repo/" -name '*.whl'
find "$OUTDIR/sdists-repo/" -name '*.tar.gz'
Expand Down
5 changes: 3 additions & 2 deletions e2e/test_prebuilt_wheel_hook.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@ fromager \
--settings-dir="$SCRIPTDIR/prebuilt_settings" \
bootstrap "${DIST}==${VERSION}"

# Save the build order file but remove everything else.
# Save the build order and graph files but remove everything else.
cp "$OUTDIR/work-dir/build-order.json" "$OUTDIR/"
cp "$OUTDIR/work-dir/graph.json" "$OUTDIR/"

# Remove downloaded wheels to trigger hook
rm -rf "$OUTDIR/wheels-repo"
Expand All @@ -34,7 +35,7 @@ fromager \
--sdists-repo "$OUTDIR/sdists-repo" \
--wheels-repo "$OUTDIR/wheels-repo" \
--settings-dir="$SCRIPTDIR/prebuilt_settings" \
build-sequence "$OUTDIR/build-order.json"
build-sequence "$OUTDIR/graph.json" "$OUTDIR/build-order.json"

PATTERNS=(
"downloading prebuilt wheel ${DIST}==${VERSION}"
Expand Down
51 changes: 50 additions & 1 deletion src/fromager/build_environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from .requirements_file import RequirementType

if typing.TYPE_CHECKING:
from . import context
from . import context, dependency_graph

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -314,6 +314,55 @@ def prepare_build_environment(
return build_env


@metrics.timeit(description="prepare build environment from graph")
def prepare_build_environment_from_graph(
    *,
    ctx: context.WorkContext,
    req: Requirement,
    sdist_root_dir: pathlib.Path,
    build_requirements: typing.Iterable[dependency_graph.DependencyNode],
) -> BuildEnvironment:
    """Create a build environment populated from pre-resolved graph dependencies.

    Uses build requirements extracted from the dependency graph instead of
    running PEP 517 discovery hooks. This is the preferred path for Stage 2
    build commands (build-sequence, build-parallel) where the graph is the
    source of truth.

    :param ctx: active work context used to create and populate the virtualenv
    :param req: the requirement whose wheel is being built (used for error
        reporting by ``_safe_install``)
    :param sdist_root_dir: unpacked sdist directory; the environment is
        created in its parent directory
    :param build_requirements: graph nodes whose exact pinned versions are
        installed into the build environment
    :return: the prepared :class:`BuildEnvironment`
    """
    logger.info("preparing build environment from dependency graph")

    build_env = BuildEnvironment(
        ctx=ctx,
        parent_dir=sdist_root_dir.parent,
    )

    # Pin each graph node to its exact resolved version so the build env
    # matches what the graph recorded.
    reqs = {
        Requirement(f"{node.canonicalized_name}=={node.version}")
        for node in build_requirements
    }
    if reqs:
        # Graph-resolved deps are a mix of build-system, build-backend,
        # build-sdist, and their transitive install deps. We use
        # BUILD_SYSTEM as the label since they all go into the build env.
        # NOTE(review): if _safe_install fails, this labels the whole mixed
        # batch as a build-system failure; a neutral RequirementType would
        # make the error output more accurate — confirm before changing.
        _safe_install(
            ctx=ctx,
            req=req,
            build_env=build_env,
            deps=reqs,
            dep_req_type=RequirementType.BUILD_SYSTEM,
        )

    try:
        distributions = build_env.get_distributions()
    except Exception:
        # ignore error for debug call, error reason is logged in get_distributions()
        pass
    else:
        logger.debug("build env %r has packages %r", build_env.path, distributions)

    return build_env


def _safe_install(
ctx: context.WorkContext,
req: Requirement,
Expand Down
4 changes: 4 additions & 0 deletions src/fromager/commands/bootstrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,10 @@ def bootstrap(
progressbar.update()
requirement_ctxvar.reset(token)

# Ensure graph.json is written even when no recursive dependencies
# were discovered (e.g., prebuilt-only bootstraps).
wkctx.write_to_graph_to_file()

# Finalize test mode and check for failures
exit_code = bt.finalize()
if exit_code != 0:
Expand Down
58 changes: 49 additions & 9 deletions src/fromager/commands/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,22 +142,26 @@ def build(
"cache_wheel_server_url",
help="url to a wheel server from where fromager can check if it had already built the wheel",
)
@click.argument("graph_file")
@click.argument("build_order_file")
@click.pass_obj
def build_sequence(
wkctx: context.WorkContext,
graph_file: str,
build_order_file: str,
force: bool,
cache_wheel_server_url: str | None,
) -> None:
"""Build a sequence of wheels in order

BUILD_ORDER_FILE is the build-order.json files to build
GRAPH_FILE is a graph.json file containing the dependency relationships
between packages, used to resolve build dependencies.

SDIST_SERVER_URL is the URL for a PyPI-compatible package index hosting sdists
BUILD_ORDER_FILE is the build-order.json file specifying the build order.

Performs the equivalent of the 'build' command for each item in
the build order file.
the build order file, using the dependency graph to populate
build environments instead of PEP 517 discovery hooks.

"""
server.start_wheel_server(wkctx)
Expand All @@ -173,6 +177,9 @@ def build_sequence(
f"{wkctx.wheel_server_url=}, {cache_wheel_server_url=}"
)

logger.info("reading dependency graph from %s", graph_file)
graph = dependency_graph.DependencyGraph.from_file(graph_file)

entries: list[BuildSequenceEntry] = []

logger.info("reading build order from %s", build_order_file)
Expand All @@ -191,6 +198,10 @@ def build_sequence(
else:
req = Requirement(f"{dist_name}=={resolved_version}")

build_requirements = _get_build_requirements_from_graph(
graph, dist_name, resolved_version
)

with req_ctxvar_context(req, resolved_version):
logger.info("building %s", resolved_version)
entry = _build(
Expand All @@ -200,6 +211,7 @@ def build_sequence(
source_download_url=source_download_url,
force=force,
cache_wheel_server_url=cache_wheel_server_url,
build_requirements=build_requirements,
)
if entry.prebuilt:
logger.info(
Expand Down Expand Up @@ -326,6 +338,7 @@ def _build(
source_download_url: str,
force: bool,
cache_wheel_server_url: str | None,
build_requirements: typing.Iterable[dependency_graph.DependencyNode] | None = None,
) -> BuildSequenceEntry:
"""Handle one version of one wheel.

Expand Down Expand Up @@ -417,12 +430,20 @@ def _build(
)

# Build environment
build_env = build_environment.prepare_build_environment(
ctx=wkctx,
req=req,
version=resolved_version,
sdist_root_dir=source_root_dir,
)
if build_requirements is not None:
build_env = build_environment.prepare_build_environment_from_graph(
ctx=wkctx,
req=req,
sdist_root_dir=source_root_dir,
build_requirements=build_requirements,
)
else:
build_env = build_environment.prepare_build_environment(
ctx=wkctx,
req=req,
version=resolved_version,
sdist_root_dir=source_root_dir,
)

# Make a new source distribution, in case we patched the code.
sdist_filename = sources.build_sdist(
Expand Down Expand Up @@ -544,6 +565,7 @@ def _build_parallel(
source_download_url: str,
force: bool,
cache_wheel_server_url: str | None,
build_requirements: typing.Iterable[dependency_graph.DependencyNode] | None = None,
) -> BuildSequenceEntry:
"""
This function runs in a thread to manage the build of a single package.
Expand All @@ -556,7 +578,24 @@ def _build_parallel(
source_download_url=source_download_url,
force=force,
cache_wheel_server_url=cache_wheel_server_url,
build_requirements=build_requirements,
)


def _get_build_requirements_from_graph(
    graph: dependency_graph.DependencyGraph,
    dist_name: str,
    version: Version,
) -> list[dependency_graph.DependencyNode]:
    """Look up build requirements for a package from the dependency graph.

    Raises KeyError when ``dist_name==version`` has no node in ``graph``,
    which indicates the graph file does not match the build-order file.
    """
    node_key = f"{canonicalize_name(dist_name)}=={version}"
    if (node := graph.nodes.get(node_key)) is not None:
        return list(node.iter_build_requirements())
    raise KeyError(
        f"package {node_key} not found in dependency graph; "
        f"ensure the graph file matches the build order"
    )


def _nodes_to_string(nodes: typing.Iterable[dependency_graph.DependencyNode]) -> str:
Expand Down Expand Up @@ -681,6 +720,7 @@ def update_progressbar_cb(future: concurrent.futures.Future) -> None:
source_download_url=node.download_url,
force=force,
cache_wheel_server_url=cache_wheel_server_url,
build_requirements=list(node.iter_build_requirements()),
)
future.add_done_callback(update_progressbar_cb)
future2node[future] = node
Expand Down
Loading
Loading