Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions .changeset/sparse-fieldsets.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
---
"@stackables/bridge-core": minor
"@stackables/bridge-compiler": minor
---

Add `requestedFields` option to `executeBridge()` for sparse fieldset filtering.

When provided, only the listed output fields (and their transitive tool dependencies) are resolved.
Tools that feed exclusively into unrequested fields are never called, reducing latency and upstream
bandwidth.

Supports dot-separated paths and a trailing wildcard (`["id", "price", "legs.*"]`).
10 changes: 3 additions & 7 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,9 @@ jobs:
- uses: pnpm/action-setup@v4
- name: Install dependencies
run: pnpm install
- name: Build
run: pnpm build
- name: Check Exports
run: pnpm check:exports
- name: Lint Types
run: pnpm lint:types
- name: Test
run: pnpm test
- name: Build
run: pnpm build
- name: Lint with ESLint
run: pnpm lint:eslint
run: pnpm lint
336 changes: 46 additions & 290 deletions AGENTS.md

Large diffs are not rendered by default.

6 changes: 2 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,8 @@
"packageManager": "pnpm@10.30.3+sha256.ff0a72140f6a6d66c0b284f6c9560aff605518e28c29aeac25fb262b74331588",
"scripts": {
"test": "pnpm -r test",
"build": "pnpm -r build",
"lint:types": "pnpm -r --filter './packages/*' lint:types",
"lint:eslint": "eslint .",
"check:exports": "node scripts/check-exports.mjs",
"build": "pnpm -r --filter './packages/*' build",
"lint": "eslint .",
"smoke": "node scripts/smoke-test-packages.mjs",
"e2e": "pnpm -r e2e",
"depcheck": "pnpm -r exec pnpm dlx depcheck",
Expand Down
21 changes: 11 additions & 10 deletions packages/bridge-compiler/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -73,16 +73,17 @@ console.log(code); // Prints the raw `export default async function...` string

## API: `ExecuteBridgeOptions`

| Option | Type | What it does |
| ---------------- | --------------------- | -------------------------------------------------------------------------------- |
| `document` | `BridgeDocument` | The parsed AST from `@stackables/bridge-parser`. |
| `operation` | `string` | Which bridge to run, e.g. `"Query.myField"`. |
| `input?` | `Record<string, any>` | Input arguments — equivalent to GraphQL field args. |
| `tools?` | `ToolMap` | Your custom tool functions (merged with built-in `std`). |
| `context?` | `Record<string, any>` | Shared data available via `with context as ctx` in `.bridge` files. |
| `signal?` | `AbortSignal` | Pass an `AbortSignal` to cancel execution and upstream HTTP requests mid-flight. |
| `toolTimeoutMs?` | `number` | Fails the execution if a single tool takes longer than this threshold. |
| `logger?` | `Logger` | Structured logger for tool calls. |
| Option | Type | What it does |
| ------------------ | --------------------- | -------------------------------------------------------------------------------- |
| `document` | `BridgeDocument` | The parsed AST from `@stackables/bridge-parser`. |
| `operation` | `string` | Which bridge to run, e.g. `"Query.myField"`. |
| `input?` | `Record<string, any>` | Input arguments — equivalent to GraphQL field args. |
| `tools?` | `ToolMap` | Your custom tool functions (merged with built-in `std`). |
| `context?` | `Record<string, any>` | Shared data available via `with context as ctx` in `.bridge` files. |
| `signal?` | `AbortSignal` | Pass an `AbortSignal` to cancel execution and upstream HTTP requests mid-flight. |
| `toolTimeoutMs?` | `number` | Fails the execution if a single tool takes longer than this threshold. |
| `logger?` | `Logger` | Structured logger for tool calls. |
| `requestedFields?` | `string[]`            | Sparse fieldset filter — only resolve the listed output fields. Supports dot-separated paths and a trailing `*` wildcard (e.g. `["id", "legs.*"]`). Omit or pass an empty array to resolve all fields. |

_Returns:_ `Promise<{ data: T }>`

Expand Down
122 changes: 88 additions & 34 deletions packages/bridge-compiler/src/codegen.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ import type {
NodeRef,
ToolDef,
} from "@stackables/bridge-core";
import { matchesRequestedFields } from "@stackables/bridge-core";

const SELF_MODULE = "_";

Expand All @@ -38,6 +39,12 @@ const SELF_MODULE = "_";
export interface CompileOptions {
/** The operation to compile, e.g. "Query.livingStandard" */
operation: string;
/**
* Sparse fieldset filter — only emit code for the listed output fields.
* Supports dot-separated paths and a trailing `*` wildcard.
* Omit or pass an empty array to compile all output fields.
*/
requestedFields?: string[];
}

export interface CompileResult {
Expand Down Expand Up @@ -88,7 +95,7 @@ export function compileBridge(
(i): i is ToolDef => i.kind === "tool",
);

const ctx = new CodegenContext(bridge, constDefs, toolDefs);
const ctx = new CodegenContext(bridge, constDefs, toolDefs, options.requestedFields);
return ctx.compile();
}

Expand Down Expand Up @@ -231,16 +238,20 @@ class CodegenContext {
/** Map from ToolDef dependency tool name to its emitted variable name.
* Populated lazily by emitToolDeps to avoid duplicating calls. */
private toolDepVars = new Map<string, string>();
/** Sparse fieldset filter for output wire pruning. */
private requestedFields: string[] | undefined;

constructor(
bridge: Bridge,
constDefs: Map<string, string>,
toolDefs: ToolDef[],
requestedFields?: string[],
) {
this.bridge = bridge;
this.constDefs = constDefs;
this.toolDefs = toolDefs;
this.selfTrunkKey = `${SELF_MODULE}:${bridge.type}:${bridge.field}`;
this.requestedFields = requestedFields?.length ? requestedFields : undefined;

for (const h of bridge.handles) {
switch (h.kind) {
Expand Down Expand Up @@ -452,7 +463,7 @@ class CodegenContext {
}

// Separate wires into tool inputs, define containers, and output
const outputWires: Wire[] = [];
const allOutputWires: Wire[] = [];
const toolWires = new Map<string, Wire[]>();
const defineWires = new Map<string, Wire[]>();

Expand All @@ -465,7 +476,7 @@ class CodegenContext {
? `${w.to.module}:${w.to.type}:${w.to.field}`
: toKey;
if (toTrunkNoElement === this.selfTrunkKey) {
outputWires.push(w);
allOutputWires.push(w);
} else if (this.defineContainers.has(toKey)) {
// Wire targets a define-in/out container
const arr = defineWires.get(toKey) ?? [];
Expand All @@ -478,6 +489,19 @@ class CodegenContext {
}
}

// ── Sparse fieldset filtering ──────────────────────────────────────
// When requestedFields is provided, drop output wires for fields that
// weren't requested. The backward reachability analysis below then
// naturally eliminates tools that only feed into those dropped wires.
const outputWires = this.requestedFields
? allOutputWires.filter((w) => {
// Root wires (path length 0) and element wires are always included
if (w.to.path.length === 0) return true;
const fieldPath = w.to.path.join(".");
return matchesRequestedFields(fieldPath, this.requestedFields);
})
: allOutputWires;

// Ensure force-only tools (no wires targeting them from output) are
// still included in the tool map for scheduling
for (const [tk] of forceMap) {
Expand Down Expand Up @@ -618,38 +642,68 @@ class CodegenContext {
lines.push(` }`);

// ── Dead tool detection ────────────────────────────────────────────
// Detect tools whose output is never referenced by any output wire,
// other tool wire, or define container wire. These are dead code
// (e.g. a pipe-only handle whose forks are all element-scoped).
const referencedToolKeys = new Set<string>();
const allWireSources = [...outputWires, ...bridge.wires];
for (const w of allWireSources) {
if ("from" in w) referencedToolKeys.add(refTrunkKey(w.from));
if ("cond" in w) {
referencedToolKeys.add(refTrunkKey(w.cond));
if (w.thenRef) referencedToolKeys.add(refTrunkKey(w.thenRef));
if (w.elseRef) referencedToolKeys.add(refTrunkKey(w.elseRef));
}
if ("condAnd" in w) {
referencedToolKeys.add(refTrunkKey(w.condAnd.leftRef));
if (w.condAnd.rightRef)
referencedToolKeys.add(refTrunkKey(w.condAnd.rightRef));
}
if ("condOr" in w) {
referencedToolKeys.add(refTrunkKey(w.condOr.leftRef));
if (w.condOr.rightRef)
referencedToolKeys.add(refTrunkKey(w.condOr.rightRef));
}
// Also count falsy/nullish/catch fallback refs
if ("falsyFallbackRefs" in w && w.falsyFallbackRefs) {
for (const ref of w.falsyFallbackRefs)
referencedToolKeys.add(refTrunkKey(ref));
}
if ("nullishFallbackRef" in w && w.nullishFallbackRef) {
referencedToolKeys.add(refTrunkKey(w.nullishFallbackRef));
// Detect which tools are reachable from the (possibly filtered) output
// wires. Uses a backward reachability analysis: start from tools
// referenced in output wires, then transitively follow tool-input
// wires to discover all upstream dependencies. Tools not in the
// reachable set are dead code and can be skipped.

/**
* Extract all tool trunk keys referenced as **sources** in a set of
* wires. A "source key" is the trunk key of a node that feeds data
* into a wire (the right-hand side of `target <- source`). This
* includes pull refs, ternary branches, condAnd/condOr operands,
* and all fallback refs. Used by the backward reachability analysis
* to discover which tools are transitively needed by the output.
*/
const collectSourceKeys = (wires: Wire[]): Set<string> => {
const keys = new Set<string>();
for (const w of wires) {
if ("from" in w) keys.add(refTrunkKey(w.from));
if ("cond" in w) {
keys.add(refTrunkKey(w.cond));
if (w.thenRef) keys.add(refTrunkKey(w.thenRef));
if (w.elseRef) keys.add(refTrunkKey(w.elseRef));
}
if ("condAnd" in w) {
keys.add(refTrunkKey(w.condAnd.leftRef));
if (w.condAnd.rightRef) keys.add(refTrunkKey(w.condAnd.rightRef));
}
if ("condOr" in w) {
keys.add(refTrunkKey(w.condOr.leftRef));
if (w.condOr.rightRef) keys.add(refTrunkKey(w.condOr.rightRef));
}
if ("falsyFallbackRefs" in w && w.falsyFallbackRefs) {
for (const ref of w.falsyFallbackRefs) keys.add(refTrunkKey(ref));
}
if ("nullishFallbackRef" in w && w.nullishFallbackRef) {
keys.add(refTrunkKey(w.nullishFallbackRef));
}
if ("catchFallbackRef" in w && w.catchFallbackRef) {
keys.add(refTrunkKey(w.catchFallbackRef));
}
}
if ("catchFallbackRef" in w && w.catchFallbackRef) {
referencedToolKeys.add(refTrunkKey(w.catchFallbackRef));
return keys;
};

// Seed: tools directly referenced by output wires + forced tools
const referencedToolKeys = collectSourceKeys(outputWires);
for (const tk of forceMap.keys()) referencedToolKeys.add(tk);

// Transitive closure: walk backward through tool input wires
const visited = new Set<string>();
const queue = [...referencedToolKeys];
while (queue.length > 0) {
const tk = queue.pop()!;
if (visited.has(tk)) continue;
visited.add(tk);
const deps = toolWires.get(tk);
if (!deps) continue;
for (const key of collectSourceKeys(deps)) {
if (!visited.has(key)) {
referencedToolKeys.add(key);
queue.push(key);
}
}
}

Expand Down
43 changes: 37 additions & 6 deletions packages/bridge-compiler/src/execute-bridge.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,20 @@ export type ExecuteBridgeOptions = {
* - `"full"` — everything including input and output
*/
trace?: TraceLevel;
/**
* Sparse fieldset filter.
*
* When provided, only the listed output fields (and their transitive
* dependencies) are compiled and executed. Tools that feed exclusively
* into unrequested fields are eliminated by the compiler's backward
* reachability (dead-code) analysis.
*
* Supports dot-separated paths and a trailing wildcard:
* `["id", "price", "legs.*"]`
*
* Omit or pass an empty array to resolve all fields (the default).
*/
requestedFields?: string[];
};

export type ExecuteBridgeResult<T = unknown> = {
Expand Down Expand Up @@ -91,20 +105,37 @@ const AsyncFunction = Object.getPrototypeOf(async function () {})
.constructor as typeof Function;

/**
* Cache: one compiled function per (document identity × operation).
* Cache: one compiled function per (document identity × operation × requestedFields).
* Uses a WeakMap keyed on the document object so entries are GC'd when
* the document is no longer referenced.
*/
const fnCache = new WeakMap<BridgeDocument, Map<string, BridgeFn>>();

function getOrCompile(document: BridgeDocument, operation: string): BridgeFn {
/** Build a cache key that includes the sorted requestedFields. */
/**
 * Build a stable cache key for a compiled bridge function.
 *
 * Returns the bare operation name when no sparse fieldset is in effect —
 * `undefined` and `[]` both mean "resolve all fields", so they share a
 * single cache entry. Otherwise the field list is sorted first so that
 * `["a", "b"]` and `["b", "a"]` hit the same entry.
 *
 * Fields are joined with `"\u0000"` (a character that cannot appear in a
 * dot-separated field path) rather than `","`, so a single field that
 * happens to contain a comma can never collide with two separate fields
 * and silently return the wrong compiled function.
 */
function cacheKey(
  operation: string,
  requestedFields?: string[],
): string {
  if (!requestedFields || requestedFields.length === 0) return operation;
  // Copy before sorting — never mutate the caller's array.
  return `${operation}\u0000${[...requestedFields].sort().join("\u0000")}`;
}

function getOrCompile(
document: BridgeDocument,
operation: string,
requestedFields?: string[],
): BridgeFn {
const key = cacheKey(operation, requestedFields);
let opMap = fnCache.get(document);
if (opMap) {
const cached = opMap.get(operation);
const cached = opMap.get(key);
if (cached) return cached;
}

const { functionBody } = compileBridge(document, { operation });
const { functionBody } = compileBridge(document, {
operation,
requestedFields,
});

let fn: BridgeFn;
try {
Expand Down Expand Up @@ -133,7 +164,7 @@ function getOrCompile(document: BridgeDocument, operation: string): BridgeFn {
opMap = new Map();
fnCache.set(document, opMap);
}
opMap.set(operation, fn);
opMap.set(key, fn);
return fn;
}

Expand Down Expand Up @@ -202,7 +233,7 @@ export async function executeBridge<T = unknown>(
logger,
} = options;

const fn = getOrCompile(document, operation);
const fn = getOrCompile(document, operation, options.requestedFields);

// Merge built-in std namespace with user-provided tools, then flatten
// so the generated code can access them via dotted keys like tools["std.str.toUpperCase"].
Expand Down
Loading