Skip to content

Commit a236fe7

Browse files
authored
Merge pull request #200 from PredicateSystems/simplified_tracer
simplified boilerplate and tracer
2 parents 78fe1c3 + b4aa7cc commit a236fe7

File tree

12 files changed

+1310
-28
lines changed

12 files changed

+1310
-28
lines changed

.github/workflows/release.yml

Lines changed: 6 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -198,21 +198,15 @@ jobs:
198198

199199
publish-compat-shim:
200200
runs-on: ubuntu-latest
201-
needs: build-and-publish
202-
203-
steps:
201+
needs: build-and-publish

steps:
204202
- name: Checkout code
205-
uses: actions/checkout@v4
206-
207-
- name: Set up Node.js
203+
uses: actions/checkout@v4

- name: Set up Node.js
208204
uses: actions/setup-node@v4
209205
with:
210206
node-version: '20'
211207
registry-url: 'https://registry.npmjs.org'
212208
scope: '@predicatesystems'
213-
always-auth: true
214-
215-
- name: Extract version from tag or input
209+
always-auth: true

- name: Extract version from tag or input
216210
id: version
217211
run: |
218212
if [ "${{ github.event_name }}" == "release" ]; then
@@ -222,18 +216,13 @@ jobs:
222216
VERSION="${{ github.event.inputs.version }}"
223217
fi
224218
echo "version=$VERSION" >> $GITHUB_OUTPUT
225-
echo "Version: $VERSION"
226-
227-
- name: Sync shim version and runtime dependency
219+
echo "Version: $VERSION"

- name: Sync shim version and runtime dependency
228220
run: |
229221
VERSION="${{ steps.version.outputs.version }}"
230222
npm pkg set version=$VERSION --prefix compat/sdk-shim
231-
npm pkg set dependencies."@predicatesystems/runtime"=$VERSION --prefix compat/sdk-shim
232-
233-
- name: Publish compatibility shim to npm
223+
npm pkg set dependencies."@predicatesystems/runtime"=$VERSION --prefix compat/sdk-shim

- name: Publish compatibility shim to npm
234224
run: |
235225
cd compat/sdk-shim
236226
npm publish --access public
237227
env:
238-
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
239-
228+
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

src/agents/index.ts

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
/**
2+
* Agents Module
3+
*
4+
* High-level agent implementations for browser automation.
5+
*/
6+
7+
// Browser Agent (enterprise features)
8+
export {
9+
PredicateBrowserAgent,
10+
type PredicateBrowserAgentConfig,
11+
type PermissionRecoveryConfig,
12+
type VisionFallbackConfig,
13+
type CaptchaConfig,
14+
} from './browser-agent';
15+
16+
// Planner-Executor Agent (two-tier LLM architecture)
17+
export {
18+
// Configuration
19+
type SnapshotEscalationConfig,
20+
type RetryConfig,
21+
type StepwisePlanningConfig,
22+
type PlannerExecutorConfig,
23+
ConfigPreset,
24+
getConfigPreset,
25+
mergeConfig,
26+
DEFAULT_CONFIG,
27+
// Factory
28+
type CreateAgentOptions,
29+
type AgentProviders,
30+
detectProvider,
31+
createProvider,
32+
resolveConfig,
33+
createPlannerExecutorAgentProviders,
34+
} from './planner-executor';
Lines changed: 276 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,276 @@
1+
/**
2+
* Agent Factory for PlannerExecutorAgent
3+
*
4+
* Provides convenient factory functions to create agents with sensible defaults,
5+
* auto-provider detection, and auto-tracer creation.
6+
*/
7+
8+
import { LLMProvider, OllamaProvider, OpenAIProvider, AnthropicProvider } from '../../llm-provider';
9+
import { createTracer, createLocalTracer, Tracer } from '../../tracing';
10+
import {
11+
PlannerExecutorConfig,
12+
ConfigPreset,
13+
getConfigPreset,
14+
mergeConfig,
15+
DEFAULT_CONFIG,
16+
DeepPartial,
17+
} from './config';
18+
19+
/**
20+
* Options for creating a PlannerExecutorAgent.
21+
*/
22+
export interface CreateAgentOptions {
23+
/** Model name for planning (e.g., "gpt-4o", "qwen3:8b") */
24+
plannerModel: string;
25+
26+
/** Model name for execution (e.g., "gpt-4o-mini", "qwen3:4b") */
27+
executorModel: string;
28+
29+
/** Provider for planner ("auto", "ollama", "openai", "anthropic") */
30+
plannerProvider?: 'auto' | 'ollama' | 'openai' | 'anthropic';
31+
32+
/** Provider for executor ("auto", "ollama", "openai", "anthropic") */
33+
executorProvider?: 'auto' | 'ollama' | 'openai' | 'anthropic';
34+
35+
/** Ollama server URL (default: http://localhost:11434) */
36+
ollamaBaseUrl?: string;
37+
38+
/** OpenAI API key (defaults to OPENAI_API_KEY env var) */
39+
openaiApiKey?: string;
40+
41+
/** Anthropic API key (defaults to ANTHROPIC_API_KEY env var) */
42+
anthropicApiKey?: string;
43+
44+
/** Configuration preset or partial config */
45+
config?: ConfigPreset | string | DeepPartial<PlannerExecutorConfig>;
46+
47+
/** Run ID for tracing (generates UUID if not provided) */
48+
runId?: string;
49+
50+
/** Whether to auto-create tracer (default: true) */
51+
autoTracer?: boolean;
52+
}
53+
54+
/**
55+
* Auto-detect provider from model name.
56+
*/
57+
export function detectProvider(model: string): 'openai' | 'anthropic' | 'ollama' {
58+
const modelLower = model.toLowerCase();
59+
60+
// OpenAI models
61+
if (
62+
modelLower.startsWith('gpt-') ||
63+
modelLower.startsWith('o1-') ||
64+
modelLower.startsWith('o3-') ||
65+
modelLower.startsWith('o4-')
66+
) {
67+
return 'openai';
68+
}
69+
70+
// Anthropic models
71+
if (modelLower.startsWith('claude-')) {
72+
return 'anthropic';
73+
}
74+
75+
// Common Ollama model patterns
76+
const ollamaPatterns = ['qwen', 'llama', 'phi', 'mistral', 'gemma', 'deepseek', 'codellama'];
77+
if (ollamaPatterns.some(p => modelLower.startsWith(p))) {
78+
return 'ollama';
79+
}
80+
81+
// Ollama models typically have "model:tag" format
82+
if (model.includes(':')) {
83+
return 'ollama';
84+
}
85+
86+
// Default to ollama for unknown models (assume local)
87+
return 'ollama';
88+
}
89+
90+
/**
91+
* Create LLM provider based on provider name.
92+
*/
93+
export function createProvider(
94+
model: string,
95+
provider: 'auto' | 'ollama' | 'openai' | 'anthropic',
96+
options: {
97+
ollamaBaseUrl?: string;
98+
openaiApiKey?: string;
99+
anthropicApiKey?: string;
100+
}
101+
): LLMProvider {
102+
const resolvedProvider = provider === 'auto' ? detectProvider(model) : provider;
103+
104+
switch (resolvedProvider) {
105+
case 'ollama':
106+
return new OllamaProvider({
107+
model,
108+
baseUrl: options.ollamaBaseUrl ?? 'http://localhost:11434',
109+
});
110+
111+
case 'openai': {
112+
const apiKey = options.openaiApiKey ?? process.env.OPENAI_API_KEY;
113+
if (!apiKey) {
114+
throw new Error('OpenAI API key required. Set OPENAI_API_KEY or pass openaiApiKey option.');
115+
}
116+
return new OpenAIProvider(apiKey, model);
117+
}
118+
119+
case 'anthropic': {
120+
const apiKey = options.anthropicApiKey ?? process.env.ANTHROPIC_API_KEY;
121+
if (!apiKey) {
122+
throw new Error(
123+
'Anthropic API key required. Set ANTHROPIC_API_KEY or pass anthropicApiKey option.'
124+
);
125+
}
126+
return new AnthropicProvider(apiKey, model);
127+
}
128+
129+
default:
130+
throw new Error(
131+
`Unknown provider: ${provider}. Supported: 'auto', 'ollama', 'openai', 'anthropic'`
132+
);
133+
}
134+
}
135+
136+
/**
137+
* Resolve configuration from preset or partial config.
138+
*/
139+
export function resolveConfig(
140+
config?: ConfigPreset | string | DeepPartial<PlannerExecutorConfig>
141+
): PlannerExecutorConfig {
142+
if (!config) {
143+
return { ...DEFAULT_CONFIG };
144+
}
145+
146+
// String preset name
147+
if (typeof config === 'string') {
148+
return getConfigPreset(config);
149+
}
150+
151+
// It's a partial config object - merge with defaults
152+
return mergeConfig(config);
153+
}
154+
155+
/**
156+
* Result from createPlannerExecutorAgentProviders.
157+
*
158+
* Note: The full PlannerExecutorAgent is not yet implemented in TypeScript.
159+
* This function creates the providers and config that will be used when
160+
* the agent is ported.
161+
*/
162+
export interface AgentProviders {
163+
/** Planner LLM provider */
164+
planner: LLMProvider;
165+
166+
/** Executor LLM provider */
167+
executor: LLMProvider;
168+
169+
/** Resolved configuration */
170+
config: PlannerExecutorConfig;
171+
172+
/** Tracer instance (if autoTracer was enabled) */
173+
tracer?: Tracer;
174+
}
175+
176+
/**
177+
* Create providers and configuration for PlannerExecutorAgent.
178+
*
179+
* This is a helper that creates the LLM providers with auto-detection
180+
* and resolves configuration from presets. Use this until the full
181+
* PlannerExecutorAgent is ported to TypeScript.
182+
*
183+
* @example Minimal local Ollama setup
184+
* ```typescript
185+
* const { planner, executor, config } = await createPlannerExecutorAgentProviders({
186+
* plannerModel: 'qwen3:8b',
187+
* executorModel: 'qwen3:4b',
188+
* });
189+
* ```
190+
*
191+
* @example With cloud OpenAI
192+
* ```typescript
193+
* const { planner, executor, config } = await createPlannerExecutorAgentProviders({
194+
* plannerModel: 'gpt-4o',
195+
* executorModel: 'gpt-4o-mini',
196+
* openaiApiKey: 'sk-...',
197+
* });
198+
* ```
199+
*
200+
* @example Mixed cloud planner, local executor
201+
* ```typescript
202+
* const { planner, executor, config } = await createPlannerExecutorAgentProviders({
203+
* plannerModel: 'gpt-4o',
204+
* plannerProvider: 'openai',
205+
* executorModel: 'qwen3:4b',
206+
* executorProvider: 'ollama',
207+
* openaiApiKey: 'sk-...',
208+
* });
209+
* ```
210+
*
211+
* @example With config preset
212+
* ```typescript
213+
* import { ConfigPreset } from '@predicatesystems/runtime';
214+
*
215+
* const { planner, executor, config } = await createPlannerExecutorAgentProviders({
216+
* plannerModel: 'qwen3:8b',
217+
* executorModel: 'qwen3:4b',
218+
* config: ConfigPreset.LOCAL_SMALL_MODEL,
219+
* });
220+
* ```
221+
*/
222+
export async function createPlannerExecutorAgentProviders(
223+
options: CreateAgentOptions
224+
): Promise<AgentProviders> {
225+
const {
226+
plannerModel,
227+
executorModel,
228+
plannerProvider = 'auto',
229+
executorProvider = 'auto',
230+
ollamaBaseUrl,
231+
openaiApiKey,
232+
anthropicApiKey,
233+
config,
234+
runId,
235+
autoTracer = false,
236+
} = options;
237+
238+
// Create providers
239+
const planner = createProvider(plannerModel, plannerProvider, {
240+
ollamaBaseUrl,
241+
openaiApiKey,
242+
anthropicApiKey,
243+
});
244+
245+
const executor = createProvider(executorModel, executorProvider, {
246+
ollamaBaseUrl,
247+
openaiApiKey,
248+
anthropicApiKey,
249+
});
250+
251+
// Resolve configuration
252+
const resolvedConfig = resolveConfig(config);
253+
254+
// Create tracer if requested
255+
let tracer: Tracer | undefined;
256+
if (autoTracer) {
257+
const apiKey = process.env.PREDICATE_API_KEY;
258+
if (apiKey) {
259+
tracer = await createTracer({
260+
apiKey,
261+
runId,
262+
llmModel: `${plannerModel}/${executorModel}`,
263+
agentType: 'planner-executor',
264+
});
265+
} else {
266+
tracer = createLocalTracer(runId);
267+
}
268+
}
269+
270+
return {
271+
planner,
272+
executor,
273+
config: resolvedConfig,
274+
tracer,
275+
};
276+
}

0 commit comments

Comments
 (0)