From 53f8ccb17eb89b350919fd4d5756f000e1005e41 Mon Sep 17 00:00:00 2001 From: Michael Dailey Date: Fri, 8 May 2026 14:42:35 -0500 Subject: [PATCH 01/12] PDX-0: feat(mcp): add user-facing guide prompts, tool-guide resource, and git-workflow command RCA: Users and AI agents lacked structured onboarding, troubleshooting, and tool selection guidance when connecting a Provar project via ProvarDX MCP, causing long friction loops and repeated tool selection mistakes that baked-in agent workflows would prevent. Fix: Added three MCP prompts (provar.guide.onboarding, troubleshoot, orchestration), one resource (provar://docs/tool-guide), and a tracked /git-workflow slash command covering Jira ticket creation, branch naming, worktree setup with yarn install, and the full dev/PR lifecycle. Smoke test updated to 54 entries. Gitignore tightened to track .claude/commands/. --- .claude/commands/git-workflow.md | 257 ++++++++++++++++++++++ .gitignore | 7 +- docs/PROVAR_TOOL_GUIDE.md | 167 ++++++++++++++ scripts/mcp-smoke.cjs | 26 ++- src/mcp/prompts/guidePrompts.ts | 366 +++++++++++++++++++++++++++++++ src/mcp/prompts/index.ts | 4 + src/mcp/server.ts | 28 +++ 7 files changed, 849 insertions(+), 6 deletions(-) create mode 100644 .claude/commands/git-workflow.md create mode 100644 docs/PROVAR_TOOL_GUIDE.md create mode 100644 src/mcp/prompts/guidePrompts.ts diff --git a/.claude/commands/git-workflow.md b/.claude/commands/git-workflow.md new file mode 100644 index 0000000..2bdb765 --- /dev/null +++ b/.claude/commands/git-workflow.md @@ -0,0 +1,257 @@ +You are executing the provardx-cli development git workflow. Follow these steps in order. Stop and confirm with the user at each CONFIRM point before proceeding. + +Full reference doc: `.claude/agents/dev-git-workflow.md` + +--- + +## Step 0 — Establish the Jira ticket (planning phase) + +Ask the user: + +> "Do you have a PDX ticket for this work? 
+> A) Yes — give me the number +> B) No — create one now as part of planning +> C) No ticket needed (framework/chore work)" + +--- + +### Option A — Existing ticket + +User provides the ticket number. Set `TICKET = PDX-`. + +Fetch the ticket to confirm it exists and read its summary and status: + +- Use `getJiraIssue` (cloudId: `3c8a4f06-8ecc-4723-876f-b096b816c6ec`, issueIdOrKey: `PDX-`) +- Show the user: ticket summary, current status, and URL +- If the ticket is already Closed, warn the user before proceeding + +Skip to **Derive branch variables** below. + +--- + +### Option B — Create ticket now (planning phase) + +Ask the user a single compound question to gather everything at once: + +> "Tell me about the work — I'll draft the ticket from your answer: +> +> 1. What should this change do? (one sentence — becomes the ticket summary) +> 2. Is it a new feature, bug fix, infrastructure/CI work, research spike, or internal task? +> 3. Why is it needed? What problem or requirement drives it? +> 4. How will we know it's done? (acceptance criteria — list conditions) +> 5. Anything explicitly out of scope?" + +From the user's answer, draft the full ticket content. Use your judgment to infer issue type if the user is vague. Do not ask follow-up questions unless a critical field (summary or acceptance criteria) is completely missing. + +**Issue type mapping:** +| Work described | Issue type | +|----------------|-----------| +| New user-facing capability | Story | +| Something broken | Bug | +| CI, infra, tooling, architecture | Enabler | +| Research / investigation / prototype | Spike | +| Internal work, no user impact | Task | + +**CONFIRM**: Show the drafted ticket for review before creating: + +``` +Summary: +Type: +Label: provardx-cli + +Description: +## Background + + +## Acceptance Criteria +- [ ] +- [ ] + +## Notes + +``` + +> "Does this look right? I'll create the Jira ticket now." 
+ +Once confirmed, create the ticket using the `createJiraIssue` MCP tool: + +- `cloudId`: `3c8a4f06-8ecc-4723-876f-b096b816c6ec` +- `projectKey`: `PDX` +- `issueTypeName`: as chosen above +- `summary`: as drafted +- `description`: full description in markdown +- `contentFormat`: `markdown` +- `additional_fields`: `{ "labels": ["provardx-cli"] }` + +The tool returns the new ticket key (e.g. `PDX-193`). Set `TICKET = PDX-`. + +Show the user: `Ticket created: https://provartesting.atlassian.net/browse/` + +--- + +### Option C — No ticket (PDX-0) + +Set `TICKET = PDX-0`. No Jira steps. Use this only for framework chores, internal tooling, or changes with no observable user or system behaviour change. + +--- + +### Derive branch variables + +Ask: "What type of change is this? (feature / fix)" — skip if already obvious from the issue type. + +Ask for a short branch slug (kebab-case, ≤ 30 chars, no spaces). + +Derive: + +- `BRANCH_TYPE` = `feature` or `fix` +- `BRANCH` = `feature/PDX--` or `fix/PDX--` (or `feature/` for PDX-0) + +**CONFIRM**: "I'll create branch `` off `develop`. Proceed?" + +--- + +## Step 1 — Create worktree and install dependencies + +```sh +# From the main repo root +git worktree add .claude/worktrees/ -b develop + +# Install node_modules so husky hooks work — ALWAYS do this in a new worktree +cd .claude/worktrees/ && yarn install +``` + +The `yarn install` step is mandatory. Without it, the pre-commit hook cannot find `wireit` and will fail with "wireit is not recognized". + +--- + +## Step 2 — Implement the change + +Work in the worktree at `.claude/worktrees//`. + +Before every commit attempt, run in the worktree directory: + +```sh +yarn compile +node_modules/.bin/nyc node_modules/.bin/mocha "test/**/*.test.ts" +node scripts/mcp-smoke.cjs 2>/dev/null +yarn lint +``` + +Fix any failures before staging. Do not move to Step 3 until all four pass. 
+ +--- + +## Step 3 — Stage and commit + +Stage files explicitly — never `git add -A`: + +```sh +git add ... +``` + +Commit with the required PDX format: + +```sh +git commit -m "$(cat <<'EOF' +: (): + +RCA: +Fix: +EOF +)" +``` + +Valid `type` values: `feat`, `fix`, `test`, `docs`, `chore`, `refactor` +Valid `scope` values: `mcp`, `prompts`, `resources`, `cli`, `test`, `docs`, `ci` + +If the commit-msg hook rejects the message, read the error and fix the message. **Do not use `--no-verify` unless the user explicitly approves it.** + +--- + +## Step 4 — Push and open PR + +```sh +git push -u origin +``` + +The pre-push hook runs `yarn build && yarn test` (60–120 seconds). If it fails, fix the issue, commit the fix (Step 3 format), and push again. + +Open the PR: + +```sh +gh pr create \ + --base develop \ + --title ": " \ + --body "$(cat <<'EOF' +## Summary +- + +## Jira +https://provartesting.atlassian.net/browse/ + +## Test plan +- [ ] yarn compile passes +- [ ] yarn test:only passes +- [ ] mcp-smoke.cjs passes +- [ ] yarn lint passes + +## Changes +- : +EOF +)" +``` + +Omit the `## Jira` section for `PDX-0` work. + +**CONFIRM**: Show the user the PR URL and ask: "PR is open. Should I check the Copilot review now?" + +--- + +## Step 5 — Address Copilot review + +```sh +gh pr view --comments +gh pr checks +``` + +For each Copilot comment: + +- **Valid concern** → fix in the worktree, commit (Step 3 format), push +- **Not applicable** → reply explaining why: `gh pr comment --body "..."` +- **Security comment** → always address; never dismiss without strong justification + +--- + +## Step 6 — Merge and close ticket + +**CONFIRM**: "All checks pass. Should I merge the PR?" + +```sh +gh pr merge --squash --delete-branch +``` + +For ticketed work (non-PDX-0): transition the Jira ticket to Closed. 
+ +- Web: `https://provartesting.atlassian.net/browse/` +- MCP: `transitionJiraIssue` (cloudId: `3c8a4f06-8ecc-4723-876f-b096b816c6ec`) + +Clean up the worktree: + +```sh +git worktree remove .claude/worktrees/ +git worktree prune +``` + +--- + +## Hook failures quick-reference + +| Hook | Failure | Fix | +| ---------- | -------------------------- | -------------------------------------------------------------- | +| pre-commit | `wireit is not recognized` | Run `yarn install` in the worktree first | +| pre-commit | ESLint violation | Fix the violation, `git add` the file, retry | +| pre-commit | Prettier | Run `yarn pretty-quick --staged`, restage, retry | +| commit-msg | Wrong format | Read error, rewrite message with `git commit --amend -m "..."` | +| commit-msg | Lines too short | `RCA:` or `Req:` and `Fix:` each need ≥ 40 characters | +| pre-push | Compile error | Fix TypeScript error, commit, push again | +| pre-push | Test failure | Fix the test, commit, push again | diff --git a/.gitignore b/.gitignore index 8541154..10f6ca6 100644 --- a/.gitignore +++ b/.gitignore @@ -55,8 +55,11 @@ mochawesome-report .env.local .env.*.local -# Claude -.claude/ +# Claude — local-only directories (worktrees, per-dev agent files, gitignored routing index) +.claude/worktrees/ +.claude/agents/ +AGENTS.md +# .claude/commands/ is intentionally tracked — project slash commands for Claude Code # NitroX schema files — do not commit until IP/licensing confirmed with Provar team # See: src/mcp/tools/nitroXTools.ts and plan notes diff --git a/docs/PROVAR_TOOL_GUIDE.md b/docs/PROVAR_TOOL_GUIDE.md new file mode 100644 index 0000000..a6d46c8 --- /dev/null +++ b/docs/PROVAR_TOOL_GUIDE.md @@ -0,0 +1,167 @@ +# ProvarDX MCP Tool Guide + +Reference for selecting the right MCP tool for a given goal. Organised by what you're trying to accomplish, not by tool name. + +--- + +## "I want to understand my project" + +Start here with any new or unfamiliar project. 
+ +``` +provar_project_inspect { project_path } +``` + +Returns: test case inventory, connection list, plan coverage, config files, ANT files. Run before any authoring or execution task. + +To validate structure (not just inventory): + +``` +provar_project_validate { project_path } +``` + +Returns: structure violations, broken callables, missing references. + +--- + +## "I want to run tests" + +### Locally (Provar Automation) + +Fixed sequence — do not skip steps: + +``` +1. provar_automation_config_load { properties_path } ← required first +2. provar_automation_compile { project_path } ← required before run +3. provar_automation_testrun { properties_path, ... } +``` + +No properties file yet? Generate one first: + +``` +provar_properties_generate { project_path, connection_name } +``` + +### Via Quality Hub (remote) + +``` +1. provar_qualityhub_connect { target_org } +2. provar_qualityhub_testrun { target_org, flags: ["--plan", ""] } +3. provar_qualityhub_testrun_report { target_org, run_id } ← poll until done +``` + +**When to use local vs Quality Hub:** + +- Local: developer iteration, fast feedback, single machine +- Quality Hub: CI/CD, team-wide, managed environments, plan-level reporting + +--- + +## "I want to understand why tests failed" + +``` +1. provar_testrun_report_locate { project_path } ← find where results landed +2. provar_testrun_rca { report_path, mode: "rca" } +``` + +`provar_testrun_rca` classifies each failure (auth, locator, assertion, data, etc.) and gives a recommendation per failure. Use `mode: "failures"` for the raw failure list without classification. + +--- + +## "I want to write a new test" + +``` +1. provar_project_inspect { project_path } ← find coverage gaps first +2. provar_testcase_generate { project_path, name, ... } +3. provar_testcase_step_edit { file_path, ... } ← repeat per step +4. provar_testcase_validate { file_path } ← must pass before adding to plan +5. provar_testplan_add_instance { plan_path, testcase_path } +6. 
provar_testplan_validate { plan_path } +``` + +--- + +## "I want to work with Salesforce metadata" + +``` +provar_automation_metadata_download { project_path, ... } +``` + +Run when: first setting up a project, fields/objects are missing from test steps, or after Salesforce org changes. If this fails with `[DOWNLOAD_ERROR]`, the credentials are the issue — re-authenticate the connection in Provar IDE. + +--- + +## "I want to work with page objects" + +``` +provar_pageobject_generate { project_path, target_url, ... } ← generate +provar_pageobject_validate { file_path } ← validate first +provar_automation_compile { project_path } ← after any change +``` + +Always validate before compile. Validation errors are easier to read than compile errors. + +--- + +## "I want to work with LWC / Screen Flows (NitroX)" + +``` +provar_nitrox_discover { project_path } ← see what's already modeled +provar_nitrox_generate { project_path, ... } ← generate for a component +provar_nitrox_validate { file_path } ← always validate after generate +provar_nitrox_patch { file_path, ... } ← update existing model +provar_nitrox_validate { file_path } ← always validate after patch +``` + +--- + +## "I want to manage configuration" + +``` +provar_properties_read { file_path } ← read current config +provar_properties_set { file_path, key, value } ← change a single value +provar_properties_validate { file_path } ← validate after changes +``` + +| Property | Controls | +| ---------------- | --------------------------------------------------------- | +| `provarHome` | Path to Provar Automation installation | +| `projectPath` | Path to the Provar project | +| `resultsPath` | Where test results are written | +| `connectionName` | Which Salesforce connection to use | +| `metadataLevel` | `Reload` / `Refresh` / `Reuse` — metadata cache behaviour | + +--- + +## "I want to check which orgs are available" + +``` +provar_connection_list { project_path } +``` + +Returns all connections in `.testproject`. 
Use the `name` field from each connection as `connectionName` in properties files. + +--- + +## "I want to create a defect for a failed test" + +``` +1. provar_testrun_rca { report_path, mode: "rca" } +2. provar_qualityhub_defect_create { target_org, ... } +``` + +Requires Quality Hub to be connected. + +--- + +## Tool Selection Anti-Patterns + +**Don't run `testrun` without `config_load` first.** It fails with `MISSING_FILE` every time. + +**Don't run `compile` on a broken page object.** Validate with `provar_pageobject_validate` first. + +**Don't call `metadata_download` to fix an assertion failure.** Metadata download refreshes the field cache; it doesn't fix org data state. + +**Don't guess the project path.** Confirm with the user or inspect a known parent directory. + +**Don't parse raw testrun stdout for pass/fail.** Use `provar_testrun_rca` — raw output contains Java logging noise. diff --git a/scripts/mcp-smoke.cjs b/scripts/mcp-smoke.cjs index 0bbbd39..81f5042 100644 --- a/scripts/mcp-smoke.cjs +++ b/scripts/mcp-smoke.cjs @@ -363,11 +363,29 @@ async function runTests() { arguments: { story: 'Verify Users table has at least one Active record after Salesforce flow runs' }, }); - // ── 49. provar_connection_list ──────────────────────────────────────────── + // ── 49. provar.guide.onboarding prompt ─────────────────────────────────── + await rpc('provar.guide.onboarding (prompt)', 'prompts/get', { + name: 'provar.guide.onboarding', + arguments: { mode: 'local' }, + }); + + // ── 50. provar.guide.troubleshoot prompt ────────────────────────────────── + await rpc('provar.guide.troubleshoot (prompt)', 'prompts/get', { + name: 'provar.guide.troubleshoot', + arguments: { errorMessage: 'ClassNotFoundException: pageobjects.LoginPage' }, + }); + + // ── 51. 
provar.guide.orchestration prompt ───────────────────────────────── + await rpc('provar.guide.orchestration (prompt)', 'prompts/get', { + name: 'provar.guide.orchestration', + arguments: { task: 'run-local' }, + }); + + // ── 52. provar_connection_list ──────────────────────────────────────────── // TMP has no .testproject → CONNECTION_FILE_NOT_FOUND result (not a protocol error) await callTool('provar_connection_list', { project_path: TMP }); - // ── 50. provar_testcase_step_edit ───────────────────────────────────────── + // ── 53. provar_testcase_step_edit ───────────────────────────────────────── // TMP/nonexistent.testcase does not exist → FILE_NOT_FOUND result await callTool('provar_testcase_step_edit', { test_case_path: path.join(TMP, 'nonexistent.testcase'), @@ -383,8 +401,8 @@ async function runTests() { // ---------------------------------------------------------------------------- server.on('close', () => { clearTimeout(overallTimer); - // initialize + tools/list + 40 tools + prompts/list + 8 prompts/get (setup excluded from default count) - const TOTAL_EXPECTED = 51 + (INCLUDE_SETUP ? 1 : 0); + // initialize + tools/list + 40 tools + prompts/list + 11 prompts/get (setup excluded from default count) + const TOTAL_EXPECTED = 54 + (INCLUDE_SETUP ? 1 : 0); let passed = 0; let failed = 0; diff --git a/src/mcp/prompts/guidePrompts.ts b/src/mcp/prompts/guidePrompts.ts new file mode 100644 index 0000000..06baf9a --- /dev/null +++ b/src/mcp/prompts/guidePrompts.ts @@ -0,0 +1,366 @@ +/* + * Copyright (c) 2024 Provar Limited. + * All rights reserved. + * Licensed under the BSD 3-Clause license. 
+ * For full license text, see LICENSE.md file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ + +import { z } from 'zod'; +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; + +// ── Prompt: provar.guide.onboarding ────────────────────────────────────────── + +export function registerOnboardingPrompt(server: McpServer): void { + server.prompt( + 'provar.guide.onboarding', + 'First-time setup guide for a Provar project. Walks through project discovery, connection verification, properties configuration, and a first test run. Use this when a user is getting started with ProvarDX for the first time.', + { + projectPath: z + .string() + .optional() + .describe( + 'Absolute path to the Provar project root (the folder containing .testproject). If omitted, the guide will ask the user for it.' + ), + mode: z + .string() + .optional() + .describe( + '"local" (default) for running tests via Provar Automation on this machine. "quality-hub" for remote execution via a connected Quality Hub org.' + ), + }, + ({ projectPath, mode }) => ({ + messages: [ + { + role: 'user' as const, + content: { + type: 'text' as const, + text: `You are a ProvarDX setup assistant. Help the user connect their Provar project and run their first test. + +## Context + +${ + projectPath + ? `The user's Provar project is at: ${projectPath}` + : 'Ask the user for the path to their Provar project root (the folder containing .testproject). Common locations: ~/ProvarProjects/ on Mac/Linux, C:\\Users\\\\ProvarProjects\\ on Windows. The marker file is .testproject at the project root.' +} + +Execution mode: ${mode === 'quality-hub' ? 'Quality Hub (remote)' : 'Local (Provar Automation)'} + + +## Workflow + +Follow these steps in order. Stop and report if any step fails — do not skip ahead. + +### Step 1 — Verify the server is reachable +Call: provardx_ping +If this fails, the MCP server is not running. 
Tell the user to run: sf provar mcp start --allowed-paths + +### Step 2 — Inspect the project +Call: provar_project_inspect with the project path. + +If PATH_NOT_ALLOWED: the MCP server's --allowed-paths doesn't include this directory. Ask the user to restart with the correct path. +If PATH_NOT_FOUND: confirm the path with the user — typos and path separator differences (/ vs \\) are common. + +From the result, summarise in plain language: +- How many test cases were found and where +- Which Salesforce connections are configured +- Whether a provardx-properties.json already exists + +### Step 3 — Check connections +Call: provar_connection_list with the project path. + +If no connections appear, the project isn't connected to any org yet. Ask the user to open Provar IDE → Project → Connections → Add Connection. + +${ + mode === 'quality-hub' + ? `### Step 4 — Connect to Quality Hub +Call: provar_qualityhub_connect with the user's SF org alias. +Then: provar_qualityhub_display to confirm the correct org is connected. + +If NOT_AUTHENTICATED: the user needs to run: sf org login web -o + +### Step 5 — Retrieve available test plans +Call: provar_qualityhub_testcase_retrieve to show what's available to run. +Ask the user which plan they'd like to run first. + +### Step 6 — Run first test +Call: provar_qualityhub_testrun with the chosen plan name. +Poll with provar_qualityhub_testrun_report every 30–60 seconds until the run completes. +Stop polling after 20 minutes and ask the user to check Quality Hub directly.` + : `### Step 4 — Configure properties +If a provardx-properties.json was found in step 2: + Call: provar_properties_read to show the user the current config. + Confirm provarHome and connectionName look correct. + +If no properties file was found: + Call: provar_properties_generate using the project path and the first connection name from step 3. + +### Step 5 — Register the config +Call: provar_automation_config_load with the properties file path. 
+This must succeed before compile or test run. If it fails: + MISSING_FILE: the path is wrong — recheck it. + AUTOMATION_CONFIG_LOAD_FAILED: call provar_properties_validate to find the issue. + +### Step 6 — Compile +Call: provar_automation_compile with the project path. +If this fails with ClassNotFoundException or CompilationException, call provar_pageobject_validate on any .java files in src/pageobjects/ to find the issue. + +### Step 7 — Run a first test +Ask the user which test case they'd like to run (or suggest the first one from the inspect result). +Call: provar_automation_testrun with the properties path and the chosen test.` +} + +## Common First-Time Issues + +| Error | Cause | Fix | +|-------|-------|-----| +| PATH_NOT_ALLOWED | --allowed-paths too narrow | Restart MCP server with parent directory included | +| MISSING_FILE on compile/run | config_load skipped or failed | Run provar_automation_config_load first | +| No connections returned | Project not connected to org | Open Provar IDE → Connections → Add | +| [DOWNLOAD_ERROR] on metadata | Salesforce auth expired | Re-authenticate connection in Provar IDE | +| ClassNotFoundException | Page objects not compiled | Run provar_automation_compile before testrun | + +## Finishing Up + +After a successful first run, summarise in 3 sentences: +1. What project was connected and how many test cases it has +2. Which connection/org is active +3. What they can do next (run more tests, add to CI, generate new tests)`, + }, + }, + ], + }) + ); +} + +// ── Prompt: provar.guide.troubleshoot ──────────────────────────────────────── + +export function registerTroubleshootPrompt(server: McpServer): void { + server.prompt( + 'provar.guide.troubleshoot', + 'Systematic failure diagnosis for Provar test runs. Classifies the error, maps it to a root cause, and gives an actionable fix. 
Use when a test failed, a tool returned an error, or an agent is looping without progress.', + { + errorMessage: z + .string() + .optional() + .describe( + 'The error message, tool output, or failure description. Paste as much as available — the more detail, the better the diagnosis.' + ), + projectPath: z.string().optional().describe('Absolute path to the Provar project root, if available.'), + }, + ({ errorMessage, projectPath }) => ({ + messages: [ + { + role: 'user' as const, + content: { + type: 'text' as const, + text: `You are a ProvarDX diagnostics expert. Identify the root cause of the failure and give an actionable fix. + +${ + errorMessage + ? `## Failure to diagnose\n\n${errorMessage}` + : 'Ask the user to share the error message, tool output, or a description of what went wrong.' +} +${projectPath ? `\nProject path: ${projectPath}` : ''} + +## Loop Detection Rule + +If you have tried the same fix 3 times and the error hasn't changed, STOP. Tell the user what you tried and ask them to confirm the environment (org auth, file paths, Provar install). + +## Step 1 — Run RCA if a test report exists + +If there is a completed test run, use the RCA tool first: + Call: provar_testrun_report_locate (with project path if available) + Call: provar_testrun_rca with mode: "rca" + +The RCA tool classifies each failure and gives a recommendation per failure. Use it before reading raw stack traces. 
+ +## Step 2 — Classify by error pattern + +| Pattern in the error | Category | Action | +|---------------------|----------|--------| +| PATH_NOT_ALLOWED | Path policy | Ask user to restart MCP server with --allowed-paths set to the project parent dir | +| MISSING_FILE, AUTOMATION_CONFIG_LOAD_FAILED | Missing prerequisite | Run provar_automation_config_load with the properties file path | +| [DOWNLOAD_ERROR], INVALID_LOGIN, AuthenticationException | Salesforce auth | User must re-authenticate the connection in Provar IDE — cannot fix via MCP | +| ClassNotFoundException, CompilationException | Compile missing | Run provar_automation_compile; run provar_pageobject_validate first if compile fails | +| NoSuchElementException, StaleElementReferenceException | Stale locator | User must re-capture the element in Provar IDE — tell them which test step failed | +| TimeoutException, ElementClickInterceptedException | UI timing | Increase step timeout or check org performance | +| SessionNotCreatedException, Chrome version must be between | WebDriver mismatch | Update ChromeDriver to match installed Chrome | +| AssertionException, UiAssert | Assertion | Verify expected value is correct for current org data state | +| Required fields are missing | Salesforce required field | Check field-level security for the running user | +| FIELD_CUSTOM_VALIDATION_EXCEPTION | Salesforce validation rule | Review validation rules on the target object | +| INVALID_CROSS_REFERENCE_KEY | Record not found | Verify referenced record exists and running user has access | +| bad value for restricted picklist | Picklist mismatch | Run provar_automation_metadata_download; check for trailing spaces | +| LicenseException, license.*expired | License | Contact Provar support — not fixable via MCP | +| caseCall.*cannot.*resolv | Broken callable | Run provar_project_validate; look for PROJ-CALLABLE violations | + +## Step 3 — Know when to escalate + +Stop and ask the user when: +- The fix requires action in 
Provar IDE (re-authenticate, re-capture element) +- The fix requires action in a Salesforce org (data, permissions, validation rules) +- The error is LicenseException +- The RCA category is UNKNOWN with no recommendation + +When escalating, tell the user: what you tried, what the error says, your best diagnosis, and the specific action they need to take. + +## Reading Provar output + +Signal lines to look for: + PASSED: + FAILED: + Provar test run complete: X passed, Y failed + +Safely ignore: + com.networknt.schema.* + SEVERE.*Failed to configure logger.*\\.lck + Loading index of metadata`, + }, + }, + ], + }) + ); +} + +// ── Prompt: provar.guide.orchestration ─────────────────────────────────────── + +export function registerOrchestrationPrompt(server: McpServer): void { + server.prompt( + 'provar.guide.orchestration', + 'Task sequencing guide for multi-step Provar workflows. Shows the correct tool order for common tasks (run tests, author tests, debug failures, Quality Hub), prerequisite dependencies, and when to stop and ask the user.', + { + task: z + .string() + .optional() + .describe( + 'The type of task to sequence: "run-local" (local test execution), "run-quality-hub" (remote runs), "author-test" (writing new tests), "debug-failures" (diagnosing failures), "nitrox" (LWC/Screen Flow work). Omit for a general overview of all flows.' + ), + }, + ({ task }) => { + const flows: Record = { + 'run-local': `## Run Tests Locally + +Required sequence — do not skip steps: + +1. provar_project_inspect → confirm project root and connections exist +2. provar_properties_read OR provar_properties_generate +3. provar_automation_config_load ← MUST succeed before step 4 +4. provar_automation_compile ← MUST succeed before step 5 +5. provar_automation_testrun +6. provar_testrun_report_locate → find where results landed +7. provar_testrun_rca → classify any failures`, + + 'run-quality-hub': `## Run Tests via Quality Hub + +1. provar_qualityhub_connect → once per session +2. 
provar_qualityhub_display → confirm correct org +3. provar_qualityhub_testrun → returns run_id +4. provar_qualityhub_testrun_report → poll every 30–60s until terminal status + Stop polling after 20 minutes — ask user to check Quality Hub directly +5. provar_testrun_rca → if failures, classify them +6. provar_qualityhub_defect_create → optional, create defects for failures`, + + 'author-test': `## Author a New Test Case + +1. provar_project_inspect → find coverage gaps before writing +2. provar_automation_metadata_download → if SF metadata is stale (missing fields/objects) +3. provar_pageobject_generate → if a new page object is needed +4. provar_pageobject_validate → validate before compile +5. provar_automation_compile → after any page object change +6. provar_testcase_generate → create the test case file +7. provar_testcase_step_edit → add steps (repeat as needed) +8. provar_testcase_validate → MUST pass before adding to a plan +9. provar_testplan_add_instance → add to an existing plan +10. provar_testplan_validate → validate the plan`, + + 'debug-failures': `## Debug Failing Tests + +1. provar_testrun_report_locate → find the report file +2. provar_testrun_rca → classify failures by category + +Then act on the category: + AUTH failure → user must re-authenticate in Provar IDE (cannot fix via MCP) + LOCATOR failure → user must re-capture element in Provar IDE + COMPILE failure → provar_automation_compile, then provar_pageobject_validate if compile fails + CALLABLE failure → provar_project_validate, fix PROJ-CALLABLE violations + DATA failure → advise user on org data state + UNKNOWN → escalate to user with full RCA output`, + + nitrox: `## NitroX (LWC / Screen Flows / Industry Components) + +1. provar_nitrox_discover → see what's already modeled in the project +2. provar_nitrox_generate → for the target component +3. provar_nitrox_validate → always validate immediately after generate +4. provar_nitrox_patch → to update an existing model +5. 
provar_nitrox_validate → always validate after patch + +After adding a NitroX model to a page object, run provar_automation_compile.`, + + general: `## All Canonical Task Flows + +### Prerequisite graph (hard constraints) +provardx_ping → (confirms server is up — always run first in a fresh session) + +provar_properties_* or provar_properties_generate + └── provar_automation_config_load + └── provar_automation_compile + └── provar_automation_testrun + └── provar_testrun_report_locate + └── provar_testrun_rca + +provar_qualityhub_connect + └── provar_qualityhub_testrun + └── provar_qualityhub_testrun_report + +provar_pageobject_validate + └── provar_automation_compile (validate before compile — errors are clearer) + +provar_nitrox_generate OR provar_nitrox_patch + └── provar_nitrox_validate (always validate after) + +provar_testcase_generate OR provar_testcase_step_edit + └── provar_testcase_validate + └── provar_testplan_add_instance + └── provar_testplan_validate + +### Safe to run in parallel (no dependency between them) +- provar_project_inspect + provar_connection_list +- provar_pageobject_validate on multiple files +- provar_testcase_validate on multiple files +- provar_nitrox_validate on multiple models + +### Stopping rules +Stop and return to the user when: +1. The same fix has been tried 3 times with identical output +2. The fix requires action in Provar IDE or a Salesforce org +3. A LicenseException appears +4. RCA returns UNKNOWN with no recommendation +5. The task requires a decision only the user can make (which plan, which connection, which org)`, + }; + + const flowContent = flows[task ?? 'general'] ?? flows['general']; + + return { + messages: [ + { + role: 'user' as const, + content: { + type: 'text' as const, + text: `You are a ProvarDX workflow coordinator. Follow the task sequence below exactly. 
+ +${flowContent} + +## Rules for all tasks +- Always call provardx_ping first in a fresh session to confirm the server is up +- Always call provar_project_inspect before any authoring task +- provar_automation_config_load must succeed before compile or testrun — no exceptions +- Validate before execute: testcase_validate before adding to a plan, pageobject_validate before compile +- All paths must be within the --allowed-paths configured for this MCP server +- Stop and ask the user when you hit a stopping rule (see above)`, + }, + }, + ], + }; + } + ); +} diff --git a/src/mcp/prompts/index.ts b/src/mcp/prompts/index.ts index 494ad3e..e9e24a0 100644 --- a/src/mcp/prompts/index.ts +++ b/src/mcp/prompts/index.ts @@ -18,6 +18,7 @@ import { registerLoopCoveragePrompt, registerLoopDbPrompt, } from './loopPrompts.js'; +import { registerOnboardingPrompt, registerTroubleshootPrompt, registerOrchestrationPrompt } from './guidePrompts.js'; export function registerAllPrompts(server: McpServer): void { registerCrtMigrationPrompt(server); @@ -28,4 +29,7 @@ export function registerAllPrompts(server: McpServer): void { registerLoopReviewPrompt(server); registerLoopCoveragePrompt(server); registerLoopDbPrompt(server); + registerOnboardingPrompt(server); + registerTroubleshootPrompt(server); + registerOrchestrationPrompt(server); } diff --git a/src/mcp/server.ts b/src/mcp/server.ts index 6d63b3d..1e01ee2 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -140,5 +140,33 @@ export function createProvarMcpServer(config: ServerConfig): McpServer { } ); + server.resource( + 'provar-tool-guide', + 'provar://docs/tool-guide', + { + description: + 'Tool selection guide for ProvarDX MCP. Organised by what you want to accomplish (run tests, author tests, debug failures, manage config, etc.) rather than by tool name. 
Read this to choose the right tool and understand correct sequencing before calling tools.', + mimeType: 'text/markdown', + }, + () => { + try { + const text = readFileSync(join(docsDir, 'PROVAR_TOOL_GUIDE.md'), 'utf-8'); + return { + contents: [{ uri: 'provar://docs/tool-guide', mimeType: 'text/markdown', text }], + }; + } catch { + return { + contents: [ + { + uri: 'provar://docs/tool-guide', + mimeType: 'text/markdown', + text: '# ProvarDX Tool Guide\n\nGuide not found. Reinstall or upgrade the plugin and try again.', + }, + ], + }; + } + } + ); + return server; } From fcf9d6a4f552a292d6abfb18ad79494e513ea930 Mon Sep 17 00:00:00 2001 From: Michael Dailey Date: Fri, 8 May 2026 14:50:05 -0500 Subject: [PATCH 02/12] PDX-0: chore(ci): update QualityOrchestrator to floating v1 tag RCA: CI workflow pinned QualityOrchestrator at v1.0.0, requiring manual edits to pick up every subsequent patch or minor release, causing the action to drift behind the latest available version. Fix: Created floating v1 tag on mrdailey99/QualityOrchestrator (currently at v1.0.2) and updated CI_Execution.yml to reference @v1, so the workflow automatically uses the latest v1.x release without any further changes needed. 
--- .github/workflows/CI_Execution.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/CI_Execution.yml b/.github/workflows/CI_Execution.yml index 1e146a0..ea8e066 100644 --- a/.github/workflows/CI_Execution.yml +++ b/.github/workflows/CI_Execution.yml @@ -29,7 +29,7 @@ jobs: - uses: actions/checkout@v6 with: persist-credentials: false - - uses: mrdailey99/QualityOrchestrator@v1.0.0 + - uses: mrdailey99/QualityOrchestrator@v1 with: github-token: ${{ secrets.GITHUB_TOKEN }} test-dir: 'test' From 05e2e607c86c0d351c287741969fc7e5cd054419 Mon Sep 17 00:00:00 2001 From: Michael Dailey Date: Fri, 8 May 2026 15:11:43 -0500 Subject: [PATCH 03/12] PDX-0: fix(mcp): address Copilot review comments on PR #153 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit RCA: Copilot flagged 14 issues — wrong tool schemas in guide prompts and docs, missing build copy step, hardcoded cloudId in a public repo file, broken gitignored file reference. Fix: Add PROVAR_TOOL_GUIDE.md to package.json build copy; fix all wrong tool params in guide docs and prompts (properties_generate output_path, --plan-name, testrun_rca project_path, testcase_step_edit test_case_path, testplan add-instance hyphen, defect run_id); remove hardcoded cloudId; remove broken agents ref. Co-Authored-By: Claude Sonnet 4.6 --- .claude/commands/git-workflow.md | 8 +++----- docs/PROVAR_TOOL_GUIDE.md | 26 +++++++++++++++++--------- package.json | 2 +- src/mcp/prompts/guidePrompts.ts | 17 +++++++++-------- 4 files changed, 30 insertions(+), 23 deletions(-) diff --git a/.claude/commands/git-workflow.md b/.claude/commands/git-workflow.md index 2bdb765..dfef137 100644 --- a/.claude/commands/git-workflow.md +++ b/.claude/commands/git-workflow.md @@ -1,7 +1,5 @@ You are executing the provardx-cli development git workflow. Follow these steps in order. Stop and confirm with the user at each CONFIRM point before proceeding. 
-Full reference doc: `.claude/agents/dev-git-workflow.md` - --- ## Step 0 — Establish the Jira ticket (planning phase) @@ -21,7 +19,7 @@ User provides the ticket number. Set `TICKET = PDX-`. Fetch the ticket to confirm it exists and read its summary and status: -- Use `getJiraIssue` (cloudId: `3c8a4f06-8ecc-4723-876f-b096b816c6ec`, issueIdOrKey: `PDX-`) +- Call `getAccessibleAtlassianResources` to get the cloudId for your Jira instance, then use `getJiraIssue` (cloudId: ``, issueIdOrKey: `PDX-`) - Show the user: ticket summary, current status, and URL - If the ticket is already Closed, warn the user before proceeding @@ -75,7 +73,7 @@ Description: Once confirmed, create the ticket using the `createJiraIssue` MCP tool: -- `cloudId`: `3c8a4f06-8ecc-4723-876f-b096b816c6ec` +- `cloudId`: `` - `projectKey`: `PDX` - `issueTypeName`: as chosen above - `summary`: as drafted @@ -233,7 +231,7 @@ gh pr merge --squash --delete-branch For ticketed work (non-PDX-0): transition the Jira ticket to Closed. - Web: `https://provartesting.atlassian.net/browse/` -- MCP: `transitionJiraIssue` (cloudId: `3c8a4f06-8ecc-4723-876f-b096b816c6ec`) +- MCP: `transitionJiraIssue` (cloudId: ``) Clean up the worktree: diff --git a/docs/PROVAR_TOOL_GUIDE.md b/docs/PROVAR_TOOL_GUIDE.md index a6d46c8..651e1bc 100644 --- a/docs/PROVAR_TOOL_GUIDE.md +++ b/docs/PROVAR_TOOL_GUIDE.md @@ -39,14 +39,21 @@ Fixed sequence — do not skip steps: No properties file yet? Generate one first: ``` -provar_properties_generate { project_path, connection_name } +provar_properties_generate { output_path } ← required; path to write the .json file + { project_path } ← optional; pre-fills projectPath field +``` + +Then set the connection name: + +``` +provar_properties_set { file_path: "", key: "connectionName", value: "" } ``` ### Via Quality Hub (remote) ``` 1. provar_qualityhub_connect { target_org } -2. provar_qualityhub_testrun { target_org, flags: ["--plan", ""] } +2. 
provar_qualityhub_testrun { target_org, flags: ["--plan-name", ""] } 3. provar_qualityhub_testrun_report { target_org, run_id } ← poll until done ``` @@ -61,7 +68,7 @@ provar_properties_generate { project_path, connection_name } ``` 1. provar_testrun_report_locate { project_path } ← find where results landed -2. provar_testrun_rca { report_path, mode: "rca" } +2. provar_testrun_rca { project_path } ← required; results_path/run_index optional ``` `provar_testrun_rca` classifies each failure (auth, locator, assertion, data, etc.) and gives a recommendation per failure. Use `mode: "failures"` for the raw failure list without classification. @@ -73,10 +80,10 @@ provar_properties_generate { project_path, connection_name } ``` 1. provar_project_inspect { project_path } ← find coverage gaps first 2. provar_testcase_generate { project_path, name, ... } -3. provar_testcase_step_edit { file_path, ... } ← repeat per step +3. provar_testcase_step_edit { test_case_path, ... } ← repeat per step 4. provar_testcase_validate { file_path } ← must pass before adding to plan -5. provar_testplan_add_instance { plan_path, testcase_path } -6. provar_testplan_validate { plan_path } +5. provar_testplan_add-instance { project_path, plan_name, test_case_path } +6. provar_testplan_validate { project_path, plan_name } ``` --- @@ -146,11 +153,12 @@ Returns all connections in `.testproject`. Use the `name` field from each connec ## "I want to create a defect for a failed test" ``` -1. provar_testrun_rca { report_path, mode: "rca" } -2. provar_qualityhub_defect_create { target_org, ... } +1. provar_qualityhub_testrun { target_org, ... } ← captures run_id from response +2. provar_testrun_rca { project_path } ← classify failures +3. provar_qualityhub_defect_create { run_id, target_org } ← run_id from step 1 ``` -Requires Quality Hub to be connected. +Requires Quality Hub to be connected (`provar_qualityhub_connect` first). 
--- diff --git a/package.json b/package.json index 6048105..9de29e5 100644 --- a/package.json +++ b/package.json @@ -146,7 +146,7 @@ ] }, "compile": { - "command": "tsc -p . --pretty --incremental && shx mkdir -p lib/mcp/rules && shx cp src/mcp/rules/*.json lib/mcp/rules/ && shx mkdir -p lib/mcp/docs && shx cp docs/PROVAR_TEST_STEP_REFERENCE.md lib/mcp/docs/", + "command": "tsc -p . --pretty --incremental && shx mkdir -p lib/mcp/rules && shx cp src/mcp/rules/*.json lib/mcp/rules/ && shx mkdir -p lib/mcp/docs && shx cp docs/PROVAR_TEST_STEP_REFERENCE.md lib/mcp/docs/ && shx cp docs/PROVAR_TOOL_GUIDE.md lib/mcp/docs/", "files": [ "src/**/*.ts", "src/mcp/rules/*.json", diff --git a/src/mcp/prompts/guidePrompts.ts b/src/mcp/prompts/guidePrompts.ts index 06baf9a..855ac58 100644 --- a/src/mcp/prompts/guidePrompts.ts +++ b/src/mcp/prompts/guidePrompts.ts @@ -79,12 +79,12 @@ Then: provar_qualityhub_display to confirm the correct org is connected. If NOT_AUTHENTICATED: the user needs to run: sf org login web -o -### Step 5 — Retrieve available test plans +### Step 5 — Retrieve available test cases Call: provar_qualityhub_testcase_retrieve to show what's available to run. -Ask the user which plan they'd like to run first. +Ask the user which plan they'd like to run first (pass the plan name via --plan-name flag). ### Step 6 — Run first test -Call: provar_qualityhub_testrun with the chosen plan name. +Call: provar_qualityhub_testrun with flags: ["--plan-name", ""]. Poll with provar_qualityhub_testrun_report every 30–60 seconds until the run completes. Stop polling after 20 minutes and ask the user to check Quality Hub directly.` : `### Step 4 — Configure properties @@ -93,7 +93,8 @@ If a provardx-properties.json was found in step 2: Confirm provarHome and connectionName look correct. If no properties file was found: - Call: provar_properties_generate using the project path and the first connection name from step 3. + Call: provar_properties_generate with output_path (e.g. 
/provardx-properties.json) and optionally project_path. + Then call: provar_properties_set to set connectionName to the first connection name from step 3. ### Step 5 — Register the config Call: provar_automation_config_load with the properties file path. @@ -171,7 +172,7 @@ If you have tried the same fix 3 times and the error hasn't changed, STOP. Tell If there is a completed test run, use the RCA tool first: Call: provar_testrun_report_locate (with project path if available) - Call: provar_testrun_rca with mode: "rca" + Call: provar_testrun_rca with project_path (required) and optionally mode: "rca" The RCA tool classifies each failure and gives a recommendation per failure. Use it before reading raw stack traces. @@ -243,7 +244,7 @@ export function registerOrchestrationPrompt(server: McpServer): void { Required sequence — do not skip steps: 1. provar_project_inspect → confirm project root and connections exist -2. provar_properties_read OR provar_properties_generate +2. provar_properties_read OR provar_properties_generate (output_path required; set connectionName via provar_properties_set) 3. provar_automation_config_load ← MUST succeed before step 4 4. provar_automation_compile ← MUST succeed before step 5 5. provar_automation_testrun @@ -270,7 +271,7 @@ Required sequence — do not skip steps: 6. provar_testcase_generate → create the test case file 7. provar_testcase_step_edit → add steps (repeat as needed) 8. provar_testcase_validate → MUST pass before adding to a plan -9. provar_testplan_add_instance → add to an existing plan +9. provar_testplan_add-instance → add to an existing plan 10. 
provar_testplan_validate → validate the plan`, 'debug-failures': `## Debug Failing Tests @@ -320,7 +321,7 @@ provar_nitrox_generate OR provar_nitrox_patch provar_testcase_generate OR provar_testcase_step_edit └── provar_testcase_validate - └── provar_testplan_add_instance + └── provar_testplan_add-instance └── provar_testplan_validate ### Safe to run in parallel (no dependency between them) From bf68e54975d90805d1169ee293c70a9e1faa5ffb Mon Sep 17 00:00:00 2001 From: Michael Dailey <49916244+mrdailey99@users.noreply.github.com> Date: Fri, 8 May 2026 15:22:14 -0500 Subject: [PATCH 04/12] Bump package json version to 1.5.0-beta.18 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ba49b3e..bc86e98 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@provartesting/provardx-cli", "description": "A plugin for the Salesforce CLI to orchestrate testing activities and report quality metrics to Provar Quality Hub", - "version": "1.5.0-beta.17", + "version": "1.5.0-beta.18", "mcpName": "io.github.ProvarTesting/provar", "license": "BSD-3-Clause", "plugins": [ From d7532c3c5cc0625b49348fe697398b7580b48f60 Mon Sep 17 00:00:00 2001 From: Michael Dailey <49916244+mrdailey99@users.noreply.github.com> Date: Fri, 8 May 2026 15:23:02 -0500 Subject: [PATCH 05/12] Bump version to 1.5.0-beta.18 in server.json --- server.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server.json b/server.json index 4bf8741..76faa4b 100644 --- a/server.json +++ b/server.json @@ -14,12 +14,12 @@ "url": "https://github.com/ProvarTesting/provardx-cli", "source": "github" }, - "version": "1.5.0-beta.17", + "version": "1.5.0-beta.18", "packages": [ { "registryType": "npm", "identifier": "@provartesting/provardx-cli", - "version": "1.5.0-beta.17", + "version": "1.5.0-beta.18", "transport": { "type": "stdio" }, From 19000c086ffea803fb8d2ddba28d5cdd123961c4 Mon Sep 17 00:00:00 2001 From: Michael Dailey 
Date: Fri, 8 May 2026 15:46:31 -0500 Subject: [PATCH 06/12] PDX-463: feat(mcp): fetch NitroX component packages from factPackages repo at release time MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit RCA: NitroX component packages were statically bundled in the repo and not updated automatically; the source of truth is the ProvarTesting/factPackages GitHub repo (main branch), so packages would silently drift stale between releases. Fix: Added scripts/fetch-nitrox-packages.cjs to the prepack hook; it downloads all component package files from factPackages@main, regenerates NITROX_COMPONENT_CATALOG.md, and writes NITROX_CATALOG_SOURCE.json with the commit SHA. On failure (no token, network error) it logs a warning and falls back to the committed catalog — the release is never blocked. A new provar://nitrox/catalog-source MCP resource exposes the bundled version so consumers can verify which factPackages commit is in use. --- docs/NITROX_CATALOG_SOURCE.json | 6 + docs/mcp.md | 27 ++- package.json | 5 +- scripts/fetch-nitrox-packages.cjs | 296 ++++++++++++++++++++++++++++++ src/mcp/server.ts | 40 ++++ test/unit/mcp/server.test.ts | 53 +++++- 6 files changed, 423 insertions(+), 4 deletions(-) create mode 100644 docs/NITROX_CATALOG_SOURCE.json create mode 100644 scripts/fetch-nitrox-packages.cjs diff --git a/docs/NITROX_CATALOG_SOURCE.json b/docs/NITROX_CATALOG_SOURCE.json new file mode 100644 index 0000000..08ab05b --- /dev/null +++ b/docs/NITROX_CATALOG_SOURCE.json @@ -0,0 +1,6 @@ +{ + "repo": "https://github.com/ProvarTesting/factPackages", + "branch": "main", + "commitSha": null, + "fetchedAt": null +} diff --git a/docs/mcp.md b/docs/mcp.md index 6cc4f8d..86c0678 100644 --- a/docs/mcp.md +++ b/docs/mcp.md @@ -73,6 +73,7 @@ The Provar DX CLI ships with a built-in **Model Context Protocol (MCP) server** - [MCP Resources](#mcp-resources) - [provar://docs/step-reference](#provardocsstep-reference) - 
[provar://nitrox/component-catalog](#provarnitroxcomponent-catalog) + - [provar://nitrox/catalog-source](#provarnitroxcatalog-source) - [AI loop pattern](#ai-loop-pattern) - [Quality scores explained](#quality-scores-explained) - [API compatibility — `xml` vs `xml_content`](#api-compatibility--xml-vs-xml_content) @@ -1960,7 +1961,31 @@ Catalog of all shipped NitroX (Hybrid Model) base component packages. Lists ever **URI:** `provar://nitrox/component-catalog` **MIME type:** `text/markdown` -The resource content is the same as `docs/NITROX_COMPONENT_CATALOG.md` in this repository, compiled into the package at build time. To regenerate the catalog after Provar ships updated NitroX packages, run `node scripts/generate-nitrox-catalog.cjs` on a machine with Provar NitroX installed, then commit the result. +The resource content is the same as `docs/NITROX_COMPONENT_CATALOG.md` in this repository, compiled into the package at build time. + +The catalog is automatically refreshed from the `main` branch of [ProvarTesting/factPackages](https://github.com/ProvarTesting/factPackages) during each `provardx-cli` release build (via `scripts/fetch-nitrox-packages.cjs`). If the fetch fails at build time (e.g. no `GITHUB_TOKEN`, network unavailable), the previously committed catalog is used as a fallback and a warning is logged. + +To check which version is bundled in a running server, read the `provar://nitrox/catalog-source` resource. + +--- + +### `provar://nitrox/catalog-source` + +Version metadata for the bundled NitroX component catalog. Returns the `factPackages` commit SHA and fetch timestamp recorded during the release build that produced this package. 
+ +**URI:** `provar://nitrox/catalog-source` +**MIME type:** `application/json` + +```json +{ + "repo": "https://github.com/ProvarTesting/factPackages", + "branch": "main", + "commitSha": "<40-char SHA or null if fetched from fallback>", + "fetchedAt": "" +} +``` + +`commitSha` and `fetchedAt` are `null` when the release build could not reach GitHub (fallback catalog in use). --- diff --git a/package.json b/package.json index bc86e98..17143ed 100644 --- a/package.json +++ b/package.json @@ -127,7 +127,7 @@ "format": "wireit", "lint": "wireit", "postpack": "shx rm -f oclif.manifest.json", - "prepack": "sf-prepack", + "prepack": "node scripts/fetch-nitrox-packages.cjs && sf-prepack", "test": "wireit", "test:nuts": "nyc mocha \"**/*generate.nut.ts\" \"**/*permission.nut.ts\" \"**/*load.nut.ts\" \"**/*validate.nut.ts\" \"**/*set.nut.ts\" \"**/*get.nut.ts\" \"**/*key.nut.ts\" \"**/*status.nut.ts\" \"**/*clear.nut.ts\" --slow 4500 --timeout 600000 --reporter mochawesome", "test:only": "wireit", @@ -146,11 +146,12 @@ ] }, "compile": { - "command": "tsc -p . --pretty --incremental && shx mkdir -p lib/mcp/rules && shx cp src/mcp/rules/*.json lib/mcp/rules/ && shx mkdir -p lib/mcp/docs && shx cp docs/PROVAR_TEST_STEP_REFERENCE.md lib/mcp/docs/ && shx cp docs/NITROX_COMPONENT_CATALOG.md lib/mcp/docs/ && shx cp docs/PROVAR_TOOL_GUIDE.md lib/mcp/docs/", + "command": "tsc -p . 
--pretty --incremental && shx mkdir -p lib/mcp/rules && shx cp src/mcp/rules/*.json lib/mcp/rules/ && shx mkdir -p lib/mcp/docs && shx cp docs/PROVAR_TEST_STEP_REFERENCE.md lib/mcp/docs/ && shx cp docs/NITROX_COMPONENT_CATALOG.md lib/mcp/docs/ && shx cp docs/NITROX_CATALOG_SOURCE.json lib/mcp/docs/ && shx cp docs/PROVAR_TOOL_GUIDE.md lib/mcp/docs/", "files": [ "src/**/*.ts", "src/mcp/rules/*.json", "docs/NITROX_COMPONENT_CATALOG.md", + "docs/NITROX_CATALOG_SOURCE.json", "**/tsconfig.json", "messages/**" ], diff --git a/scripts/fetch-nitrox-packages.cjs b/scripts/fetch-nitrox-packages.cjs new file mode 100644 index 0000000..3a2cf0d --- /dev/null +++ b/scripts/fetch-nitrox-packages.cjs @@ -0,0 +1,296 @@ +#!/usr/bin/env node +/** + * Release pipeline utility: fetch the latest NitroX component packages + * from the ProvarTesting/factPackages GitHub repo (main branch) and + * regenerate docs/NITROX_COMPONENT_CATALOG.md. + * + * On success, writes docs/NITROX_CATALOG_SOURCE.json with the commit SHA + * so downstream consumers can verify which version was bundled. + * + * Falls back silently to the committed catalog when: + * - GITHUB_TOKEN / GH_TOKEN is not set in the environment + * - The GitHub API is unreachable + * - Any download fails + * + * The script always exits 0 so a fetch failure never blocks the release. 
+ * + * Environment: + * GITHUB_TOKEN or GH_TOKEN — required to access the private repo + */ + +'use strict'; + +const https = require('https'); +const fs = require('fs'); +const path = require('path'); +const os = require('os'); + +const REPO_OWNER = 'ProvarTesting'; +const REPO_NAME = 'factPackages'; +const BRANCH = 'main'; +const DOCS_DIR = path.join(__dirname, '..', 'docs'); +const OUTPUT_CATALOG = path.join(DOCS_DIR, 'NITROX_COMPONENT_CATALOG.md'); +const OUTPUT_SOURCE = path.join(DOCS_DIR, 'NITROX_CATALOG_SOURCE.json'); + +function warn(msg) { + console.warn(`[fetch-nitrox-packages] WARN: ${msg}`); +} + +function log(msg) { + console.log(`[fetch-nitrox-packages] ${msg}`); +} + +/** Wraps https.get with redirect support; resolves to the response body string. */ +function httpsGet(url, headers) { + return new Promise((resolve, reject) => { + const parsed = new URL(url); + const reqHeaders = { + 'User-Agent': 'provardx-cli/fetch-nitrox-packages', + Accept: 'application/json', + ...headers, + }; + const req = https.get( + { hostname: parsed.hostname, path: parsed.pathname + parsed.search, headers: reqHeaders }, + (res) => { + if (res.statusCode === 301 || res.statusCode === 302) { + resolve(httpsGet(res.headers.location, headers)); + return; + } + const chunks = []; + res.on('data', (chunk) => chunks.push(chunk)); + res.on('end', () => { + const body = Buffer.concat(chunks).toString('utf-8'); + if (res.statusCode >= 400) { + reject(new Error(`HTTP ${res.statusCode} from ${url}: ${body.slice(0, 200)}`)); + } else { + resolve(body); + } + }); + res.on('error', reject); + } + ); + req.on('error', reject); + }); +} + +/** Downloads raw file bytes (supports redirect); resolves to a Buffer. 
*/ +function httpsGetBuffer(url, headers) { + return new Promise((resolve, reject) => { + const parsed = new URL(url); + const req = https.get({ hostname: parsed.hostname, path: parsed.pathname + parsed.search, headers }, (res) => { + if (res.statusCode === 301 || res.statusCode === 302) { + resolve(httpsGetBuffer(res.headers.location, headers)); + return; + } + const chunks = []; + res.on('data', (chunk) => chunks.push(chunk)); + res.on('end', () => { + if (res.statusCode >= 400) { + reject(new Error(`HTTP ${res.statusCode} from ${url}`)); + } else { + resolve(Buffer.concat(chunks)); + } + }); + res.on('error', reject); + }); + req.on('error', reject); + }); +} + +function apiHeaders(token) { + return { + Authorization: `Bearer ${token}`, + Accept: 'application/vnd.github+json', + 'X-GitHub-Api-Version': '2022-11-28', + 'User-Agent': 'provardx-cli/fetch-nitrox-packages', + }; +} + +async function getLatestCommitSha(token) { + const url = `https://api.github.com/repos/${REPO_OWNER}/${REPO_NAME}/commits/${BRANCH}`; + const body = await httpsGet(url, apiHeaders(token)); + const data = JSON.parse(body); + if (typeof data.sha !== 'string') throw new Error('No commit SHA in GitHub API response'); + return data.sha; +} + +async function getTree(sha, token) { + const url = `https://api.github.com/repos/${REPO_OWNER}/${REPO_NAME}/git/trees/${sha}?recursive=1`; + const body = await httpsGet(url, apiHeaders(token)); + const data = JSON.parse(body); + if (!Array.isArray(data.tree)) throw new Error('Unexpected tree response shape'); + return data.tree; +} + +/** Matches top-level package.json files: e.g. 
"common/package.json" */ +const PKG_JSON_RE = /^[^/]+\/package\.json$/; +/** Matches component definitions nested under a components/ dir */ +const COMPONENT_FILE_RE = /^[^/]+\/components\/[^/]+\.(cp|po)\.json$/; + +function isRelevant(treePath) { + return PKG_JSON_RE.test(treePath) || COMPONENT_FILE_RE.test(treePath); +} + +async function downloadRaw(filePath, token) { + const url = `https://raw.githubusercontent.com/${REPO_OWNER}/${REPO_NAME}/${BRANCH}/${filePath}`; + const headers = token ? { Authorization: `Bearer ${token}` } : {}; + return httpsGetBuffer(url, headers); +} + +// ── Catalog generation (mirrors generate-nitrox-catalog.cjs) ──────────────── + +function safeReadJson(filePath) { + try { + return JSON.parse(fs.readFileSync(filePath, 'utf-8')); + } catch { + return null; + } +} + +function renderComponent(comp) { + const lines = []; + const heading = comp.label ?? comp.name ?? '(unnamed)'; + lines.push(`#### ${heading}`, ''); + if (comp.name) lines.push(`- **name:** \`${comp.name}\``); + if (comp.type) lines.push(`- **type:** \`${comp.type}\``); + if (comp.tagName) lines.push(`- **tagName:** \`${comp.tagName}\``); + + const interactions = (comp.interactions ?? []).map((i) => i.title ?? i.name ?? '').filter(Boolean); + if (interactions.length > 0) { + lines.push(`- **interactions:** ${interactions.map((n) => `\`${n}\``).join(', ')}`); + } + + const attributes = (comp.attributes ?? []).map((a) => a.title ?? a.attributeName ?? '').filter(Boolean); + if (attributes.length > 0) { + lines.push(`- **attributes:** ${attributes.map((n) => `\`${n}\``).join(', ')}`); + } + + const elementCount = (comp.elements ?? 
[]).length; + if (elementCount > 0) lines.push(`- **child elements:** ${elementCount}`); + + lines.push(''); + return lines.join('\n'); +} + +function buildCatalogFromDir(baseDir, commitSha) { + const pkgDirEntries = fs + .readdirSync(baseDir, { withFileTypes: true }) + .filter((d) => d.isDirectory()) + .sort((a, b) => a.name.localeCompare(b.name)); + + const lines = [ + '# NitroX Component Package Catalog', + '', + 'Shipped base NitroX (Hybrid Model) component packages.', + 'Use as a reference when generating new NitroX components — match naming conventions,', + 'type strings, tagNames, interaction titles, and attribute names from these shipped packages.', + '', + `_Source: [ProvarTesting/factPackages@${commitSha.slice( + 0, + 7 + )}](https://github.com/ProvarTesting/factPackages/tree/${commitSha})_`, + '', + '---', + '', + ]; + + for (const entry of pkgDirEntries) { + const pkgDir = path.join(baseDir, entry.name); + const meta = safeReadJson(path.join(pkgDir, 'package.json')) ?? {}; + + const displayName = meta.name ?? entry.name; + const displayVersion = meta.version ? 
` (v${meta.version})` : ''; + lines.push(`## ${displayName}${displayVersion}`); + + if (meta.description) lines.push('', meta.description); + if (meta.provarVersion) lines.push(`**Requires Provar:** ${meta.provarVersion}`); + lines.push(''); + + const componentsDir = path.join(pkgDir, 'components'); + if (!fs.existsSync(componentsDir)) { + lines.push('_No component definitions found._', '', '---', ''); + continue; + } + + const componentFiles = fs + .readdirSync(componentsDir) + .filter((f) => f.endsWith('.cp.json') || f.endsWith('.po.json')) + .sort() + .map((f) => path.join(componentsDir, f)); + + if (componentFiles.length === 0) { + lines.push('_No component definitions found._', '', '---', ''); + continue; + } + + lines.push('### Components', ''); + for (const compFile of componentFiles) { + const parsed = safeReadJson(compFile); + if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) { + lines.push(renderComponent(parsed)); + } + } + + lines.push('---', ''); + } + + return lines.join('\n'); +} + +// ── Main ───────────────────────────────────────────────────────────────────── + +async function main() { + const token = process.env['GITHUB_TOKEN'] || process.env['GH_TOKEN']; + + if (!token) { + warn('No GITHUB_TOKEN or GH_TOKEN set — skipping factPackages fetch, using bundled catalog'); + return; + } + + const tmpDir = path.join(os.tmpdir(), `nitrox-fact-packages-${Date.now()}`); + + try { + log(`Fetching latest commit on ${REPO_OWNER}/${REPO_NAME}@${BRANCH}...`); + const commitSha = await getLatestCommitSha(token); + log(`Commit: ${commitSha}`); + + log('Fetching file tree...'); + const tree = await getTree(commitSha, token); + const relevant = tree.filter((f) => f.type === 'blob' && isRelevant(f.path)); + log(`Downloading ${relevant.length} component files...`); + + for (const file of relevant) { + const destPath = path.join(tmpDir, file.path); + fs.mkdirSync(path.dirname(destPath), { recursive: true }); + const content = await 
downloadRaw(file.path, token); + fs.writeFileSync(destPath, content); + } + + log('Generating catalog...'); + const catalog = buildCatalogFromDir(tmpDir, commitSha); + fs.writeFileSync(OUTPUT_CATALOG, catalog, 'utf-8'); + log(`Written: docs/NITROX_COMPONENT_CATALOG.md (${catalog.split('\n').length} lines)`); + + const sourceInfo = { + repo: `https://github.com/${REPO_OWNER}/${REPO_NAME}`, + branch: BRANCH, + commitSha, + fetchedAt: new Date().toISOString(), + }; + fs.writeFileSync(OUTPUT_SOURCE, JSON.stringify(sourceInfo, null, 2) + '\n', 'utf-8'); + log(`Written: docs/NITROX_CATALOG_SOURCE.json (commitSha: ${commitSha.slice(0, 7)})`); + } catch (err) { + warn(`Fetch failed — ${String(err instanceof Error ? err.message : err)}`); + warn('Falling back to bundled catalog; release will use existing NITROX_COMPONENT_CATALOG.md'); + } finally { + try { + if (fs.existsSync(tmpDir)) fs.rmSync(tmpDir, { recursive: true, force: true }); + } catch { + // ignore cleanup errors + } + } +} + +main().catch((err) => { + warn(`Unexpected error — ${String(err instanceof Error ? err.message : err)}`); +}); diff --git a/src/mcp/server.ts b/src/mcp/server.ts index 769f13c..cef8f87 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -135,6 +135,22 @@ export function createProvarMcpServer(config: ServerConfig): McpServer { } ); + server.resource( + 'provar-nitrox-catalog-source', + 'provar://nitrox/catalog-source', + { + description: + 'Version metadata for the bundled NitroX component catalog. Returns the factPackages commit SHA and fetch timestamp from the last successful release build. 
Use this to verify which version of the ProvarTesting/factPackages repo is bundled in the running MCP server.', + mimeType: 'application/json', + }, + () => { + const text = readCatalogSource(docsDir); + return { + contents: [{ uri: 'provar://nitrox/catalog-source', mimeType: 'application/json', text }], + }; + } + ); + server.resource( 'provar-step-reference', 'provar://docs/step-reference', @@ -209,3 +225,27 @@ export function resolveDocsDir(currentDir: string): string { const sibling = join(currentDir, 'docs'); return existsSync(sibling) ? sibling : join(currentDir, '..', '..', 'docs'); } + +/** + * Read NITROX_CATALOG_SOURCE.json from the docs directory and return it as + * a formatted JSON string. Returns a fallback object string if the file is + * absent or unreadable. + */ +export function readCatalogSource(docsDir: string): string { + try { + const raw = readFileSync(join(docsDir, 'NITROX_CATALOG_SOURCE.json'), 'utf-8'); + // Round-trip through JSON to normalise formatting + return JSON.stringify(JSON.parse(raw) as unknown, null, 2); + } catch { + return JSON.stringify( + { + repo: 'https://github.com/ProvarTesting/factPackages', + branch: 'main', + commitSha: null, + fetchedAt: null, + }, + null, + 2 + ); + } +} diff --git a/test/unit/mcp/server.test.ts b/test/unit/mcp/server.test.ts index 4692c15..b4797f8 100644 --- a/test/unit/mcp/server.test.ts +++ b/test/unit/mcp/server.test.ts @@ -10,7 +10,7 @@ import path from 'node:path'; import fs from 'node:fs'; import os from 'node:os'; import { describe, it, afterEach } from 'mocha'; -import { resolveDocsDir } from '../../../src/mcp/server.js'; +import { resolveDocsDir, readCatalogSource } from '../../../src/mcp/server.js'; describe('resolveDocsDir', () => { const tmpDirs: string[] = []; @@ -45,3 +45,54 @@ describe('resolveDocsDir', () => { assert.equal(resolveDocsDir(base), expected); }); }); + +describe('readCatalogSource', () => { + const tmpDirs: string[] = []; + + afterEach(() => { + for (const d of 
tmpDirs) { + try { + fs.rmSync(d, { recursive: true, force: true }); + } catch { + // ignore + } + } + tmpDirs.length = 0; + }); + + function makeTmpDir(): string { + const d = fs.mkdtempSync(path.join(os.tmpdir(), 'provar-server-test-')); + tmpDirs.push(d); + return d; + } + + it('returns parsed JSON when NITROX_CATALOG_SOURCE.json is present', () => { + const docsDir = makeTmpDir(); + const source = { + repo: 'https://github.com/ProvarTesting/factPackages', + branch: 'main', + commitSha: 'abc1234567890', + fetchedAt: '2026-05-08T10:00:00.000Z', + }; + fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), JSON.stringify(source)); + const result = JSON.parse(readCatalogSource(docsDir)) as typeof source; + assert.equal(result.commitSha, 'abc1234567890'); + assert.equal(result.branch, 'main'); + assert.equal(result.fetchedAt, '2026-05-08T10:00:00.000Z'); + }); + + it('returns fallback object when the file is absent', () => { + const docsDir = makeTmpDir(); + const result = JSON.parse(readCatalogSource(docsDir)) as Record; + assert.equal(result['commitSha'], null); + assert.equal(result['fetchedAt'], null); + assert.equal(result['repo'], 'https://github.com/ProvarTesting/factPackages'); + }); + + it('returns fallback object when the file contains invalid JSON', () => { + const docsDir = makeTmpDir(); + fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), '{bad json'); + const result = JSON.parse(readCatalogSource(docsDir)) as Record; + assert.equal(result['commitSha'], null); + }); +}); From 444d7e0c2c0dcfdab8369ec5c4cb520176395095 Mon Sep 17 00:00:00 2001 From: Michael Dailey Date: Fri, 8 May 2026 22:03:04 -0500 Subject: [PATCH 07/12] PDX-463: fix(mcp): correct factPackages path layout and fix lint warnings RCA: The factPackages repo stores component files under fact-*/src/components/ not fact-*/components/, so the path-matching regexes and catalog builder needed updating; additionally nine pre-existing unicorn/numeric-separators-style lint 
warnings in updateChecker.ts and its test file were left unaddressed. Fix: Updated PKG_JSON_RE and COMPONENT_FILE_RE in fetch-nitrox-packages.cjs to match the fact-*/src/ layout and adjusted buildCatalogFromDir to navigate the src/ subdirectory; ran eslint --fix on updateChecker.ts and updateChecker.test.ts to resolve all numeric-separator warnings, leaving the project at 0 lint errors and 0 warnings. --- scripts/fetch-nitrox-packages.cjs | 17 ++++++++++------- src/mcp/update/updateChecker.ts | 8 ++++---- test/unit/mcp/updateChecker.test.ts | 8 ++++---- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/scripts/fetch-nitrox-packages.cjs b/scripts/fetch-nitrox-packages.cjs index 3a2cf0d..402e0e7 100644 --- a/scripts/fetch-nitrox-packages.cjs +++ b/scripts/fetch-nitrox-packages.cjs @@ -122,10 +122,10 @@ async function getTree(sha, token) { return data.tree; } -/** Matches top-level package.json files: e.g. "common/package.json" */ -const PKG_JSON_RE = /^[^/]+\/package\.json$/; -/** Matches component definitions nested under a components/ dir */ -const COMPONENT_FILE_RE = /^[^/]+\/components\/[^/]+\.(cp|po)\.json$/; +// Matches fact-* package manifests: e.g. "fact-common/src/package.json" +const PKG_JSON_RE = /^[^/]+\/src\/package\.json$/; +// Matches component definitions under fact-{pkg}/src/components/ +const COMPONENT_FILE_RE = /^[^/]+\/src\/components\/[^/]+\.(cp|po)\.json$/; function isRelevant(treePath) { return PKG_JSON_RE.test(treePath) || COMPONENT_FILE_RE.test(treePath); @@ -195,8 +195,11 @@ function buildCatalogFromDir(baseDir, commitSha) { ]; for (const entry of pkgDirEntries) { - const pkgDir = path.join(baseDir, entry.name); - const meta = safeReadJson(path.join(pkgDir, 'package.json')) ?? {}; + // factPackages stores package content under a src/ subdirectory + const srcDir = path.join(baseDir, entry.name, 'src'); + if (!fs.existsSync(srcDir)) continue; + + const meta = safeReadJson(path.join(srcDir, 'package.json')) ?? 
{}; const displayName = meta.name ?? entry.name; const displayVersion = meta.version ? ` (v${meta.version})` : ''; @@ -206,7 +209,7 @@ function buildCatalogFromDir(baseDir, commitSha) { if (meta.provarVersion) lines.push(`**Requires Provar:** ${meta.provarVersion}`); lines.push(''); - const componentsDir = path.join(pkgDir, 'components'); + const componentsDir = path.join(srcDir, 'components'); if (!fs.existsSync(componentsDir)) { lines.push('_No component definitions found._', '', '---', ''); continue; diff --git a/src/mcp/update/updateChecker.ts b/src/mcp/update/updateChecker.ts index a77a5af..354b746 100644 --- a/src/mcp/update/updateChecker.ts +++ b/src/mcp/update/updateChecker.ts @@ -32,14 +32,14 @@ interface UpdateCacheEntry { channel: string; } -const UPDATE_TTL_MS = 4 * 60 * 60 * 1_000; -const UPDATE_GRACE_MS = 48 * 60 * 60 * 1_000; +const UPDATE_TTL_MS = 4 * 60 * 60 * 1000; +const UPDATE_GRACE_MS = 48 * 60 * 60 * 1000; const SPAWN_OPTS = { stdio: ['ignore', 'pipe', 'pipe'] as const, timeout: 30_000, shell: process.platform === 'win32', - maxBuffer: 10 * 1_024 * 1_024, + maxBuffer: 10 * 1024 * 1024, } satisfies SpawnSyncOptions; const SEMVER_RE = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.]+)?$/; @@ -140,7 +140,7 @@ function resultFromCache(cached: UpdateCacheEntry, currentVersion: string): Chec async function fetchLatestVersion(channel: string): Promise { const controller = new AbortController(); - const timer = setTimeout(() => controller.abort(), 5_000); + const timer = setTimeout(() => controller.abort(), 5000); try { const resp = await fetch('https://registry.npmjs.org/@provartesting/provardx-cli', { signal: controller.signal, diff --git a/test/unit/mcp/updateChecker.test.ts b/test/unit/mcp/updateChecker.test.ts index 1093c08..9668d20 100644 --- a/test/unit/mcp/updateChecker.test.ts +++ b/test/unit/mcp/updateChecker.test.ts @@ -171,7 +171,7 @@ describe('checkForUpdate', () => { const { currentVersion } = await checkForUpdate({ noUpdateCheck: true, autoUpdate: false 
}); const channel = deriveChannel(currentVersion); writeFreshCache({ - checkedAt: Date.now() - 30 * 60 * 1_000, // 30 min ago + checkedAt: Date.now() - 30 * 60 * 1000, // 30 min ago currentVersion, latestVersion: currentVersion, channel, @@ -188,7 +188,7 @@ describe('checkForUpdate', () => { it('fetches registry when cache is stale (>4h)', async () => { writeFreshCache({ - checkedAt: Date.now() - 5 * 60 * 60 * 1_000, // 5 hours ago + checkedAt: Date.now() - 5 * 60 * 60 * 1000, // 5 hours ago currentVersion: '1.5.0-beta.10', latestVersion: '1.5.0-beta.10', channel: 'beta', @@ -252,7 +252,7 @@ describe('checkForUpdate', () => { it('returns updateAvailable=false when cache is >48h stale and fetch fails', async () => { writeFreshCache({ - checkedAt: Date.now() - 50 * 60 * 60 * 1_000, // 50 hours ago + checkedAt: Date.now() - 50 * 60 * 60 * 1000, // 50 hours ago currentVersion: '1.5.0-beta.10', latestVersion: '1.5.0-beta.10', channel: 'beta', @@ -280,7 +280,7 @@ describe('checkForUpdate', () => { it('returns stale cache within 48h grace period when fetch fails', async () => { writeFreshCache({ - checkedAt: Date.now() - 6 * 60 * 60 * 1_000, // 6 hours ago (stale but within 48h) + checkedAt: Date.now() - 6 * 60 * 60 * 1000, // 6 hours ago (stale but within 48h) currentVersion: '1.5.0-beta.10', latestVersion: '1.5.0-beta.10', channel: 'beta', From 019bb87031e0501486537cb6af9b5c3d8b56ff0a Mon Sep 17 00:00:00 2001 From: Michael Dailey Date: Fri, 8 May 2026 22:06:42 -0500 Subject: [PATCH 08/12] PDX-463: fix(mcp): address PR review comments on fetch-nitrox-packages RCA: downloadRaw() used the branch name (main) in the raw URL rather than the resolved commit SHA, so files could be fetched from a different commit than the one the tree listing described; additionally both httpsGet and httpsGetBuffer had no timeout, meaning a stalled network connection would block prepack indefinitely. 
Fix: Added REQUEST_TIMEOUT_MS (15s) to both http helpers via req.setTimeout/req.destroy so hangs fail fast and fall through to the graceful fallback; updated downloadRaw to accept and use the commitSha parameter so all downloads are pinned to the same commit as the tree. --- scripts/fetch-nitrox-packages.cjs | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/scripts/fetch-nitrox-packages.cjs b/scripts/fetch-nitrox-packages.cjs index 402e0e7..c0a7d3d 100644 --- a/scripts/fetch-nitrox-packages.cjs +++ b/scripts/fetch-nitrox-packages.cjs @@ -40,7 +40,9 @@ function log(msg) { console.log(`[fetch-nitrox-packages] ${msg}`); } -/** Wraps https.get with redirect support; resolves to the response body string. */ +const REQUEST_TIMEOUT_MS = 15_000; + +/** Wraps https.get with redirect support and a per-request timeout; resolves to the response body string. */ function httpsGet(url, headers) { return new Promise((resolve, reject) => { const parsed = new URL(url); @@ -69,11 +71,14 @@ function httpsGet(url, headers) { res.on('error', reject); } ); + req.setTimeout(REQUEST_TIMEOUT_MS, () => { + req.destroy(new Error(`Request timed out after ${REQUEST_TIMEOUT_MS}ms: ${url}`)); + }); req.on('error', reject); }); } -/** Downloads raw file bytes (supports redirect); resolves to a Buffer. */ +/** Downloads raw file bytes (supports redirect and per-request timeout); resolves to a Buffer. 
*/ function httpsGetBuffer(url, headers) { return new Promise((resolve, reject) => { const parsed = new URL(url); @@ -93,6 +98,9 @@ function httpsGetBuffer(url, headers) { }); res.on('error', reject); }); + req.setTimeout(REQUEST_TIMEOUT_MS, () => { + req.destroy(new Error(`Request timed out after ${REQUEST_TIMEOUT_MS}ms: ${url}`)); + }); req.on('error', reject); }); } @@ -131,8 +139,9 @@ function isRelevant(treePath) { return PKG_JSON_RE.test(treePath) || COMPONENT_FILE_RE.test(treePath); } -async function downloadRaw(filePath, token) { - const url = `https://raw.githubusercontent.com/${REPO_OWNER}/${REPO_NAME}/${BRANCH}/${filePath}`; +async function downloadRaw(filePath, commitSha, token) { + // Pin to the resolved commit SHA so all downloads are consistent with the tree listing + const url = `https://raw.githubusercontent.com/${REPO_OWNER}/${REPO_NAME}/${commitSha}/${filePath}`; const headers = token ? { Authorization: `Bearer ${token}` } : {}; return httpsGetBuffer(url, headers); } @@ -265,7 +274,7 @@ async function main() { for (const file of relevant) { const destPath = path.join(tmpDir, file.path); fs.mkdirSync(path.dirname(destPath), { recursive: true }); - const content = await downloadRaw(file.path, token); + const content = await downloadRaw(file.path, commitSha, token); fs.writeFileSync(destPath, content); } From cccf37f8e61d42513e9400b010c13a61844743c1 Mon Sep 17 00:00:00 2001 From: Michael Dailey <49916244+mrdailey99@users.noreply.github.com> Date: Mon, 11 May 2026 12:56:03 -0500 Subject: [PATCH 09/12] PDX-465: feat(mcp): add bin entry to enable zero-install npx MCP server startup (#158) RCA: No bin entry in package.json forced users through a two-step sf CLI plugin install before connecting Claude Desktop, creating unnecessary onboarding friction. 
Fix: Added provardx bin entry pointing to bin/mcp-start.js; lightweight ESM entrypoint parses mcp start flags, validates --allowed-paths as required, then delegates to the same server bootstrap used by the sf plugin path. --- README.md | 54 ++++++++++++++++++++----- bin/mcp-start.js | 74 ++++++++++++++++++++++++++++++++++ package.json | 4 ++ test/unit/bin/mcpStart.test.ts | 51 +++++++++++++++++++++++ 4 files changed, 173 insertions(+), 10 deletions(-) create mode 100644 bin/mcp-start.js create mode 100644 test/unit/bin/mcpStart.test.ts diff --git a/README.md b/README.md index 7ea495e..a3437f9 100644 --- a/README.md +++ b/README.md @@ -39,26 +39,54 @@ Validation runs in two modes: **local only** (structural rules, no key required) ## Quick setup -**Requires:** Provar Automation IDE installed with an activated license. +**Requires:** Provar Automation IDE installed with an activated license. Node.js 18–24 must be on your PATH. -```sh -# 1. Install the plugin — @beta is required for MCP support -sf plugins install @provartesting/provardx-cli@beta +### Option A — Zero-install (recommended for Claude Desktop) -# 2. (Optional) Authenticate for full 170+ rule validation -sf provar auth login +No prior setup needed. Paste this into your Claude Desktop config file and restart the app: + +- macOS: `~/Library/Application Support/Claude/claude_desktop_config.json` +- Windows: `%APPDATA%\Claude\claude_desktop_config.json` + +```json +{ + "mcpServers": { + "provar": { + "command": "npx", + "args": [ + "-y", + "@provartesting/provardx-cli@beta", + "mcp", + "start", + "--allowed-paths", + "/path/to/your/provar/project" + ] + } + } +} ``` +`npx -y` downloads the package automatically on first use — no `sf` or separate install step required. 
+ **Claude Code** — run once to register the server: ```sh -claude mcp add provar -s user -- sf provar mcp start --allowed-paths /path/to/your/provar/project +claude mcp add provar -s user -- npx -y @provartesting/provardx-cli@beta mcp start --allowed-paths /path/to/your/provar/project ``` -**Claude Desktop** — add to your config file and restart the app: +### Option B — Global sf plugin install -- macOS: `~/Library/Application Support/Claude/claude_desktop_config.json` -- Windows: `%APPDATA%\Claude\claude_desktop_config.json` +Prefer a persistent global install? Install once, then use the `sf` command: + +```sh +# 1. Install the plugin — @beta is required for MCP support +sf plugins install @provartesting/provardx-cli@beta + +# 2. (Optional) Authenticate for full 170+ rule validation +sf provar auth login +``` + +**Claude Desktop** config using the global install: ```json { @@ -73,6 +101,12 @@ claude mcp add provar -s user -- sf provar mcp start --allowed-paths /path/to/yo > **Windows (Claude Desktop):** Use `sf.cmd` instead of `sf` if the server fails to start. +**Claude Code** using the global install: + +```sh +claude mcp add provar -s user -- sf provar mcp start --allowed-paths /path/to/your/provar/project +``` + 📖 **[docs/mcp.md](https://github.com/ProvarTesting/provardx-cli/blob/main/docs/mcp.md) — full setup, all 35+ tools, 7 MCP prompts, troubleshooting.** --- diff --git a/bin/mcp-start.js b/bin/mcp-start.js new file mode 100644 index 0000000..439b31f --- /dev/null +++ b/bin/mcp-start.js @@ -0,0 +1,74 @@ +#!/usr/bin/env node +// Lightweight zero-install entrypoint for the Provar MCP server. 
+// Usage: npx -y @provartesting/provardx-cli@beta mcp start --allowed-paths /path/to/project + +const args = process.argv.slice(2); + +if (args[0] !== 'mcp' || args[1] !== 'start') { + process.stderr.write( + 'Usage: provardx mcp start --allowed-paths [--auto-defects] [--auto-update] [--no-update-check]\n' + ); + process.exit(1); +} + +const remaining = args.slice(2); +/** @type {string[]} */ +const allowedPaths = []; +let autoDefects = false; +let autoUpdate = false; +let noUpdateCheck = false; + +for (let i = 0; i < remaining.length; i++) { + const arg = remaining[i]; + if (arg === '--allowed-paths' || arg === '-a') { + if (i + 1 >= remaining.length) { + process.stderr.write('[provar-mcp] Error: --allowed-paths requires a path value.\n'); + process.exit(1); + } + allowedPaths.push(remaining[++i]); + } else if (arg.startsWith('--allowed-paths=')) { + allowedPaths.push(arg.slice('--allowed-paths='.length)); + } else if (arg === '--auto-defects') { + autoDefects = true; + } else if (arg === '--auto-update') { + autoUpdate = true; + } else if (arg === '--no-update-check') { + noUpdateCheck = true; + } +} + +if (allowedPaths.length === 0) { + process.stderr.write( + '[provar-mcp] Error: --allowed-paths is required.\n' + + 'Example: npx -y @provartesting/provardx-cli@beta mcp start --allowed-paths /path/to/project\n' + ); + process.exit(1); +} + +if (autoDefects) { + process.env['PROVAR_AUTO_DEFECTS'] = '1'; +} + +// Dynamic imports placed after arg validation so early-exit paths need no compiled lib. 
+const { validateLicense, LicenseError } = await import('../lib/mcp/licensing/index.js'); +const { checkForUpdate } = await import('../lib/mcp/update/updateChecker.js'); +const { createProvarMcpServer } = await import('../lib/mcp/server.js'); +const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js'); + +try { + const result = await validateLicense(); + if (result.offlineGrace) { + process.stderr.write('[provar-mcp] Warning: license validated from offline cache (last checked > 2h ago).\n'); + } +} catch (err) { + if (err instanceof LicenseError) { + process.stderr.write(`[provar-mcp] Error: ${/** @type {Error} */ (err).message}\n`); + process.exit(1); + } + throw err; +} + +const updateResult = await checkForUpdate({ noUpdateCheck, autoUpdate }); +const server = createProvarMcpServer({ allowedPaths, updateResult }); +const transport = new StdioServerTransport(); +await server.connect(transport); diff --git a/package.json b/package.json index 17143ed..f1f95d1 100644 --- a/package.json +++ b/package.json @@ -38,7 +38,11 @@ "engines": { "node": ">=18.0.0 <25.0.0" }, + "bin": { + "provardx": "./bin/mcp-start.js" + }, "files": [ + "/bin/mcp-start.js", "/lib", "/messages", "/oclif.manifest.json" diff --git a/test/unit/bin/mcpStart.test.ts b/test/unit/bin/mcpStart.test.ts new file mode 100644 index 0000000..8ca0af5 --- /dev/null +++ b/test/unit/bin/mcpStart.test.ts @@ -0,0 +1,51 @@ +import { strict as assert } from 'node:assert'; +import { spawnSync } from 'node:child_process'; +import { fileURLToPath } from 'node:url'; +import { dirname, join } from 'node:path'; +import { describe, it } from 'mocha'; + +const currentDir = dirname(fileURLToPath(import.meta.url)); +const BIN_SCRIPT = join(currentDir, '../../../bin/mcp-start.js'); + +function runBin(args: string[]): { status: number | null; stderr: string } { + const result = spawnSync('node', [BIN_SCRIPT, ...args], { encoding: 'utf8' }); + return { status: result.status, stderr: 
result.stderr }; +} + +describe('bin/mcp-start.js — argument validation', () => { + it('exits 1 with usage when no arguments given', () => { + const { status, stderr } = runBin([]); + assert.equal(status, 1); + assert.ok(stderr.includes('Usage:'), `expected usage hint, got: ${stderr}`); + }); + + it('exits 1 with usage when "mcp" subcommand is missing', () => { + const { status, stderr } = runBin(['start']); + assert.equal(status, 1); + assert.ok(stderr.includes('Usage:'), `expected usage hint, got: ${stderr}`); + }); + + it('exits 1 with usage when only "mcp" is given without "start"', () => { + const { status, stderr } = runBin(['mcp']); + assert.equal(status, 1); + assert.ok(stderr.includes('Usage:'), `expected usage hint, got: ${stderr}`); + }); + + it('exits 1 with required-arg error when --allowed-paths is omitted', () => { + const { status, stderr } = runBin(['mcp', 'start']); + assert.equal(status, 1); + assert.ok(stderr.includes('--allowed-paths is required'), `expected required-arg error, got: ${stderr}`); + }); + + it('exits 1 with value-required error when --allowed-paths has no value', () => { + const { status, stderr } = runBin(['mcp', 'start', '--allowed-paths']); + assert.equal(status, 1); + assert.ok(stderr.includes('requires a path value'), `expected value-required error, got: ${stderr}`); + }); + + it('exits 1 with value-required error when -a has no value', () => { + const { status, stderr } = runBin(['mcp', 'start', '-a']); + assert.equal(status, 1); + assert.ok(stderr.includes('requires a path value'), `expected value-required error, got: ${stderr}`); + }); +}); From 877368267fdff7a0a31faab449d641e631639ce7 Mon Sep 17 00:00:00 2001 From: Michael Dailey <49916244+mrdailey99@users.noreply.github.com> Date: Mon, 11 May 2026 13:15:19 -0500 Subject: [PATCH 10/12] PDX-464: fetch NitroX schemas from internal source at build time (#157) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * PDX-464: feat(mcp): 
fetch NitroX schemas from internal source at build time RCA: FactComponent.schema and FactPackage.schema were bundled statically and never refreshed from the canonical internal source, risking stale schema validation in released packages. Fix: Extended fetch-nitrox-packages.cjs to download both schemas from the same commit SHA as the component catalog, write to src/mcp/rules/ and root-level copies, and record schemasUpdated in NITROX_CATALOG_SOURCE.json. Falls back to bundled schemas with a warning on any failure. * PDX-464: fix(mcp): address Copilot review — schema consumers, repo field, schemasUpdated normalisation RCA: Four review issues: docs incorrectly named runtime tools as schema consumers; repo field exposed internal URL in MCP resource; readCatalogSource did not normalise missing schemasUpdated from older build artifacts; fallback object also contained the internal URL. Fix: Corrected docs to describe IDE/SchemaStore as schema consumers; removed repo field from emitted JSON and fallback; normalised schemasUpdated to null in readCatalogSource try-path when field is absent; updated tests to cover the new normalisation and assert no repo field in fallback. --- docs/mcp.md | 12 ++--- scripts/fetch-nitrox-packages.cjs | 74 ++++++++++++++++++++++++++++--- src/mcp/server.ts | 11 +++-- test/unit/mcp/server.test.ts | 41 ++++++++++++++--- 4 files changed, 119 insertions(+), 19 deletions(-) diff --git a/docs/mcp.md b/docs/mcp.md index 86c0678..82627ac 100644 --- a/docs/mcp.md +++ b/docs/mcp.md @@ -1583,6 +1583,8 @@ The five `provar_nitrox_*` tools let an AI agent discover existing NitroX page o > **Note:** NitroX page objects are read and written directly from disk using the standard file-system path policy (`--allowed-paths`). No `sf` subprocess is involved. 
+> **Schema sourcing:** The `FactComponent.schema` and `FactPackage.schema` JSON schemas bundled in this package are used by editors and IDE tooling (e.g., VS Code JSON language server, SchemaStore) to provide IntelliSense when authoring `.po.json` files. They are fetched from an internal Provar source during each `provardx-cli` release build alongside the component catalog, so the bundled copies always reflect the latest NitroX specification. Both schemas are pinned to the same internal revision to avoid version skew. If the fetch fails at build time, the previously committed schemas are used as a fallback. Check `provar://nitrox/catalog-source` to see whether the schemas in a running server were successfully refreshed (`schemasUpdated: true`). + --- ### `provar_nitrox_discover` @@ -1963,7 +1965,7 @@ Catalog of all shipped NitroX (Hybrid Model) base component packages. Lists ever The resource content is the same as `docs/NITROX_COMPONENT_CATALOG.md` in this repository, compiled into the package at build time. -The catalog is automatically refreshed from the `main` branch of [ProvarTesting/factPackages](https://github.com/ProvarTesting/factPackages) during each `provardx-cli` release build (via `scripts/fetch-nitrox-packages.cjs`). If the fetch fails at build time (e.g. no `GITHUB_TOKEN`, network unavailable), the previously committed catalog is used as a fallback and a warning is logged. +The catalog is automatically refreshed from an internal Provar source during each `provardx-cli` release build. If the fetch fails at build time (e.g. network unavailable), the previously committed catalog is used as a fallback and a warning is logged. To check which version is bundled in a running server, read the `provar://nitrox/catalog-source` resource. @@ -1971,21 +1973,21 @@ To check which version is bundled in a running server, read the `provar://nitrox ### `provar://nitrox/catalog-source` -Version metadata for the bundled NitroX component catalog. 
Returns the `factPackages` commit SHA and fetch timestamp recorded during the release build that produced this package. +Version metadata for the bundled NitroX component catalog and JSON schemas. Returns the internal source commit SHA, fetch timestamp, and schema update status recorded during the release build that produced this package. **URI:** `provar://nitrox/catalog-source` **MIME type:** `application/json` ```json { - "repo": "https://github.com/ProvarTesting/factPackages", "branch": "main", "commitSha": "<40-char SHA or null if fetched from fallback>", - "fetchedAt": "" + "fetchedAt": "", + "schemasUpdated": "" } ``` -`commitSha` and `fetchedAt` are `null` when the release build could not reach GitHub (fallback catalog in use). +`commitSha` and `fetchedAt` are `null` when the release build could not reach the internal source (fallback catalog in use). `schemasUpdated` is `true` when both `FactComponent.schema` and `FactPackage.schema` were successfully fetched from the same internal revision and bundled into this release; `false` when the schema fetch failed and the previously committed schemas are in use; `null` when the catalog source was not generated (dev build or pre-PDX-464 release). --- diff --git a/scripts/fetch-nitrox-packages.cjs b/scripts/fetch-nitrox-packages.cjs index c0a7d3d..718f110 100644 --- a/scripts/fetch-nitrox-packages.cjs +++ b/scripts/fetch-nitrox-packages.cjs @@ -1,13 +1,14 @@ #!/usr/bin/env node /** * Release pipeline utility: fetch the latest NitroX component packages - * from the ProvarTesting/factPackages GitHub repo (main branch) and - * regenerate docs/NITROX_COMPONENT_CATALOG.md. + * and JSON schema files from the ProvarTesting/factPackages GitHub repo + * (main branch), regenerate docs/NITROX_COMPONENT_CATALOG.md, and update + * the bundled FactComponent.schema.json and FactPackage.schema.json. 
* * On success, writes docs/NITROX_CATALOG_SOURCE.json with the commit SHA * so downstream consumers can verify which version was bundled. * - * Falls back silently to the committed catalog when: + * Falls back silently to the committed catalog/schemas when: * - GITHUB_TOKEN / GH_TOKEN is not set in the environment * - The GitHub API is unreachable * - Any download fails @@ -32,6 +33,17 @@ const DOCS_DIR = path.join(__dirname, '..', 'docs'); const OUTPUT_CATALOG = path.join(DOCS_DIR, 'NITROX_COMPONENT_CATALOG.md'); const OUTPUT_SOURCE = path.join(DOCS_DIR, 'NITROX_CATALOG_SOURCE.json'); +// Destination directories for the JSON schema files +const SCHEMA_RULES_DIR = path.join(__dirname, '..', 'src', 'mcp', 'rules'); +const REPO_ROOT_DIR = path.join(__dirname, '..'); + +// Paths within the factPackages tree that contain the NitroX JSON schemas. +// Both files must come from the same commit so there is no version skew. +const SCHEMA_TREE_PATHS = new Set([ + 'fact-parent/src/resources/FactComponent.schema', + 'fact-parent/src/resources/FactPackage.schema', +]); + function warn(msg) { console.warn(`[fetch-nitrox-packages] WARN: ${msg}`); } @@ -139,6 +151,40 @@ function isRelevant(treePath) { return PKG_JSON_RE.test(treePath) || COMPONENT_FILE_RE.test(treePath); } +function isSchemaFile(treePath) { + return SCHEMA_TREE_PATHS.has(treePath); +} + +/** + * Download both NitroX schema files from factPackages at the given commit SHA + * and write them to src/mcp/rules/ (with .json extension) and to the repo root + * (without extension, for schemastore.org registration). + * + * Returns true on success. Warns and returns false if the expected files are + * absent from the tree. Throws on download or write errors so the caller can + * catch and fall back. 
+ */ +async function fetchAndWriteSchemas(tree, commitSha, token) { + const schemaFiles = tree.filter((f) => f.type === 'blob' && isSchemaFile(f.path)); + if (schemaFiles.length !== SCHEMA_TREE_PATHS.size) { + warn( + `Expected ${SCHEMA_TREE_PATHS.size} schema files in tree, found ${schemaFiles.length} — skipping schema update` + ); + return false; + } + + for (const file of schemaFiles) { + const content = await downloadRaw(file.path, commitSha, token); + const baseName = path.basename(file.path); // e.g. "FactComponent.schema" + // Write to src/mcp/rules/ with .json extension (picked up by the compile step) + fs.writeFileSync(path.join(SCHEMA_RULES_DIR, baseName + '.json'), content); + // Write to repo root without extension (for schemastore.org registration) + fs.writeFileSync(path.join(REPO_ROOT_DIR, baseName), content); + log(`Updated schema: ${baseName}.json (commitSha: ${commitSha.slice(0, 7)})`); + } + return true; +} + async function downloadRaw(filePath, commitSha, token) { // Pin to the resolved commit SHA so all downloads are consistent with the tree listing const url = `https://raw.githubusercontent.com/${REPO_OWNER}/${REPO_NAME}/${commitSha}/${filePath}`; @@ -283,17 +329,33 @@ async function main() { fs.writeFileSync(OUTPUT_CATALOG, catalog, 'utf-8'); log(`Written: docs/NITROX_COMPONENT_CATALOG.md (${catalog.split('\n').length} lines)`); + // ── Schema fetch ───────────────────────────────────────────────────────── + let schemasUpdated = false; + try { + schemasUpdated = await fetchAndWriteSchemas(tree, commitSha, token); + } catch (schemaErr) { + warn(`Schema fetch failed — ${String(schemaErr instanceof Error ? 
schemaErr.message : schemaErr)}`); + warn( + 'Falling back to bundled schemas; release will use existing FactComponent.schema.json and FactPackage.schema.json' + ); + } + const sourceInfo = { - repo: `https://github.com/${REPO_OWNER}/${REPO_NAME}`, branch: BRANCH, commitSha, fetchedAt: new Date().toISOString(), + schemasUpdated, }; fs.writeFileSync(OUTPUT_SOURCE, JSON.stringify(sourceInfo, null, 2) + '\n', 'utf-8'); - log(`Written: docs/NITROX_CATALOG_SOURCE.json (commitSha: ${commitSha.slice(0, 7)})`); + log( + `Written: docs/NITROX_CATALOG_SOURCE.json (commitSha: ${commitSha.slice( + 0, + 7 + )}, schemasUpdated: ${schemasUpdated})` + ); } catch (err) { warn(`Fetch failed — ${String(err instanceof Error ? err.message : err)}`); - warn('Falling back to bundled catalog; release will use existing NITROX_COMPONENT_CATALOG.md'); + warn('Falling back to bundled catalog and schemas; release will use existing NITROX_COMPONENT_CATALOG.md'); } finally { try { if (fs.existsSync(tmpDir)) fs.rmSync(tmpDir, { recursive: true, force: true }); diff --git a/src/mcp/server.ts b/src/mcp/server.ts index cef8f87..cbd906f 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -234,15 +234,20 @@ export function resolveDocsDir(currentDir: string): string { export function readCatalogSource(docsDir: string): string { try { const raw = readFileSync(join(docsDir, 'NITROX_CATALOG_SOURCE.json'), 'utf-8'); - // Round-trip through JSON to normalise formatting - return JSON.stringify(JSON.parse(raw) as unknown, null, 2); + const parsed = JSON.parse(raw) as Record; + // Normalise schemasUpdated so older build artifacts (which lack this field) + // return a stable shape rather than omitting the key entirely. 
+ if (!('schemasUpdated' in parsed)) { + parsed['schemasUpdated'] = null; + } + return JSON.stringify(parsed, null, 2); } catch { return JSON.stringify( { - repo: 'https://github.com/ProvarTesting/factPackages', branch: 'main', commitSha: null, fetchedAt: null, + schemasUpdated: null, }, null, 2 diff --git a/test/unit/mcp/server.test.ts b/test/unit/mcp/server.test.ts index b4797f8..f981c18 100644 --- a/test/unit/mcp/server.test.ts +++ b/test/unit/mcp/server.test.ts @@ -67,18 +67,47 @@ describe('readCatalogSource', () => { } it('returns parsed JSON when NITROX_CATALOG_SOURCE.json is present', () => { + const docsDir = makeTmpDir(); + const source = { branch: 'main', commitSha: 'abc1234567890', fetchedAt: '2026-05-08T10:00:00.000Z' }; + fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), JSON.stringify(source)); + const result = JSON.parse(readCatalogSource(docsDir)) as typeof source & { schemasUpdated: unknown }; + assert.equal(result.commitSha, 'abc1234567890'); + assert.equal(result.branch, 'main'); + assert.equal(result.fetchedAt, '2026-05-08T10:00:00.000Z'); + }); + + it('normalises missing schemasUpdated to null for files from older builds', () => { + const docsDir = makeTmpDir(); + const source = { branch: 'main', commitSha: 'abc1234567890', fetchedAt: '2026-05-08T10:00:00.000Z' }; + fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), JSON.stringify(source)); + const result = JSON.parse(readCatalogSource(docsDir)) as Record; + assert.equal(result['schemasUpdated'], null); + }); + + it('passes through schemasUpdated: true when present in the file', () => { const docsDir = makeTmpDir(); const source = { - repo: 'https://github.com/ProvarTesting/factPackages', branch: 'main', commitSha: 'abc1234567890', fetchedAt: '2026-05-08T10:00:00.000Z', + schemasUpdated: true, }; fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), JSON.stringify(source)); const result = JSON.parse(readCatalogSource(docsDir)) as typeof source; - 
assert.equal(result.commitSha, 'abc1234567890'); - assert.equal(result.branch, 'main'); - assert.equal(result.fetchedAt, '2026-05-08T10:00:00.000Z'); + assert.equal(result.schemasUpdated, true); + }); + + it('passes through schemasUpdated: false when schema fetch fell back', () => { + const docsDir = makeTmpDir(); + const source = { + branch: 'main', + commitSha: 'abc1234567890', + fetchedAt: '2026-05-08T10:00:00.000Z', + schemasUpdated: false, + }; + fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), JSON.stringify(source)); + const result = JSON.parse(readCatalogSource(docsDir)) as typeof source; + assert.equal(result.schemasUpdated, false); }); it('returns fallback object when the file is absent', () => { @@ -86,7 +115,8 @@ describe('readCatalogSource', () => { const result = JSON.parse(readCatalogSource(docsDir)) as Record; assert.equal(result['commitSha'], null); assert.equal(result['fetchedAt'], null); - assert.equal(result['repo'], 'https://github.com/ProvarTesting/factPackages'); + assert.equal(result['schemasUpdated'], null); + assert.ok(!('repo' in result), 'fallback should not expose an internal repo URL'); }); it('returns fallback object when the file contains invalid JSON', () => { @@ -94,5 +124,6 @@ describe('readCatalogSource', () => { fs.writeFileSync(path.join(docsDir, 'NITROX_CATALOG_SOURCE.json'), '{bad json'); const result = JSON.parse(readCatalogSource(docsDir)) as Record; assert.equal(result['commitSha'], null); + assert.equal(result['schemasUpdated'], null); }); }); From 8d483bb12c5fea316975d95f36fb5feaf3daba43 Mon Sep 17 00:00:00 2001 From: Michael Dailey <49916244+mrdailey99@users.noreply.github.com> Date: Tue, 12 May 2026 13:32:38 -0500 Subject: [PATCH 11/12] PDX-466: feat(mcp): add AJV JSON schema validation to provar_nitrox_validate (#159) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * PDX-466: feat(mcp): add AJV JSON schema validation alongside hardcoded NX rules RCA: 
provar_nitrox_validate only ran hardcoded NX001–NX010 semantic rules; structural errors (wrong types, extra properties, enum violations) encoded in FactComponent.schema.json were never caught at validation time. Fix: Added Ajv2020 as a runtime dependency; schema is lazily loaded from lib/mcp/rules/FactComponent.schema.json on first call and validated in parallel with existing rules. Violations are returned as NX_SCHEMA_ issues (ERROR for type/required, WARNING for additionalProperties/pattern/enum). Falls back to hardcoded-rules-only when schema is unavailable. * PDX-466: fix(test): replace no-explicit-any with typed ValidateFunction import in NX_SCHEMA tests RCA: ESLint no-explicit-any rule rejected the any parameter type used for the schemaOverride parameter in AJV schema override tests; the eslint-disable comment was positioned on the wrong line. Fix: Added import type { ValidateFunction } from ajv/dist/2020.js and replaced all any usages with properly typed ValidateFunction and a narrow IssueShape type alias for the return value. * PDX-466: fix(mcp): address Copilot review comments on nitrox-ajv-schema-validation RCA: Copilot flagged incorrect 'in parallel' wording (validation is synchronous/sequential), an overly broad ERROR severity mapping in ajvErrorToIssue (MIN_ITEMS/MINIMUM/MAXIMUM should be WARNING), and broken markdown rendering of NX_SCHEMA_* in docs (underscores parsed as italic markers). Fix: Reworded tool description and docs to 'sequential' passes; narrowed ERROR set to REQUIRED and TYPE only; fixed NX_SCHEMA_* heading and inline text with backtick quoting; updated docs table to show MIN_ITEMS as WARNING. 
--- docs/mcp.md | 22 +++++++++- package.json | 3 +- server.json | 4 +- src/mcp/tools/nitroXTools.ts | 64 ++++++++++++++++++++++++--- test/unit/mcp/nitroXTools.test.ts | 73 +++++++++++++++++++++++++++++++ 5 files changed, 154 insertions(+), 12 deletions(-) diff --git a/docs/mcp.md b/docs/mcp.md index 82627ac..577f225 100644 --- a/docs/mcp.md +++ b/docs/mcp.md @@ -1643,7 +1643,10 @@ Path policy is enforced per-file. A missing or unparseable file returns an `erro ### `provar_nitrox_validate` -Validate a NitroX `.po.json` (Hybrid Model component page object) against the FACT schema rules. Returns a quality score (0–100) and a list of issues. +Validate a NitroX `.po.json` (Hybrid Model component page object) against the FACT schema rules. Returns a quality score (0–100) and a combined list of issues from two sequential validation passes: + +1. **Hardcoded semantic rules (NX001–NX010)** — always run +2. **JSON schema validation (`NX_SCHEMA_*`)** — runs when the bundled `FactComponent.schema.json` is available; falls back to hardcoded-rules-only if the schema cannot be loaded Score formula: `100 − (20 × errors) − (5 × warnings) − (1 × infos)`, minimum 0. @@ -1659,7 +1662,7 @@ Score formula: `100 − (20 × errors) − (5 × warnings) − (1 × infos)`, mi | `issue_count` | Total issues | | `issues` | Array of `ValidationIssue` (see below) | -**Validation rules:** +**Hardcoded rules:** | Rule | Severity | Description | | ----- | -------- | -------------------------------------------------------------------------------------------------------------------------------------------- | @@ -1675,6 +1678,21 @@ Score formula: `100 − (20 × errors) − (5 × warnings) − (1 × infos)`, mi | NX009 | INFO | Interaction `name` contains characters outside `[A-Za-z0-9 ]` | | NX010 | INFO | `bodyTagName` contains whitespace | +**JSON schema rules (`NX_SCHEMA_*`):** + +Rule IDs follow the pattern `NX_SCHEMA_` where `` is the AJV validation keyword in `SCREAMING_SNAKE_CASE`. 
Common rule IDs: + +| Rule ID | Severity | Description | +| --------------------------------- | -------- | ------------------------------------------------------------------------------ | +| `NX_SCHEMA_TYPE` | ERROR | Property has the wrong JSON type (e.g. string where boolean expected) | +| `NX_SCHEMA_REQUIRED` | ERROR | Required property missing (per JSON schema `required` array) | +| `NX_SCHEMA_MIN_ITEMS` | WARNING | Array has fewer items than `minItems` requires | +| `NX_SCHEMA_ADDITIONAL_PROPERTIES` | WARNING | Property not defined in the schema (schema uses `additionalProperties: false`) | +| `NX_SCHEMA_PATTERN` | WARNING | String value does not match the schema `pattern` | +| `NX_SCHEMA_ENUM` | WARNING | Value not in the allowed `enum` list | + +Schema issues complement — and may overlap with — the hardcoded NX rules. When overlap occurs, both rule IDs appear in the `issues` array. + **Error codes:** `MISSING_INPUT`, `NX000`, `FILE_NOT_FOUND`, `PATH_NOT_ALLOWED` --- diff --git a/package.json b/package.json index f1f95d1..90c374d 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@provartesting/provardx-cli", "description": "A plugin for the Salesforce CLI to orchestrate testing activities and report quality metrics to Provar Quality Hub", - "version": "1.5.0-beta.18", + "version": "1.5.0-beta.19", "mcpName": "io.github.ProvarTesting/provar", "license": "BSD-3-Clause", "plugins": [ @@ -11,6 +11,7 @@ "dependencies": { "@modelcontextprotocol/sdk": "^1.8.0", "@oclif/core": "^3.27.0", + "ajv": "^8.17.1", "@provartesting/provardx-plugins-automation": "1.2.2", "@provartesting/provardx-plugins-manager": "1.3.2", "@provartesting/provardx-plugins-utils": "1.3.3", diff --git a/server.json b/server.json index 76faa4b..f7d1f22 100644 --- a/server.json +++ b/server.json @@ -14,12 +14,12 @@ "url": "https://github.com/ProvarTesting/provardx-cli", "source": "github" }, - "version": "1.5.0-beta.18", + "version": "1.5.0-beta.19", "packages": [ { 
"registryType": "npm", "identifier": "@provartesting/provardx-cli", - "version": "1.5.0-beta.18", + "version": "1.5.0-beta.19", "transport": { "type": "stdio" }, diff --git a/src/mcp/tools/nitroXTools.ts b/src/mcp/tools/nitroXTools.ts index 1a8821b..febddb2 100644 --- a/src/mcp/tools/nitroXTools.ts +++ b/src/mcp/tools/nitroXTools.ts @@ -10,6 +10,8 @@ import fs from 'node:fs'; import path from 'node:path'; import os from 'node:os'; import { randomUUID } from 'node:crypto'; +import { fileURLToPath } from 'node:url'; +import { Ajv2020, type ValidateFunction, type ErrorObject } from 'ajv/dist/2020.js'; import { z } from 'zod'; import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; import type { ServerConfig } from '../server.js'; @@ -36,6 +38,47 @@ function isObj(v: unknown): v is JsonObj { return typeof v === 'object' && v !== null && !Array.isArray(v); } +// ── AJV Schema Validator ────────────────────────────────────────────────────── + +const RULES_DIR = path.join(path.dirname(fileURLToPath(import.meta.url)), '..', 'rules'); + +let cachedFactComponentValidator: ValidateFunction | null | undefined; + +function getFactComponentValidator(): ValidateFunction | null { + if (cachedFactComponentValidator !== undefined) return cachedFactComponentValidator; + + const schemaPath = path.join(RULES_DIR, 'FactComponent.schema.json'); + try { + // Fix known broken $ref in the bundled schema (#/defs/ → #/$defs/) + const patched = fs.readFileSync(schemaPath, 'utf-8').replace(/"#\/defs\//g, '"#/$defs/'); + const schema = JSON.parse(patched) as Record; + const ajv = new Ajv2020({ allErrors: true, strict: false, validateFormats: false }); + cachedFactComponentValidator = ajv.compile(schema); + } catch (e) { + log('warn', 'provar_nitrox_validate: FactComponent schema unavailable, using hardcoded rules only', { + error: String(e), + }); + cachedFactComponentValidator = null; + } + return cachedFactComponentValidator; +} + +function ajvErrorToIssue(err: ErrorObject): 
NitroXIssue { + const keyword = err.keyword.replace(/([a-z])([A-Z])/g, '$1_$2').toUpperCase(); + const instancePath = err.instancePath; + const appliesTo = instancePath ? instancePath.replace(/^\//, '').replace(/\//g, '.') : 'root'; + const pathParts = instancePath.split('/').filter(Boolean); + const severity: 'ERROR' | 'WARNING' = ['REQUIRED', 'TYPE'].includes(keyword) ? 'ERROR' : 'WARNING'; + const issue: NitroXIssue = { + rule_id: `NX_SCHEMA_${keyword}`, + severity, + message: `Schema: ${instancePath || 'root'} — ${err.message ?? 'validation failed'}`, + applies_to: appliesTo, + }; + if (pathParts.length > 0) issue.field = pathParts[pathParts.length - 1]; + return issue; +} + // ── Directory Utilities ─────────────────────────────────────────────────────── const SKIP_DIRS = new Set(['node_modules', '.git']); @@ -168,40 +211,45 @@ function validateRootProperties(obj: JsonObj, issues: NitroXIssue[]): void { } } -/** Validate a parsed NitroX .po.json object against schema-derived rules. */ -export function validateNitroXContent(obj: JsonObj): NitroXValidationResult { +/** Validate a parsed NitroX .po.json against hardcoded NX rules and the FactComponent JSON schema. 
*/ +export function validateNitroXContent(obj: JsonObj, schemaOverride?: ValidateFunction | null): NitroXValidationResult { const issues: NitroXIssue[] = []; validateRootProperties(obj, issues); - // Validate root-level parameters if (Array.isArray(obj['parameters'])) { for (const param of obj['parameters']) { if (isObj(param)) validateParameter(param, 'root', issues); } } - // Validate root-level interactions if (Array.isArray(obj['interactions'])) { for (const interaction of obj['interactions']) { if (isObj(interaction)) validateInteraction(interaction, 'root', issues); } } - // Validate root-level selectors if (Array.isArray(obj['selectors'])) { for (const sel of obj['selectors']) { if (isObj(sel)) validateSelector(sel, issues); } } - // Validate elements recursively if (Array.isArray(obj['elements'])) { for (const el of obj['elements']) { if (isObj(el)) validateElement(el, issues); } } + // AJV schema validation runs additively alongside NX001–NX010 + const validator = schemaOverride === undefined ? getFactComponentValidator() : schemaOverride; + if (validator) { + validator(obj); + for (const err of validator.errors ?? 
[]) { + issues.push(ajvErrorToIssue(err)); + } + } + const errorCount = issues.filter((i) => i.severity === 'ERROR').length; const warningCount = issues.filter((i) => i.severity === 'WARNING').length; const infoCount = issues.filter((i) => i.severity === 'INFO').length; @@ -636,7 +684,9 @@ export function registerNitroXValidate(server: McpServer, config: ServerConfig): description: [ 'Validate a NitroX .po.json (Hybrid Model component page object) against schema rules.', 'Works for any NitroX-mapped component type: LWC, Screen Flow, Industry Components, Experience Cloud, HTML5.', - 'Returns a quality score (0–100) and a list of issues with rule IDs (NX001–NX010), severity, and suggestions.', + 'Runs two validation passes sequentially: hardcoded semantic rules (NX001–NX010) then JSON schema validation (NX_SCHEMA_* rule IDs).', + 'Schema issues catch structural errors not covered by NX rules: wrong property types, extra properties, enum violations.', + 'Returns a quality score (0–100) and a combined list of issues with rule IDs, severity, and suggestions.', 'Score formula: 100 − (20 × errors) − (5 × warnings) − (1 × infos).', ].join(' '), inputSchema: { diff --git a/test/unit/mcp/nitroXTools.test.ts b/test/unit/mcp/nitroXTools.test.ts index 5ae0b22..514f0bf 100644 --- a/test/unit/mcp/nitroXTools.test.ts +++ b/test/unit/mcp/nitroXTools.test.ts @@ -10,6 +10,7 @@ import { strict as assert } from 'node:assert'; import fs from 'node:fs'; import path from 'node:path'; import os from 'node:os'; +import type { ValidateFunction } from 'ajv/dist/2020.js'; import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; // ── Minimal mock server ─────────────────────────────────────────────────────── @@ -408,6 +409,78 @@ describe('nitroXTools', () => { }); }); + // ── NX_SCHEMA_ rules (AJV schema validation) ───────────────────────────────── + + describe('NX_SCHEMA_ rules (AJV schema override)', () => { + type IssueShape = { rule_id: string; severity: string; message: 
string; applies_to: string; field?: string };
+    type ValidateFnType = (
+      obj: Record<string, unknown>,
+      v?: ValidateFunction | null
+    ) => { issues: IssueShape[]; valid: boolean; score: number; issue_count: number };
+
+    let validateFn!: ValidateFnType;
+    let extraPropsValidator!: ValidateFunction;
+    let typeViolationValidator!: ValidateFunction;
+    let permissiveValidator!: ValidateFunction;
+
+    before(async () => {
+      const mod = await import('../../../src/mcp/tools/nitroXTools.js');
+      // Cast through unknown: the private NitroXValidationResult is structurally compatible with IssueShape[]
+      validateFn = mod.validateNitroXContent as unknown as ValidateFnType;
+
+      const { Ajv2020: AjvClass } = await import('ajv/dist/2020.js');
+      const ajv = new AjvClass({ allErrors: true, strict: false });
+
+      extraPropsValidator = ajv.compile({
+        type: 'object',
+        additionalProperties: false,
+        properties: { componentId: { type: 'string' } },
+      });
+
+      typeViolationValidator = ajv.compile({
+        type: 'object',
+        properties: { pageStructureElement: { type: 'boolean' } },
+      });
+
+      permissiveValidator = ajv.compile({
+        type: 'object',
+        additionalProperties: false,
+        properties: {
+          componentId: { type: 'string' },
+          name: { type: 'string' },
+          type: { type: 'string' },
+          pageStructureElement: { type: 'boolean' },
+          fieldDetailsElement: { type: 'boolean' },
+        },
+      });
+    });
+
+    it('NX_SCHEMA_ADDITIONAL_PROPERTIES: extra property surfaces as WARNING', () => {
+      // Schema only allows componentId; passing an extra field should produce a schema issue
+      const result = validateFn({ componentId: VALID_UUID, _extraProp: true }, extraPropsValidator);
+      assert.ok(result.issues.some((i) => i.rule_id === 'NX_SCHEMA_ADDITIONAL_PROPERTIES'));
+      assert.equal(result.issues.find((i) => i.rule_id === 'NX_SCHEMA_ADDITIONAL_PROPERTIES')?.severity, 'WARNING');
+    });
+
+    it('NX_SCHEMA_TYPE: wrong property type surfaces as ERROR', () => {
+      // Schema expects pageStructureElement to be boolean; passing a string should 
produce a type error + const result = validateFn({ ...VALID_ROOT, pageStructureElement: 'yes' }, typeViolationValidator); + assert.ok(result.issues.some((i) => i.rule_id === 'NX_SCHEMA_TYPE' && i.severity === 'ERROR')); + }); + + it('valid object matching schema produces no NX_SCHEMA_ issues', () => { + const result = validateFn(VALID_ROOT, permissiveValidator); + assert.ok(!result.issues.some((i) => i.rule_id.startsWith('NX_SCHEMA_'))); + }); + + it('null schema override: hardcoded rules still run; valid object scores 100', () => { + const result = validateFn(VALID_ROOT, null); + assert.equal(result.valid, true); + assert.equal(result.score, 100); + assert.equal(result.issue_count, 0); + }); + }); + // ── provar_nitrox_generate ───────────────────────────────────────────────── describe('provar_nitrox_generate', () => { From c9ef921b24648679cc80fa2f43f1da97da93c5a9 Mon Sep 17 00:00:00 2001 From: Michael Dailey <49916244+mrdailey99@users.noreply.github.com> Date: Tue, 12 May 2026 14:58:28 -0500 Subject: [PATCH 12/12] PDX-467: chore(docs): bump version to 1.5.0 stable and remove @beta install tag (#160) * PDX-467: chore(docs): bump version to 1.5.0 stable and remove @beta install tag RCA: The 1.5.0 release is ready for stable promotion; all docs and install commands still referenced the @beta dist-tag and pre-release version string. Fix: Updated package.json and server.json to 1.5.0, removed @beta from all install commands in README.md, docs/mcp.md, and docs/mcp-pilot-guide.md, and updated the stale-cache unit test to use the latest channel. * PDX-467: test(mcp): make stale-cache test release-agnostic by deriving channel at runtime RCA: Hardcoded version and channel values in the stale-cache test will fail once the branch version cycles back to a prerelease (beta/rc) after the 1.5.0 stable release. 
Fix: Derive currentVersion and channel from the running version at test time, mirroring the pattern used in the fresh-cache test, so the test remains valid across any semver channel. --- README.md | 17 +++++------------ docs/mcp-pilot-guide.md | 6 +++--- docs/mcp.md | 22 +++++++++++----------- package.json | 2 +- server.json | 4 ++-- test/unit/mcp/updateChecker.test.ts | 12 +++++++----- 6 files changed, 29 insertions(+), 34 deletions(-) diff --git a/README.md b/README.md index a3437f9..6057946 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ The Provar DX CLI is a Salesforce CLI plugin for Provar customers who want to au Install the plugin ```sh-session -$ sf plugins install @provartesting/provardx-cli@beta +$ sf plugins install @provartesting/provardx-cli ``` Update plugins @@ -53,14 +53,7 @@ No prior setup needed. Paste this into your Claude Desktop config file and resta "mcpServers": { "provar": { "command": "npx", - "args": [ - "-y", - "@provartesting/provardx-cli@beta", - "mcp", - "start", - "--allowed-paths", - "/path/to/your/provar/project" - ] + "args": ["-y", "@provartesting/provardx-cli", "mcp", "start", "--allowed-paths", "/path/to/your/provar/project"] } } } @@ -71,7 +64,7 @@ No prior setup needed. Paste this into your Claude Desktop config file and resta **Claude Code** — run once to register the server: ```sh -claude mcp add provar -s user -- npx -y @provartesting/provardx-cli@beta mcp start --allowed-paths /path/to/your/provar/project +claude mcp add provar -s user -- npx -y @provartesting/provardx-cli mcp start --allowed-paths /path/to/your/provar/project ``` ### Option B — Global sf plugin install @@ -79,8 +72,8 @@ claude mcp add provar -s user -- npx -y @provartesting/provardx-cli@beta mcp sta Prefer a persistent global install? Install once, then use the `sf` command: ```sh -# 1. Install the plugin — @beta is required for MCP support -sf plugins install @provartesting/provardx-cli@beta +# 1. 
Install the plugin +sf plugins install @provartesting/provardx-cli # 2. (Optional) Authenticate for full 170+ rule validation sf provar auth login diff --git a/docs/mcp-pilot-guide.md b/docs/mcp-pilot-guide.md index a958918..c5d2085 100644 --- a/docs/mcp-pilot-guide.md +++ b/docs/mcp-pilot-guide.md @@ -25,7 +25,7 @@ The server runs **locally on your machine**. It does not phone home, transmit yo | --------------------------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------- | | Provar Automation IDE | ≥ 3.x | Must be installed with an **activated license** on the same machine. The MCP server reads the license from `~/Provar/.licenses/`. | | Salesforce CLI (`sf`) | ≥ 2.x | `npm install -g @salesforce/cli` | -| Provar DX CLI plugin | ≥ 1.5.0 | `sf plugins install @provartesting/provardx-cli@beta` | +| Provar DX CLI plugin | ≥ 1.5.0 | `sf plugins install @provartesting/provardx-cli` | | An MCP-compatible AI client | — | Claude Desktop, Claude Code, GitHub Copilot (VS Code), Cursor, or Agentforce Vibes | | Node.js | 18–24 | Installed automatically with the SF CLI. **Node 25+ is not supported** — a transitive dependency crashes on startup. Use Node 22 LTS. | @@ -48,7 +48,7 @@ sf --version ### 2. Install the Provar DX CLI plugin ```sh -sf plugins install @provartesting/provardx-cli@beta +sf plugins install @provartesting/provardx-cli ``` Verify: @@ -561,7 +561,7 @@ After editing `claude_desktop_config.json`, you must fully restart Claude Deskto **Server starts but immediately exits** -Check that the SF CLI plugin is installed: `sf plugins | grep provardx`. If missing, run `sf plugins install @provartesting/provardx-cli@beta`. +Check that the SF CLI plugin is installed: `sf plugins | grep provardx`. If missing, run `sf plugins install @provartesting/provardx-cli`. 
--- diff --git a/docs/mcp.md b/docs/mcp.md index 577f225..57ed89a 100644 --- a/docs/mcp.md +++ b/docs/mcp.md @@ -89,8 +89,8 @@ The Provar DX CLI ships with a built-in **Model Context Protocol (MCP) server** ## Quick start ```sh -# 1. Install the plugin — @beta is required for MCP support -sf plugins install @provartesting/provardx-cli@beta +# 1. Install the plugin +sf plugins install @provartesting/provardx-cli # 2. (Optional) Authenticate for full 170+ rule validation sf provar auth login @@ -224,7 +224,7 @@ claude mcp add provar -s user -- npx -y @salesforce/cli provar mcp start --allow } ``` -> The Provar plugin must still be installed first via `sf plugins install @provartesting/provardx-cli@beta`. The npx invocation shares the same plugin directory as the globally installed `sf` binary. +> The Provar plugin must still be installed first via `sf plugins install @provartesting/provardx-cli`. The npx invocation shares the same plugin directory as the globally installed `sf` binary. ### Claude Desktop @@ -493,14 +493,14 @@ A lightweight sanity-check tool. Echoes back the message you send. Useful for ve **Output** -| Field | Type | Description | -| ----------------- | -------------- | --------------------------------------------------------- | -| `pong` | string | The echoed message | -| `ts` | string | ISO-8601 timestamp | -| `server` | string | Server name and version (e.g. `provar-mcp@1.5.0-beta.15`) | -| `updateAvailable` | boolean | Whether a newer version is available in the registry | -| `latestVersion` | string \| null | Latest version found in the npm registry, or `null` | -| `updateCommand` | string \| null | Command to run to update the plugin, or `null` | +| Field | Type | Description | +| ----------------- | -------------- | ---------------------------------------------------- | +| `pong` | string | The echoed message | +| `ts` | string | ISO-8601 timestamp | +| `server` | string | Server name and version (e.g. 
`provar-mcp@1.5.0`) | +| `updateAvailable` | boolean | Whether a newer version is available in the registry | +| `latestVersion` | string \| null | Latest version found in the npm registry, or `null` | +| `updateCommand` | string \| null | Command to run to update the plugin, or `null` | --- diff --git a/package.json b/package.json index 90c374d..d0112bc 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@provartesting/provardx-cli", "description": "A plugin for the Salesforce CLI to orchestrate testing activities and report quality metrics to Provar Quality Hub", - "version": "1.5.0-beta.19", + "version": "1.5.0", "mcpName": "io.github.ProvarTesting/provar", "license": "BSD-3-Clause", "plugins": [ diff --git a/server.json b/server.json index f7d1f22..1b4354d 100644 --- a/server.json +++ b/server.json @@ -14,12 +14,12 @@ "url": "https://github.com/ProvarTesting/provardx-cli", "source": "github" }, - "version": "1.5.0-beta.19", + "version": "1.5.0", "packages": [ { "registryType": "npm", "identifier": "@provartesting/provardx-cli", - "version": "1.5.0-beta.19", + "version": "1.5.0", "transport": { "type": "stdio" }, diff --git a/test/unit/mcp/updateChecker.test.ts b/test/unit/mcp/updateChecker.test.ts index 9668d20..cfc64dd 100644 --- a/test/unit/mcp/updateChecker.test.ts +++ b/test/unit/mcp/updateChecker.test.ts @@ -187,16 +187,18 @@ describe('checkForUpdate', () => { }); it('fetches registry when cache is stale (>4h)', async () => { + const { currentVersion } = await checkForUpdate({ noUpdateCheck: true, autoUpdate: false }); + const channel = deriveChannel(currentVersion); writeFreshCache({ checkedAt: Date.now() - 5 * 60 * 60 * 1000, // 5 hours ago - currentVersion: '1.5.0-beta.10', - latestVersion: '1.5.0-beta.10', - channel: 'beta', + currentVersion, + latestVersion: currentVersion, + channel, }); - mockFetchOk({ beta: '1.5.0-beta.11' }); + mockFetchOk({ [channel]: '99.0.0' }); const result = await checkForUpdate({ noUpdateCheck: false, 
autoUpdate: false }); assert.equal(result.fromCache, false); - assert.equal(result.latestVersion, '1.5.0-beta.11'); + assert.equal(result.latestVersion, '99.0.0'); }); it('returns updateAvailable=true when update is available', async () => {