diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 00000000..fd06794f --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -euo pipefail + +FIX="${FIX:-1}" +REPO_ROOT="$(git rev-parse --show-toplevel)" + +$REPO_ROOT/scripts/lint.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3a82ffc3..1b3af03d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,12 +3,30 @@ name: CI on: push: - branches: ['*'] + branches: ['main'] pull_request: - branches: [main] + branches: ['**'] + schedule: + # Nightly at 03:00 UTC + - cron: '0 3 * * *' + workflow_dispatch: + inputs: + reporting: + description: 'Create GitHub issue with scenario report' + type: boolean + default: false + skip_report_on_pass: + description: 'Skip filing issue when all scenarios pass' + type: boolean + default: true + +env: + # When true, file a GitHub issue even when all scenarios pass. + # Flip this to 'false' once the suite is stable to only file on failures. 
+ REPORT_ON_SUCCESS: 'true' jobs: - fmt-clippy: + lint: runs-on: ubuntu-latest timeout-minutes: 10 @@ -20,15 +38,32 @@ jobs: with: components: rustfmt, clippy - - name: Check formatting - run: cargo fmt --all -- --check - - - name: Run clippy - run: cargo clippy --all-targets --all-features -- -D warnings - - foc-start-test: + - name: Setup Python tools + run: | + sudo apt-get update + sudo apt-get install -y pipx + pipx install black + pipx install ruff + echo "$HOME/.local/bin" >> $GITHUB_PATH + + - name: Run linting (check mode) + run: FIX=0 ./scripts/lint.sh + + foc-devnet-test: + strategy: + fail-fast: false + max-parallel: 1 + matrix: + include: + - name: latesttag + init_flags: "--lotus latestTag --curio latestTag --filecoin-services latestTag" + - name: latestcommit + init_flags: "--lotus latestTag --curio gitbranch:pdpv0 --filecoin-services gitbranch:main" runs-on: ["self-hosted", "linux", "x64", "16xlarge+gpu"] - timeout-minutes: 60 + timeout-minutes: 100 + permissions: + contents: read + issues: write steps: - uses: actions/checkout@v4 @@ -91,7 +126,7 @@ jobs: ~/.cargo/git/db/ target/ key: ${{ runner.os }}-rust-build-${{ hashFiles('**/Cargo.lock') }} - + # Copy binary and clean up Rust artifacts to save disk space - name: "EXEC: {Copy binary and clean cache}, DEP: {C-rust-cache}" run: | @@ -151,21 +186,21 @@ jobs: if: steps.cache-docker-images.outputs.cache-hit == 'true' run: | rm -rf ~/.foc-devnet - ./foc-devnet init --no-docker-build + ./foc-devnet init --no-docker-build ${{ matrix.init_flags }} # If Docker images are not cached, do full init (downloads YugabyteDB and builds all images) - name: "EXEC: {Initialize without cache}, independent" if: steps.cache-docker-images.outputs.cache-hit != 'true' run: | rm -rf ~/.foc-devnet - ./foc-devnet init + ./foc-devnet init ${{ matrix.init_flags }} - # CACHE-DOCKER: Build Docker images if not cached - - name: "EXEC: {Build Docker images}, DEP: {C-docker-images-cache}" + # CACHE-DOCKER: Save Docker images as 
tarballs for caching + - name: "EXEC: {Save Docker images for cache}, DEP: {C-docker-images-cache}" if: steps.cache-docker-images.outputs.cache-hit != 'true' run: |- mkdir -p ~/.docker-images-cache - echo "Building Docker images for cache..." + echo "Saving Docker images for cache..." docker save foc-lotus -o ~/.docker-images-cache/foc-lotus.tar docker save foc-lotus-miner -o ~/.docker-images-cache/foc-lotus-miner.tar docker save foc-builder -o ~/.docker-images-cache/foc-builder.tar @@ -189,7 +224,7 @@ jobs: uses: actions/cache/restore@v4 with: path: ~/.foc-devnet/bin - key: ${{ runner.os }}-binaries-${{ steps.version-hashes.outputs.code-hash }} + key: ${{ runner.os }}-binaries-${{ matrix.name }}-${{ steps.version-hashes.outputs.code-hash }} - name: "EXEC: {Ensure permissions on binaries}, DEP: {C-build-artifacts-cache}" if: steps.cache-binaries.outputs.cache-hit == 'true' @@ -202,9 +237,9 @@ jobs: uses: actions/cache/restore@v4 with: path: ~/.foc-devnet/docker/volumes/cache/foc-builder - key: ${{ runner.os }}-foc-builder-cache-${{ hashFiles('docker/**') }}-${{ hashFiles('src/config.rs') }} + key: ${{ runner.os }}-foc-builder-cache-${{ matrix.name }}-${{ hashFiles('docker/**') }}-${{ hashFiles('src/config.rs') }} restore-keys: | - ${{ runner.os }}-foc-builder-cache- + ${{ runner.os }}-foc-builder-cache-${{ matrix.name }}- - name: "EXEC: {Ensure permissions}, DEP: {C-foc-builder-cache}" if: steps.cache-binaries.outputs.cache-hit != 'true' && @@ -230,7 +265,7 @@ jobs: uses: actions/cache/save@v4 with: path: ~/.foc-devnet/docker/volumes/cache/foc-builder - key: ${{ runner.os }}-foc-builder-cache-${{ hashFiles('docker/**') }}-${{ hashFiles('src/config.rs') }} + key: ${{ runner.os }}-foc-builder-cache-${{ matrix.name }}-${{ hashFiles('docker/**') }}-${{ hashFiles('src/config.rs') }} # CACHE-BINARIES: Save built Lotus/Curio binaries for future runs - name: "CACHE_SAVE: {C-build-artifacts-cache}" @@ -238,7 +273,7 @@ jobs: uses: actions/cache/save@v4 with: path: 
~/.foc-devnet/bin - key: ${{ runner.os }}-binaries-${{ steps.version-hashes.outputs.code-hash }} + key: ${{ runner.os }}-binaries-${{ matrix.name }}-${{ steps.version-hashes.outputs.code-hash }} # Disk free-up - name: "EXEC: {Clean up Go modules}, DEP: {C-build-artifacts-cache}" @@ -273,8 +308,9 @@ jobs: continue-on-error: true run: ./foc-devnet start --parallel - # On failure, collect and print Docker container logs for debugging - - name: "EXEC: {Collect Docker logs on failure}, independent" + # Collect and print Docker container logs for debugging (always runs for diagnostics) + - name: "EXEC: {Collect Docker logs}, independent" + if: always() run: | RUN_DIR="$HOME/.foc-devnet/state/latest" @@ -311,9 +347,11 @@ jobs: # Verify cluster is running correctly - name: "EXEC: {Check cluster status}, independent" + if: always() run: ./foc-devnet status - name: "EXEC: {List foc-* containers}, independent" + if: always() run: | echo "Containers using foc-* images (running or exited):" docker ps -a --format 'table {{.Names}}\t{{.Image}}\t{{.Status}}' @@ -334,6 +372,13 @@ jobs: with: node-version: '20' + # Setup pnpm (required by scenario tests) + - name: "EXEC: {Setup pnpm}, independent" + if: steps.start_cluster.outcome == 'success' + uses: pnpm/action-setup@v4 + with: + version: latest + # Validate schema using zod - name: "CHECK: {Validate devnet-info.json schema}" if: steps.start_cluster.outcome == 'success' @@ -346,13 +391,131 @@ jobs: node check-balances.js "$DEVNET_INFO" echo "✓ All examples ran well" - # Clean shutdown + # Run scenario tests against the live devnet + - name: "TEST: {Run scenario tests}" + if: steps.start_cluster.outcome == 'success' + env: + # Enable reporting for nightly schedule or when explicitly requested + REPORTING: ${{ github.event_name == 'schedule' || inputs.reporting == true }} + # By default, don't file an issue if everything passes + SKIP_REPORT_ON_PASS: ${{ inputs.skip_report_on_pass != false }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 
+ run: python3 scenarios/run.py + + # Ensure scenario report exists even if tests didn't run (for issue reporting) + - name: "EXEC: {Ensure scenario report exists}" + if: always() + run: | + REPORT="$HOME/.foc-devnet/state/latest/scenario_report.md" + if [ ! -f "$REPORT" ]; then + mkdir -p "$(dirname "$REPORT")" + { + echo "# Scenario Test Report" + echo "" + echo "**No scenario tests were executed.**" + echo "" + echo "**Start cluster outcome**: ${{ steps.start_cluster.outcome }}" + echo "" + echo "## foc-devnet version" + echo '```' + ./foc-devnet version 2>&1 || echo "version command failed" + echo '```' + } > "$REPORT" + fi + + # Upload scenario report as artifact (name includes strategy to avoid collisions in matrix) + - name: "EXEC: {Upload scenario report}" + if: always() + uses: actions/upload-artifact@v4 + with: + name: scenario-report-${{ matrix.name }} + path: ~/.foc-devnet/state/latest/scenario_*.md + if-no-files-found: ignore + + # Clean shutdown (always runs to avoid leaving containers behind) - name: "EXEC: {Stop cluster}, independent" + if: always() run: ./foc-devnet stop # Mark job as failed if the start step failed, but only after all steps - name: "CHECK: {Fail job if start failed}" - if: ${{ always() && steps.start_cluster.outcome == 'failure' }} + if: always() && steps.start_cluster.outcome == 'failure' run: | echo "Start cluster failed earlier; marking job as failed." 
>&2 exit 1 + + issue-reporting: + name: Issue Reporting (${{ matrix.name }}) + if: always() && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') + needs: [foc-devnet-test] + strategy: + fail-fast: false + matrix: + include: + - name: latesttag + issue_label: scenarios-run-latesttag + issue_title: "FOC Devnet scenarios run report (latestTag)" + - name: latestcommit + issue_label: scenarios-run-latestcommit + issue_title: "FOC Devnet scenarios run report (latestCommit)" + runs-on: ubuntu-latest + permissions: + issues: write + steps: + - name: "CHECK: {Determine if issue should be filed}" + id: should_file + run: | + FOC_DEVNET_TEST_STEP_RESULT="${{ needs.foc-devnet-test.result }}" + if [[ "$FOC_DEVNET_TEST_STEP_RESULT" == "success" ]]; then + TEST_PASSED="true" + else + TEST_PASSED="false" + fi + echo "passed=$TEST_PASSED" >> $GITHUB_OUTPUT + if [[ "$TEST_PASSED" == "true" && "$REPORT_ON_SUCCESS" != "true" ]]; then + echo "file=false" >> $GITHUB_OUTPUT + echo "Skipping issue: tests passed and REPORT_ON_SUCCESS is not 'true'" + else + echo "file=true" >> $GITHUB_OUTPUT + echo "Filing issue (${{ matrix.name }}): test result was $FOC_DEVNET_TEST_STEP_RESULT" + fi + + - name: "EXEC: {Download scenario report for ${{ matrix.name }}}" + if: steps.should_file.outputs.file == 'true' + uses: actions/download-artifact@v4 + with: + name: scenario-report-${{ matrix.name }} + path: /tmp/scenario-report + continue-on-error: true + + - name: "EXEC: {Read report content}" + if: steps.should_file.outputs.file == 'true' + id: report + run: | + CONTENT="" + for f in /tmp/scenario-report/*.md; do + if [ -f "$f" ]; then + CONTENT+=$(cat "$f") + CONTENT+=$'\n\n' + fi + done + if [[ -z "$CONTENT" ]]; then + CONTENT="No scenario report available for **${{ matrix.name }}** strategy." 
+ fi + EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) + echo "content<<$EOF" >> $GITHUB_OUTPUT + echo "$CONTENT" >> $GITHUB_OUTPUT + echo "$EOF" >> $GITHUB_OUTPUT + + - name: "EXEC: {Create or update issue}" + if: steps.should_file.outputs.file == 'true' + uses: ipdxco/create-or-update-issue@v1 + with: + GITHUB_TOKEN: ${{ github.token }} + title: ${{ matrix.issue_title }} + body: | + The **${{ matrix.name }}** scenarios run **${{ needs.foc-devnet-test.result == 'success' && 'passed ✅' || 'failed ❌' }}**. + See [the workflow run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details. + + ${{ steps.report.outputs.content }} + label: ${{ matrix.issue_label }} diff --git a/.gitignore b/.gitignore index 8a22f1cf..7717af43 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,6 @@ target/ contracts/MockUSDFC/lib/ contracts/MockUSDFC/broadcast/ artifacts/ -.vscode/ \ No newline at end of file +.vscode/ +*__pycache__/ +.githooks \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index f195c515..1665eacf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -862,6 +862,7 @@ dependencies = [ "rand 0.8.5", "regex", "reqwest 0.11.27", + "semver", "serde", "serde_json", "sha2 0.10.9", @@ -2351,6 +2352,12 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + [[package]] name = "serde" version = "1.0.228" diff --git a/Cargo.toml b/Cargo.toml index d3b62ba1..ef8976af 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ base32 = "0.4" crc32fast = "1.3" bip32 = "0.5" rand = "0.8" +semver = "1.0" bls-signatures = "0.15" names = { version = "0.14", default-features = false } shellexpand = "3" diff --git a/README.md b/README.md index 06126cb8..5afce1ff 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,18 @@ For GitHub Actions, add this step before 
running foc-devnet: - run: echo '127.0.0.1 host.docker.internal' | sudo tee -a /etc/hosts ``` +(Optional) Additionally, you may want to get linters for python scenarios, and install pre-commit hooks for development: +```sh +sudo apt install pipx +pipx ensurepath + +# Install linting tools +pipx install black + +# Install pre-commit hooks +./scripts/install_precommit_hooks.sh +``` + ### Step 1: Initialize ```bash diff --git a/README_ADVANCED.md b/README_ADVANCED.md index 58375405..733eff42 100644 --- a/README_ADVANCED.md +++ b/README_ADVANCED.md @@ -1296,3 +1296,52 @@ docker run --rm --network host \ --broadcast ``` +## Scenario Tests + +Scenario tests are Python scripts that validate devnet state after startup. They share a single running devnet and execute serially in a defined order. The runner lives in `scenarios/` and uses **only Python stdlib** — no `pip install` required. + +### Running scenarios + +```bash +# Run all scenarios +python3 scenarios/run.py + +# Run a single scenario directly +python3 scenarios/test_basic_balances.py + +# Point at a specific devnet run +DEVNET_INFO=~/.foc-devnet/state//devnet-info.json python3 scenarios/run.py +``` + +Reports are written to `~/.foc-devnet/state/latest/scenario_report.md`. + +### Writing a new scenario + +1. Create `scenarios/test_.py`: + +```python +#!/usr/bin/env python3 +from scenarios.run import * + +def run(): + d = devnet_info()["info"] + rpc = d["lotus"]["host_rpc_url"] + + # Use helpers: sh, assert_eq, assert_gt, assert_not_empty, assert_ok + balance = sh(f"cast balance 0x... --rpc-url {rpc}") + assert_gt(balance, 0, "account has funds") + +if __name__ == "__main__": + run() +``` + +2. Add `"test_"` to the `ORDER` list in `scenarios/run.py`. + +### Constraints + +- **No third-party packages.** Only Python stdlib (`os`, `sys`, `json`, `subprocess`, etc.) plus external CLI tools already present on the host (`cast`, `docker`). This keeps CI setup trivial — no virtual env, no `pip install`. 
+ +### CI integration + +Scenarios run automatically in CI after the devnet starts. On nightly runs (or manual dispatch with `reporting` enabled), failures automatically create a GitHub issue with a full report. + diff --git a/scenarios/__init__.py b/scenarios/__init__.py new file mode 100644 index 00000000..95f9a199 --- /dev/null +++ b/scenarios/__init__.py @@ -0,0 +1 @@ +# scenarios_py package diff --git a/scenarios/run.py b/scenarios/run.py new file mode 100755 index 00000000..6d6e1535 --- /dev/null +++ b/scenarios/run.py @@ -0,0 +1,356 @@ +#!/usr/bin/env python3 +# core.py — assertions, devnet-info helpers, test runner, and reporting. +# Run all tests: python3 core.py +# Run one test: python3 test_containers.py +import os +import sys +import json +import subprocess +import threading +import queue +import time + +# Ensure the project root (parent of scenarios_py/) is on sys.path so that +# test files can do `from scenarios_py.run import *` regardless of cwd. +_project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +if _project_root not in sys.path: + sys.path.insert(0, _project_root) + +# Allow test files to `from core import *` even when core runs as __main__. 
+sys.modules.setdefault("core", sys.modules[__name__]) + +DEVNET_INFO = os.environ.get( + "DEVNET_INFO", os.path.expanduser("~/.foc-devnet/state/latest/devnet-info.json") +) +REPORT_MD = os.environ.get( + "REPORT_FILE", os.path.expanduser("~/.foc-devnet/state/latest/scenario_report.md") +) + +# ── Scenario execution order (mirrors scenarios/order.sh) ──── +# Each entry is (test_name, timeout_seconds) +ORDER = [ + ("test_containers", 5), + ("test_basic_balances", 10), + ("test_storage_e2e", 100), + ("test_caching_subsystem", 200), +] + +_pass = 0 +_fail = 0 +_log_lines: list = [] + +# ── Logging ────────────────────────────────────────────────── + + +def info(msg): + _log_lines.append(f"[INFO] {msg}") + print(f"[INFO] {msg}") + + +def ok(msg): + global _pass + _log_lines.append(f"[ OK ] {msg}") + print(f"[ OK ] {msg}") + _pass += 1 + + +def fail(msg): + "fail logs a failure and exits the scenario entirely with exit code = 1" + global _fail + _log_lines.append(f"[FAIL] {msg}") + print(f"[FAIL] {msg}", file=sys.stderr) + _fail += 1 + sys.exit(1) + + +# ── Assertions ─────────────────────────────────────────────── + + +def assert_eq(a, b, msg): + if a == b: + ok(msg) + else: + fail(f"{msg} (got '{a}', want '{b}')") + + +def assert_gt(a, b, msg): + try: + if int(a) > int(b): + ok(msg) + else: + fail(f"{msg} (got '{a}', want > '{b}')") + except: + fail(f"{msg} (not an int: '{a}')") + + +def assert_not_empty(v, msg): + if v: + ok(msg) + else: + fail(f"{msg} (empty)") + + +def assert_ok(cmd, msg): + if ( + subprocess.call( + cmd, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + ) + == 0 + ): + ok(msg) + else: + fail(msg) + + +# ── Shell helpers ───────────────────────────────────────────── + + +def sh(cmd): + """Run cmd in a shell and return stdout stripped, or '' on error.""" + return subprocess.run( + cmd, shell=True, text=True, capture_output=True + ).stdout.strip() + + +def run_cmd( + cmd: list, *, cwd=None, env=None, label: str = "", print_output: 
bool = False +) -> bool: + """Run a subprocess command and report pass/fail; returns True on success.""" + result = subprocess.run(cmd, cwd=cwd, env=env, text=True, capture_output=True) + details = (result.stderr or result.stdout or "").strip() + if result.returncode == 0: + if print_output: + info(details) + ok(label) + return True + fail(f"{label} (exit={result.returncode}) {details}") + return False + + +def devnet_info(): + """Load devnet-info.json as a dict.""" + with open(DEVNET_INFO) as f: + return json.load(f) + + +def ensure_foundry(): + """Install Foundry if cast is not on PATH.""" + if ( + subprocess.call( + "command -v cast", + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + != 0 + ): + info("Installing Foundry...") + os.system("curl -sSL https://foundry.paradigm.xyz | bash") + os.environ["PATH"] = os.path.expanduser("~/.foundry/bin:") + os.environ["PATH"] + os.system(os.path.expanduser("~/.foundry/bin/foundryup")) + assert_ok("command -v cast", "cast is installed") + + +# ── Version info ────────────────────────────────────────────── + + +def get_version_info(): + """Capture output of `foc-devnet version` for inclusion in reports.""" + for binary in ["./foc-devnet", "foc-devnet"]: + try: + result = subprocess.run( + [binary, "version"], + capture_output=True, + text=True, + timeout=10, + ) + if result.returncode == 0: + return result.stdout.strip() + except (FileNotFoundError, subprocess.TimeoutExpired): + continue + return "foc-devnet version: not available" + + +# ── Runner ──────────────────────────────────────────────────── + + +def _read_stream(stream, q, label): + """Read lines from a subprocess stream and enqueue them with a label.""" + try: + for line in stream: + q.put((label, line.rstrip("\n"))) + except ValueError: + pass # Pipe closed + finally: + q.put((label, None)) # Sentinel to signal stream EOF + + +def run_tests(): + """Run scenarios in ORDER. 
Returns list of (name, passed, elapsed_time, log_lines, timed_out).""" + here = os.path.dirname(os.path.abspath(__file__)) + results = [] + for name, timeout_sec in ORDER: + path = os.path.join(here, f"{name}.py") + info(f"=== {name} (timeout: {timeout_sec}s) ===") + test_start = time.time() + # Run the test in a subprocess, capturing stdout and stderr separately + process = subprocess.Popen( + [sys.executable, path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=1, # Line buffered + ) + q = queue.Queue() + stdout_lines = [] + stderr_lines = [] + timed_out = False + # Reader threads for non-blocking stdout/stderr capture + t_out = threading.Thread( + target=_read_stream, args=(process.stdout, q, "stdout"), daemon=True + ) + t_err = threading.Thread( + target=_read_stream, args=(process.stderr, q, "stderr"), daemon=True + ) + t_out.start() + t_err.start() + try: + streams_done = 0 + while streams_done < 2: + remaining = timeout_sec - (time.time() - test_start) + if remaining <= 0: + timed_out = True + process.kill() + break + try: + label, line = q.get(timeout=min(remaining, 1.0)) + if line is None: + streams_done += 1 + continue + if label == "stdout": + print(line) + stdout_lines.append(line) + else: + print(f" [stderr] {line}", file=sys.stderr) + stderr_lines.append(line) + except queue.Empty: + if process.poll() is not None and q.empty(): + break + continue + # Wait for reader threads to finish and drain remaining queue + t_out.join(timeout=3) + t_err.join(timeout=3) + while not q.empty(): + try: + label, line = q.get_nowait() + if line is None: + continue + if label == "stdout": + print(line) + stdout_lines.append(line) + else: + print(f" [stderr] {line}", file=sys.stderr) + stderr_lines.append(line) + except queue.Empty: + break + if timed_out: + timeout_msg = ( + f"[TIMEOUT] Test '{name}' exceeded {timeout_sec}s limit " + f"— {len(stdout_lines)} stdout and {len(stderr_lines)} stderr lines captured" + ) + print(timeout_msg, 
file=sys.stderr) + stdout_lines.append(timeout_msg) + try: + return_code = process.wait(timeout=5) + except subprocess.TimeoutExpired: + process.kill() + process.wait() + return_code = -1 + except Exception as e: + error_msg = f"[ERROR] Exception during test execution: {e}" + print(error_msg) + stdout_lines.append(error_msg) + process.kill() + process.wait() + return_code = -1 + elapsed_time = int(time.time() - test_start) + # Combine stdout and stderr into log_lines for the report + log_lines = stdout_lines.copy() + if stderr_lines: + log_lines.append("") + log_lines.append("--- stderr ---") + log_lines.extend(stderr_lines) + # Determine pass/fail based on return code and timeout + passed = return_code == 0 and not timed_out + results.append((name, passed, elapsed_time, log_lines, timed_out)) + return results + + +# ── Reporting ───────────────────────────────────────────────── + + +def write_report(results, elapsed): + """Write a markdown report to REPORT_MD. Returns path written.""" + total_scenarios = len(results) + scenario_pass = sum(1 for _, passed, _, _, _ in results if passed) + scenario_fail = total_scenarios - scenario_pass + with open(REPORT_MD, "w") as fh: + fh.write("# Scenario Test Report\n\n") + # If running in GitHub Actions, include a link to the run + github_run_id = os.environ.get("GITHUB_RUN_ID") + github_repo = os.environ.get("GITHUB_REPOSITORY") + if github_run_id and github_repo: + github_server = os.environ.get("GITHUB_SERVER_URL", "https://github.com") + ci_url = f"{github_server}/{github_repo}/actions/runs/{github_run_id}" + fh.write(f"**CI Run**: [{ci_url}]({ci_url})\n\n") + # Version info from foc-devnet version + version_info = get_version_info() + fh.write("## Version Info\n\n") + fh.write(f"```\n{version_info}\n```\n\n") + fh.write("## Summary\n\n") + fh.write("| Metric | Value |\n|--------|-------|\n") + fh.write( + f"| Total Scenarios | {total_scenarios} |\n| Scenarios Passed | {scenario_pass} |\n| Scenarios Failed | 
{scenario_fail} |\n" + ) + fh.write(f"| Duration | {elapsed}s |\n\n") + fh.write("## Test Results\n\n") + for name, passed, test_time, logs, timed_out in results: + icon = "✅" if passed else "❌" + if timed_out: + status = f"TIMEOUT ({test_time}s)" + else: + status = f"{'PASS' if passed else 'FAIL'} ({test_time}s)" + fh.write( + f"
\n{icon} {name} - {status}\n\n```\n" + ) + fh.write("\n".join(logs)) + fh.write("\n```\n
\n\n") + return REPORT_MD + + +if __name__ == "__main__": + start = time.time() + results = run_tests() + elapsed = int(time.time() - start) + + total_scenarios = len(results) + scenario_pass = sum(1 for _, passed, _, _, _ in results if passed) + scenario_fail = total_scenarios - scenario_pass + print(f"\n{'='*50}") + print( + f"Scenarios: {total_scenarios} Passed: {scenario_pass} Failed: {scenario_fail} ({elapsed}s)" + ) + # Show individual test timings + for name, passed, test_time, _, timed_out in results: + status_icon = "✅" if passed else "❌" + status_text = "TIMEOUT" if timed_out else ("PASS" if passed else "FAIL") + print(f" {status_icon} {name}: {status_text} ({test_time}s)") + + report = write_report(results, elapsed) + print(f"Report: {report}") + # Print CI run URL in stdout if available + if os.environ.get("GITHUB_RUN_ID") and os.environ.get("GITHUB_REPOSITORY"): + github_server = os.environ.get("GITHUB_SERVER_URL", "https://github.com") + ci_url = f"{github_server}/{os.environ.get('GITHUB_REPOSITORY')}/actions/runs/{os.environ.get('GITHUB_RUN_ID')}" + print(f"CI Run: {ci_url}") + sys.exit(0 if scenario_fail == 0 else 1) diff --git a/scenarios/test_basic_balances.py b/scenarios/test_basic_balances.py new file mode 100644 index 00000000..d9281521 --- /dev/null +++ b/scenarios/test_basic_balances.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# Verifies every devnet user has a positive FIL and USDFC balance. 
+import os +import sys + +# Ensure the project root (parent of scenarios/) is on sys.path +_project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +if _project_root not in sys.path: + sys.path.insert(0, _project_root) + +from scenarios.run import * + + +def run(): + ensure_foundry() + d = devnet_info()["info"] + lotus_rpc = d["lotus"]["host_rpc_url"] + usdfc_addr = d["contracts"]["mockusdfc_addr"] + users = d["users"] + assert_gt(len(users), 0, "at least one user exists") + + for user in users: + name, user_addr = user["name"], user["evm_addr"] + fil_wei = sh(f"cast balance {user_addr} --rpc-url {lotus_rpc}") + assert_gt(fil_wei, 0, f"{name} FIL balance > 0") + usdfc_raw = sh( + f"cast call {usdfc_addr} 'balanceOf(address)(uint256)' {user_addr} --rpc-url {lotus_rpc}" + ) + usdfc_wei = "".join(c for c in usdfc_raw if c.isdigit()) + assert_gt(usdfc_wei, 0, f"{name} USDFC balance > 0") + + +if __name__ == "__main__": + run() diff --git a/scenarios/test_caching_subsystem.py b/scenarios/test_caching_subsystem.py new file mode 100644 index 00000000..96aaaf49 --- /dev/null +++ b/scenarios/test_caching_subsystem.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +""" +Caching subsystem scenario. + +Checks whether uploading a small piece does not trigger caching and +whether a larger piece does trigger caching (> 32MB). Ensures that +cassandra rows are populated. 
+ +Standalone run: + python3 scenarios/test_caching_subsystem.py +""" + +import os +import sys +import time +import random +import tempfile +from pathlib import Path + +_project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +if _project_root not in sys.path: + sys.path.insert(0, _project_root) + +from scenarios.run import * + +SYNAPSE_SDK_REPO = "https://github.com/FilOzone/synapse-sdk/" +SMALL_FILE_SIZE = 20 * 1024 * 1024  # 20MB — below 32MB threshold +LARGE_FILE_SIZE = 60 * 1024 * 1024  # 60MB — above 32MB threshold +RAND_SEED_SMALL = 42 +RAND_SEED_LARGE = 84 +CACHE_WAIT_SECS = 10 +GOCQL_ERROR = "gocql: no hosts available in the pool" +_CHUNK = 1024 * 1024 + + +def _write_random_file(path: Path, size: int, seed: int) -> None: + """Write a deterministic pseudo-random file of exactly `size` bytes.""" + rng = random.Random(seed) + remaining = size + with path.open("wb") as fh: + while remaining > 0: + chunk = min(_CHUNK, remaining) + fh.write(rng.randbytes(chunk)) + remaining -= chunk + + +def _install_cqlsh(venv_dir): + """Install cqlsh into a temporary venv, return path to cqlsh binary.""" + cqlsh = os.path.join(venv_dir, "bin", "cqlsh") + info("--- Installing cqlsh into temp venv ---") + sh(f"python3 -m venv {venv_dir}") + sh(f"{venv_dir}/bin/pip install cqlsh") + assert_ok(f"test -x {cqlsh}", "cqlsh installed") + return cqlsh + + +def _ycql(cqlsh, ycql_port, query): + """Run a YCQL query on the host via cqlsh, return raw output.""" + return sh(f'{cqlsh} localhost {ycql_port} -u cassandra -p cassandra -e "{query}"') + + +def _upload_file(sdk_dir, filepath, label): + """Upload a single file via example-storage-e2e.js.""" + env = {**os.environ, "NETWORK": "devnet"} + run_cmd( + ["node", "utils/example-storage-e2e.js", str(filepath)], + cwd=str(sdk_dir), + env=env, + label=label, + print_output=True, + ) + + +def _verify_cache_layer(cqlsh, ycql_port, expected_is_empty=True): + """Assert that curio.pdp_cache_layer is empty iff `expected_is_empty` is True."""
+ info("--- Querying pdp_cache_layer ---") + out = _ycql(cqlsh, ycql_port, "SELECT * FROM curio.pdp_cache_layer") + info(f"CQL SELECT access: \n {out}") + actual_is_empty = "(0 rows)" in out + assert_eq(actual_is_empty, expected_is_empty, "ysql row count") + + +def run(): + assert_ok("command -v git", "git is installed") + assert_ok("command -v node", "node is installed") + assert_ok("command -v pnpm", "pnpm is installed") + + d = devnet_info()["info"] + sp = d["pdp_sps"][0] + yb = sp["yugabyte"] + ycql_port = yb["ycql_port"] + + with tempfile.TemporaryDirectory(prefix="cqlsh-venv-") as venv_dir: + cqlsh = _install_cqlsh(venv_dir) + + with tempfile.TemporaryDirectory(prefix="synapse-sdk-cache-") as tmp: + sdk_dir = Path(tmp) / "synapse-sdk" + info("--- Cloning synapse-sdk ---") + if not run_cmd( + ["git", "clone", SYNAPSE_SDK_REPO, str(sdk_dir)], + label="clone synapse-sdk", + ): + return + if not run_cmd( + ["git", "checkout", "master"], + cwd=str(sdk_dir), + label="checkout master HEAD", + ): + return + if not run_cmd(["pnpm", "install"], cwd=str(sdk_dir), label="pnpm install"): + return + if not run_cmd(["pnpm", "build"], cwd=str(sdk_dir), label="pnpm build"): + return + + small_file = sdk_dir / "small_20mb" + large_file = sdk_dir / "large_60mb" + _write_random_file(small_file, SMALL_FILE_SIZE, RAND_SEED_SMALL) + _write_random_file(large_file, LARGE_FILE_SIZE, RAND_SEED_LARGE) + + info("--- Uploading 20MB piece (below 32MB threshold) ---") + _upload_file(sdk_dir, small_file.name, "upload 20MB piece") + info(f"--- Waiting {CACHE_WAIT_SECS}s for caching tasks ---") + time.sleep(CACHE_WAIT_SECS) + _verify_cache_layer(cqlsh, ycql_port, expected_is_empty=True) + + info("--- Uploading 60MB piece (above 32MB threshold) ---") + _upload_file(sdk_dir, large_file.name, "upload 60MB piece") + time.sleep(CACHE_WAIT_SECS) + _verify_cache_layer(cqlsh, ycql_port, expected_is_empty=False) + + +if __name__ == "__main__": + run() diff --git a/scenarios/test_containers.py 
b/scenarios/test_containers.py new file mode 100644 index 00000000..b3227686 --- /dev/null +++ b/scenarios/test_containers.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# Verifies that every expected devnet container (lotus, lotus-miner, each PDP SP) is running. +import os +import sys + +# Ensure the project root (parent of scenarios/) is on sys.path +_project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +if _project_root not in sys.path: + sys.path.insert(0, _project_root) + +from scenarios.run import * + + +def run(): + d = devnet_info()["info"] + run_id = d.get("run_id", "") + + expected = [d["lotus"]["container_name"], d["lotus_miner"]["container_name"]] + sps = d.get("pdp_sps", []) + for sp in sps: + expected.append(sp["container_name"]) + + for name in expected: + status = sh( + f"docker inspect -f '{{{{.State.Status}}}}' {name} 2>/dev/null || echo missing" + ) + assert_eq(status, "running", f"container {name} is running") + + +if __name__ == "__main__": + run() diff --git a/scenarios/test_storage_e2e.py b/scenarios/test_storage_e2e.py new file mode 100644 index 00000000..3b068caa --- /dev/null +++ b/scenarios/test_storage_e2e.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +import os +import random +import sys +import tempfile +from pathlib import Path + +# Ensure the project root (parent of scenarios/) is on sys.path +_project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +if _project_root not in sys.path: + sys.path.insert(0, _project_root) + +from scenarios.run import * + +SYNAPSE_SDK_REPO = "https://github.com/FilOzone/synapse-sdk/" +RAND_FILE_NAME = "random_file" +RAND_FILE_SIZE = 20 * 1024 * 1024 +RAND_FILE_SEED = 42 +_RANDOM_CHUNK_SIZE = 1024 * 1024 + + +def _write_random_file(path: Path, size: int) -> None: + """Write a deterministic pseudo-random file of exactly `size` bytes.""" + rng = random.Random(RAND_FILE_SEED) + remaining = size + with path.open("wb") as fh: + while remaining > 0: + chunk =
min(_RANDOM_CHUNK_SIZE, remaining) + fh.write(rng.randbytes(chunk)) + remaining -= chunk + + +def run(): + assert_ok("command -v git", "git is installed") + assert_ok("command -v node", "node is installed") + assert_ok("command -v pnpm", "pnpm is installed") + + with tempfile.TemporaryDirectory(prefix="synapse-sdk-") as temp_dir: + sdk_dir = Path(temp_dir) / "synapse-sdk" + + info(f"--- Cloning synapse-sdk to {sdk_dir} ---") + if not run_cmd( + ["git", "clone", SYNAPSE_SDK_REPO, str(sdk_dir)], label="synapse-sdk cloned" + ): + return + + info("--- Checking out synapse-sdk @ master (latest) ---") + if not run_cmd( + ["git", "checkout", "master"], + cwd=str(sdk_dir), + label="synapse-sdk checked out at master head", + ): + return + + info("--- Installing synapse-sdk dependencies with pnpm ---") + if not run_cmd( + ["pnpm", "install"], cwd=str(sdk_dir), label="pnpm install completed" + ): + return + + info("--- Building synapse-sdk TypeScript packages ---") + if not run_cmd( + ["pnpm", "build"], cwd=str(sdk_dir), label="pnpm build completed" + ): + return + + random_file = sdk_dir / RAND_FILE_NAME + info(f"--- Creating random file ({RAND_FILE_SIZE} bytes) ---") + _write_random_file(random_file, RAND_FILE_SIZE) + actual_size = random_file.stat().st_size + assert_eq( + actual_size, + RAND_FILE_SIZE, + f"{RAND_FILE_NAME} created with exact size {RAND_FILE_SIZE} bytes", + ) + + info("--- Running Synapse SDK storage e2e script against devnet ---") + cmd_env = os.environ.copy() + cmd_env["NETWORK"] = "devnet" + run_cmd( + ["node", "utils/example-storage-e2e.js", RAND_FILE_NAME], + cwd=str(sdk_dir), + env=cmd_env, + label="NETWORK=devnet node utils/example-storage-e2e.js random_file", + print_output=True, + ) + + +if __name__ == "__main__": + run() diff --git a/scripts/install_precommit_hooks.sh b/scripts/install_precommit_hooks.sh new file mode 100755 index 00000000..0a6080ff --- /dev/null +++ b/scripts/install_precommit_hooks.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# 
───────────────────────────────────────────────────────────── +# install_precommit_hooks.sh — Install pre-commit hooks +# +# This script installs a pre-commit hook that runs lint.sh +# in check mode (FIX=0) before each commit. +# +# Usage: +# ./scripts/install_precommit_hooks.sh +# ───────────────────────────────────────────────────────────── +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" + +cd "$REPO_ROOT" + +# Get the actual git hooks directory (works for both regular repos and worktrees) +GIT_HOOKS_DIR="$(git rev-parse --git-path hooks)" +PRE_COMMIT_HOOK="$GIT_HOOKS_DIR/pre-commit" + +# Ensure hooks directory exists +mkdir -p "$GIT_HOOKS_DIR" + +# Create the pre-commit hook +cat > "$PRE_COMMIT_HOOK" << 'EOF' +#!/usr/bin/env bash +set -euo pipefail + +FIX="${FIX:-1}" +REPO_ROOT="$(git rev-parse --show-toplevel)" + +$REPO_ROOT/scripts/lint.sh +EOF + +# Make the hook executable +chmod +x "$PRE_COMMIT_HOOK" diff --git a/scripts/lint.sh b/scripts/lint.sh new file mode 100755 index 00000000..b4f77d99 --- /dev/null +++ b/scripts/lint.sh @@ -0,0 +1,112 @@ +#!/usr/bin/env bash +# ───────────────────────────────────────────────────────────── +# lint.sh — Unified linting script for foc-devnet +# +# Runs linters and formatters for Rust and Python code. +# Designed to work both locally and in CI. 
+# +# Modes: +# FIX=1 (default) — Auto-fix issues where possible +# FIX=0 — Check only, fail on issues +# +# Usage: +# ./scripts/lint.sh # Fix mode +# FIX=0 ./scripts/lint.sh # Check mode (CI) +# +# Requirements: +# Rust: cargo, rustfmt, clippy +# Python: black, ruff (or pip install black ruff) +# ───────────────────────────────────────────────────────────── +set -euo pipefail + +FIX="${FIX:-1}" + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +NC='\033[0m' + +FAIL=0 + +pass() { printf "${GREEN}✓${NC} %s\n" "$1"; } +fail() { printf "${RED}✗${NC} %s\n" "$1"; FAIL=1; } +skip() { printf "${YELLOW}⊘${NC} %s (skipped — tool not found)\n" "$1"; } +fixed() { printf "${BLUE}⟳${NC} %s (auto-fixed)\n" "$1"; } +info() { printf "${BLUE}ℹ${NC} %s\n" "$1"; } + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" + +cd "$REPO_ROOT" + +info "Checking Rust code..." + +if command -v cargo &>/dev/null; then + # ── cargo fmt ── + if [[ "$FIX" == "1" ]]; then + if cargo fmt --all; then + fixed "cargo fmt" + else + fail "cargo fmt failed" + fi + else + if cargo fmt --all -- --check &>/dev/null; then + pass "cargo fmt" + else + fail "cargo fmt — run './scripts/lint.sh' or 'cargo fmt --all' to fix" + fi + fi + + # ── cargo clippy ── + if cargo clippy --all-targets --all-features -- -D warnings &>/dev/null; then + pass "cargo clippy" + else + fail "cargo clippy — fix warnings before committing" + fi +else + skip "cargo (Rust checks)" +fi + +echo "" + +info "Checking Python code in scenarios/..." 
+ +PYTHON_FILES=$(find scenarios -name '*.py' 2>/dev/null || true) + +if [[ -z "$PYTHON_FILES" ]]; then + skip "Python files (none found in scenarios/)" +else + # ── black (formatter) ── + if command -v black &>/dev/null; then + if [[ "$FIX" == "1" ]]; then + if black scenarios/ &>/dev/null; then + fixed "black (Python formatter)" + else + fail "black failed" + fi + else + if black --check scenarios/ &>/dev/null; then + pass "black (Python formatter)" + else + fail "black — run './scripts/lint.sh' or 'black scenarios/' to fix" + fi + fi + else + skip "black (install with: pip install black)" + fi +fi + +echo "" + +echo "════════════════════════════════════════════════════════" +if [[ $FAIL -ne 0 ]]; then + printf "${RED}✗ Linting failed.${NC}\n" + if [[ "$FIX" == "0" ]]; then + echo " Run './scripts/lint.sh' (FIX=1 mode) to auto-fix issues." + fi + exit 1 +else + printf "${GREEN}✓ All linting checks passed.${NC}\n" +fi +echo "════════════════════════════════════════════════════════" diff --git a/src/cli.rs b/src/cli.rs index 993301dc..c78cd32d 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -23,13 +23,31 @@ pub enum Commands { Stop, /// Initialize foc-devnet by building and caching Docker images Init { - /// Curio source location (e.g., 'gittag:tag', 'gittag:url:tag', 'gitcommit:commit', 'gitcommit:url:commit', 'gitbranch:branch', 'gitbranch:url:branch', 'local:/path/to/curio') + /// Curio source location. + /// Magic values: 'latestTag' (newest stable tag, auto-detects branch), + /// 'latestTag:' (newest stable tag on given branch), + /// 'latestCommit' (tip of default branch), 'latestCommit:'. + /// Explicit: 'gittag:', 'gittag::', 'gitcommit:', + /// 'gitcommit::', 'gitbranch:', 'gitbranch::', + /// 'local:/path/to/curio'. #[arg(long)] curio: Option, - /// Lotus source location (e.g., 'gittag:v1.0.0', 'gittag:url:tag', 'gitcommit:abc123', 'gitcommit:url:commit', 'gitbranch:main', 'gitbranch:url:main', 'local:/path/to/lotus') + /// Lotus source location. 
+ /// Magic values: 'latestTag' (newest stable tag, auto-detects branch), + /// 'latestTag:' (newest stable tag on given branch), + /// 'latestCommit' (tip of default branch), 'latestCommit:'. + /// Explicit: 'gittag:', 'gittag::', 'gitcommit:', + /// 'gitcommit::', 'gitbranch:', 'gitbranch::', + /// 'local:/path/to/lotus'. #[arg(long)] lotus: Option, - /// Filecoin Services source location (e.g., 'gittag:v1.0.0', 'gittag:url:tag', 'gitcommit:abc123', 'gitcommit:url:commit', 'gitbranch:main', 'gitbranch:url:main', 'local:/path/to/filecoin-services') + /// Filecoin Services source location. + /// Magic values: 'latestTag' (newest stable tag, auto-detects branch), + /// 'latestTag:' (newest stable tag on given branch), + /// 'latestCommit' (tip of default branch), 'latestCommit:'. + /// Explicit: 'gittag:', 'gittag::', 'gitcommit:', + /// 'gitcommit::', 'gitbranch:', 'gitbranch::', + /// 'local:/path/to/filecoin-services'. #[arg(long)] filecoin_services: Option, /// Yugabyte download URL @@ -82,12 +100,12 @@ pub enum BuildCommands { pub enum ConfigCommands { /// Configure Lotus source location Lotus { - /// Lotus source location (e.g., 'gittag:v1.0.0', 'gitcommit:abc123', 'local:/path/to/lotus') + /// Lotus source location (e.g., 'latestTag', 'latestTag:master', 'latestCommit', 'latestCommit:main', 'gittag:v1.0.0', 'gitcommit:abc123', 'local:/path/to/lotus') source: String, }, /// Configure Curio source location Curio { - /// Curio source location (e.g., 'gittag:v1.0.0', 'gitcommit:abc123', 'local:/path/to/curio') + /// Curio source location (e.g., 'latestTag', 'latestTag:main', 'latestCommit', 'latestCommit:main', 'gittag:v1.0.0', 'gitcommit:abc123', 'local:/path/to/curio') source: String, }, } diff --git a/src/commands/build/repository.rs b/src/commands/build/repository.rs index 7520f032..b2d0b837 100644 --- a/src/commands/build/repository.rs +++ b/src/commands/build/repository.rs @@ -50,6 +50,15 @@ pub fn prepare_repository( prepare_git_repo(&repo_path, url)?; 
checkout_branch(&repo_path, branch)?; } + // LatestCommit / LatestTag are resolved to GitCommit / GitTag at init time + // and never appear in a saved config.toml, so this is a programming error. + Location::LatestCommit { .. } | Location::LatestTag { .. } => { + return Err( + "Dynamic location (latestCommit/latestTag) was not resolved before build. \ + Run 'foc-devnet init' first." + .into(), + ); + } } info!("Repository prepared successfully"); diff --git a/src/commands/init/config.rs b/src/commands/init/config.rs index 82966f3f..79c89334 100644 --- a/src/commands/init/config.rs +++ b/src/commands/init/config.rs @@ -1,11 +1,15 @@ //! Configuration generation utilities for foc-devnet initialization. //! //! This module handles the generation of default configuration files -//! and application of location overrides. +//! and application of location overrides. Dynamic location variants +//! (`LatestCommit`, `LatestTag`) are resolved to concrete values at init +//! time via [`super::latest_resolver`], ensuring the stored config always +//! records the exact commit or tag that was used. use std::fs; use tracing::{info, warn}; +use super::latest_resolver::resolve_location; use crate::config::{Config, Location}; use crate::paths::foc_devnet_config; @@ -67,6 +71,12 @@ pub fn generate_default_config( "https://github.com/FilOzone/filecoin-services.git", )?; + // Resolve any dynamic variants (LatestCommit / LatestTag) by querying the remote. + // The resolved concrete SHA or tag is stored in config.toml for reproducibility. + config.lotus = resolve_location(config.lotus)?; + config.curio = resolve_location(config.curio)?; + config.filecoin_services = resolve_location(config.filecoin_services)?; + // Override yugabyte URL if provided if let Some(url) = yugabyte_url { config.yugabyte_download_url = url; @@ -103,6 +113,8 @@ pub fn apply_location_override( Location::GitTag { ref url, .. } => url.clone(), Location::GitCommit { ref url, .. 
} => url.clone(), Location::GitBranch { ref url, .. } => url.clone(), + Location::LatestCommit { ref url, .. } => url.clone(), + Location::LatestTag { ref url, .. } => url.clone(), Location::LocalSource { .. } => default_url.to_string(), }; *location = Location::parse_with_default(&loc_str, &url) diff --git a/src/commands/init/latest_resolver.rs b/src/commands/init/latest_resolver.rs new file mode 100644 index 00000000..4e903462 --- /dev/null +++ b/src/commands/init/latest_resolver.rs @@ -0,0 +1,364 @@ +//! Resolver for dynamic location variants (`LatestCommit`, `LatestTag`). +//! +//! Queries remote Git repositories to resolve dynamic location variants to +//! concrete `GitCommit` / `GitTag` values at init time. The resolved SHA or +//! tag is then written to `config.toml` so that builds are always reproducible +//! and the exact version is recorded in the run state. +//! +//! `LatestCommit` uses `git ls-remote` to resolve to the tip of `main` +//! (or `master` if `main` does not exist — no local clone needed). +//! +//! `LatestTag` performs a blobless bare fetch of the default branch (`main` +//! or `master`) and all tags into a temporary directory, then runs `git tag` +//! to enumerate all fetched tags. Pre-release tags (those with semver +//! pre-release identifiers such as `-rc1`, `-alpha`, `-beta`) are filtered +//! out, and the highest stable version is returned. The `--merged` filter is +//! deliberately avoided because projects like Lotus cut releases on separate +//! branches that are never merged back into `master`/`main`. +//! +//! # Example +//! +//! ```text +//! foc-devnet init --curio latestCommit --lotus latestTag +//! // Queries remote → resolves to GitCommit { commit: "abc123..." } +//! // GitTag { tag: "v1.34.5" } +//! // Stores concrete values in config.toml +//! ``` + +use crate::config::Location; +use semver::Version; +use std::process::Command; +use tracing::info; + +/// Temporary bare repo used for tag-reachability queries. 
+struct TempBareRepo(std::path::PathBuf);
+
+impl TempBareRepo {
+    /// Initialise an empty bare repository in a system temp directory.
+    fn create() -> Result<Self, Box<dyn std::error::Error>> {
+        let dir = std::env::temp_dir().join(format!(
+            "foc-devnet-tag-probe-{}",
+            std::time::SystemTime::now()
+                .duration_since(std::time::UNIX_EPOCH)
+                .unwrap_or_default()
+                .as_nanos()
+        ));
+        let status = Command::new("git")
+            .args(["init", "--bare", dir.to_str().unwrap()])
+            .env("GIT_TERMINAL_PROMPT", "0")
+            .status()?;
+        if !status.success() {
+            return Err("git init --bare failed".into());
+        }
+        Ok(Self(dir))
+    }
+
+    /// Return the path of this bare repo.
+    fn path(&self) -> &std::path::Path {
+        &self.0
+    }
+}
+
+impl Drop for TempBareRepo {
+    fn drop(&mut self) {
+        let _ = std::fs::remove_dir_all(&self.0);
+    }
+}
+
+/// Resolve a `Location` to a concrete variant by querying the remote if needed.
+///
+/// `LatestCommit` and `LatestTag` are resolved against the remote repository.
+/// All other variants are returned unchanged.
+pub fn resolve_location(location: Location) -> Result<Location, Box<dyn std::error::Error>> {
+    match location {
+        Location::LatestCommit { url, branch } => {
+            let commit = fetch_latest_commit(&url, branch.as_deref())?;
+            info!("Resolved latestCommit for {} → {}", url, commit);
+            Ok(Location::GitCommit { url, commit })
+        }
+        Location::LatestTag { url, branch } => {
+            let tag = fetch_latest_tag(&url, branch.as_deref())?;
+            info!("Resolved latestTag for {} → {}", url, tag);
+            Ok(Location::GitTag { url, tag })
+        }
+        other => Ok(other),
+    }
+}
+
+/// Fetch the SHA of the tip of the given branch (or the auto-detected default
+/// branch) on a remote.
+///
+/// If `branch` is `None`, the default branch (`main` or `master`) is
+/// auto-detected from the remote. Fails if the resolved branch is not found.
+fn fetch_latest_commit(
+    url: &str,
+    branch: Option<&str>,
+) -> Result<String, Box<dyn std::error::Error>> {
+    let branch = resolve_branch(url, branch)?;
+    info!("Fetching latest commit on {} from {}", branch, url);
+
+    let output = Command::new("git")
+        .args(["ls-remote", url, &format!("refs/heads/{}", branch)])
+        .env("GIT_TERMINAL_PROMPT", "0")
+        .output()?;
+
+    if !output.status.success() {
+        return Err(format!(
+            "git ls-remote failed for {}: {}",
+            url,
+            String::from_utf8_lossy(&output.stderr).trim()
+        )
+        .into());
+    }
+
+    parse_ls_remote_commit(&String::from_utf8_lossy(&output.stdout), url)
+}
+
+/// Parse the commit SHA from `git ls-remote` stdout.
+///
+/// The output would be of the form:
+/// ```text
+/// 7741226198083e943a64d917e88a0a77d17aa30e refs/heads/master
+/// ```
+fn parse_ls_remote_commit(stdout: &str, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+    stdout
+        .lines()
+        .next()
+        .and_then(|line| line.split_whitespace().next())
+        .map(str::to_string)
+        .ok_or_else(|| format!("No commit found in ls-remote output for {}", url).into())
+}
+
+/// Resolve the default branch name for a remote repository.
+///
+/// Queries the remote for both `main` and `master` in a single `git ls-remote`
+/// call. Returns `"main"` if it exists, `"master"` if only that exists, or an
+/// error if neither is present.
+fn resolve_default_branch(url: &str) -> Result<String, Box<dyn std::error::Error>> {
+    let output = Command::new("git")
+        .args([
+            "ls-remote",
+            "--heads",
+            url,
+            "refs/heads/main",
+            "refs/heads/master",
+        ])
+        .env("GIT_TERMINAL_PROMPT", "0")
+        .output()?;
+
+    if !output.status.success() {
+        return Err(format!(
+            "git ls-remote --heads failed for {}: {}",
+            url,
+            String::from_utf8_lossy(&output.stderr).trim()
+        )
+        .into());
+    }
+
+    let stdout = String::from_utf8_lossy(&output.stdout);
+    let has_main = stdout.lines().any(|l| l.contains("refs/heads/main"));
+    let has_master = stdout.lines().any(|l| l.contains("refs/heads/master"));
+
+    if has_main {
+        Ok("main".to_string())
+    } else if has_master {
+        info!(
+            "Remote {} has no 'main' branch, falling back to 'master'",
+            url
+        );
+        Ok("master".to_string())
+    } else {
+        Err(format!("Remote {} has neither 'main' nor 'master' branch", url).into())
+    }
+}
+
+/// Return `branch` if explicitly provided, otherwise auto-detect the default
+/// branch (`main` / `master`) from the remote.
+fn resolve_branch(url: &str, branch: Option<&str>) -> Result<String, Box<dyn std::error::Error>> {
+    match branch {
+        Some(b) => Ok(b.to_string()),
+        None => resolve_default_branch(url),
+    }
+}
+
+/// Fetch the highest stable semver tag for a remote repo.
+///
+/// Strategy:
+/// 1. Resolve the branch: use `branch` if provided, otherwise auto-detect
+///    the default branch (`main` / `master`) from the remote.
+/// 2. Create a throwaway bare repo in a temp directory.
+/// 3. Blobless-fetch all tags from the remote (no file content downloaded).
+/// 4. Run `git tag` to enumerate all fetched tags.
+/// 5. Filter for stable semver tags (no `-rc`, `-alpha`, etc.) and return
+///    the highest by numeric segment comparison.
+///
+/// Note: We intentionally do *not* use `git tag --merged <branch>` because
+/// projects like Lotus cut releases on separate release branches that are
+/// never merged back into `master`/`main`. Using `--merged` would cause the
+/// resolver to return a stale version (e.g.
`v1.28.1` instead of `v1.35.0`).
+fn fetch_latest_tag(url: &str, branch: Option<&str>) -> Result<String, Box<dyn std::error::Error>> {
+    let branch = resolve_branch(url, branch)?;
+    info!("Fetching latest stable tag on {} from {}", branch, url);
+
+    let repo = TempBareRepo::create()?;
+
+    fetch_default_branch_and_tags(repo.path(), url, &branch)?;
+
+    let tags_output = Command::new("git")
+        .args(["tag"])
+        .current_dir(repo.path())
+        .output()?;
+
+    if !tags_output.status.success() {
+        return Err(format!(
+            "git tag failed: {}",
+            String::from_utf8_lossy(&tags_output.stderr).trim()
+        )
+        .into());
+    }
+
+    parse_latest_tag(&String::from_utf8_lossy(&tags_output.stdout), url)
+}
+
+/// Fetch the default branch and all tags from `url` into an existing bare repo.
+///
+/// Uses `--filter=blob:none` so only commit and tree objects are transferred
+/// (no file content), keeping the operation fast even for large repositories.
+fn fetch_default_branch_and_tags(
+    repo_path: &std::path::Path,
+    url: &str,
+    branch: &str,
+) -> Result<(), Box<dyn std::error::Error>> {
+    let refspec = format!("refs/heads/{b}:refs/heads/{b}", b = branch);
+    let status = Command::new("git")
+        .args(["fetch", "--tags", "--filter=blob:none", url, &refspec])
+        .current_dir(repo_path)
+        .env("GIT_TERMINAL_PROMPT", "0")
+        .status()?;
+
+    if !status.success() {
+        return Err(format!("git fetch failed for {}", url).into());
+    }
+    Ok(())
+}
+
+/// Parse and return the highest stable semver tag from `git tag` stdout.
+///
+/// Each line is a plain tag name (e.g. `v1.2.3`). Tags that cannot be parsed
+/// as a valid semver version, or that carry a pre-release identifier (e.g.
+/// `-rc1`, `-alpha`, `-beta`), are silently skipped. The remaining tags are
+/// sorted by the `semver::Version` `Ord` implementation and the highest is
+/// returned.
+fn parse_latest_tag(stdout: &str, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+    let mut tags: Vec<(Version, &str)> = stdout
+        .lines()
+        .map(str::trim)
+        .filter_map(|tag| {
+            // Strip leading 'v' before parsing, semver crate requires bare `1.2.3`
+            let raw = tag.trim_start_matches('v');
+            Version::parse(raw).ok().map(|v| (v, tag))
+        })
+        .filter(|(v, _)| v.pre.is_empty()) // exclude pre-release versions
+        .collect();
+
+    if tags.is_empty() {
+        return Err(format!(
+            "No stable semver tags reachable from default branch for {}",
+            url
+        )
+        .into());
+    }
+
+    tags.sort_by(|(a, _), (b, _)| a.cmp(b));
+    Ok(tags.last().unwrap().1.to_string())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_semver_sort_picks_highest() {
+        let output = "v1.9.0\nv1.10.0\nv1.2.3\nv1.10.1\n";
+        let tag = parse_latest_tag(output, "https://example.com/repo").unwrap();
+        assert_eq!(tag, "v1.10.1");
+    }
+
+    #[test]
+    fn test_semver_sort_ignores_rc_suffix() {
+        let output = "v1.34.3\nv1.34.4\n";
+        let tag = parse_latest_tag(output, "https://example.com/repo").unwrap();
+        assert_eq!(tag, "v1.34.4");
+    }
+
+    #[test]
+    fn test_parse_ls_remote_commit() {
+        let output = "abc123def456\tHEAD\n";
+        let commit = parse_ls_remote_commit(output, "https://example.com/repo").unwrap();
+        assert_eq!(commit, "abc123def456");
+    }
+
+    /// Simulate the stdout of `git ls-remote --heads` for branch resolution.
+ fn ls_remote_heads(branches: &[&str]) -> String { + branches + .iter() + .map(|b| format!("deadbeef\trefs/heads/{}\n", b)) + .collect() + } + + #[test] + fn test_resolve_default_branch_prefers_main() { + let stdout = ls_remote_heads(&["main", "master"]); + let has_main = stdout.lines().any(|l| l.contains("refs/heads/main")); + let has_master = stdout.lines().any(|l| l.contains("refs/heads/master")); + assert!(has_main); + let branch = if has_main { + "main" + } else if has_master { + "master" + } else { + "" + }; + assert_eq!(branch, "main"); + } + + #[test] + fn test_resolve_default_branch_falls_back_to_master() { + let stdout = ls_remote_heads(&["master", "develop"]); + let has_main = stdout.lines().any(|l| l.contains("refs/heads/main")); + let has_master = stdout.lines().any(|l| l.contains("refs/heads/master")); + assert!(!has_main); + assert!(has_master); + let branch = if has_main { + "main" + } else if has_master { + "master" + } else { + "" + }; + assert_eq!(branch, "master"); + } + + #[test] + fn test_parse_latest_tag() { + let output = "v1.0.0\nv1.2.0\nv1.1.0\n"; + let tag = parse_latest_tag(output, "https://example.com/repo").unwrap(); + assert_eq!(tag, "v1.2.0"); + } + + #[test] + fn test_parse_latest_tag_skips_rc() { + // v1.2.0-rc1 is excluded; v1.1.0 is the latest stable + let output = "v1.0.0\nv1.1.0\nv1.2.0-rc1\n"; + let tag = parse_latest_tag(output, "https://example.com/repo").unwrap(); + assert_eq!(tag, "v1.1.0"); + } + + #[test] + fn test_parse_latest_tag_skips_non_semver() { + // "latest" and bare "rc" strings should be silently ignored + let output = "latest\nv1.0.0\nrc\nv1.1.0\n"; + let tag = parse_latest_tag(output, "https://example.com/repo").unwrap(); + assert_eq!(tag, "v1.1.0"); + } +} diff --git a/src/commands/init/mod.rs b/src/commands/init/mod.rs index 25b0932e..e42338cf 100644 --- a/src/commands/init/mod.rs +++ b/src/commands/init/mod.rs @@ -13,6 +13,7 @@ pub mod artifacts; pub mod config; pub mod directories; pub mod keys; +pub mod 
latest_resolver; pub mod path_setup; pub mod repositories; diff --git a/src/commands/init/repositories.rs b/src/commands/init/repositories.rs index a23fa392..fbdb7d51 100644 --- a/src/commands/init/repositories.rs +++ b/src/commands/init/repositories.rs @@ -99,6 +99,13 @@ fn download_repository(name: &str, location: &Location) -> Result<(), Box { clone_and_checkout(name, url, None, None, Some(branch)) } + // LatestCommit / LatestTag are resolved to concrete variants before this function + // is called, so reaching here indicates a programming error. + Location::LatestCommit { .. } | Location::LatestTag { .. } => Err( + "Dynamic location (latestCommit/latestTag) was not resolved before repository \ + download. This is an internal error." + .into(), + ), } } diff --git a/src/commands/start/yugabyte/mod.rs b/src/commands/start/yugabyte/mod.rs index dd46151a..eae7bd70 100644 --- a/src/commands/start/yugabyte/mod.rs +++ b/src/commands/start/yugabyte/mod.rs @@ -102,7 +102,6 @@ fn spawn_yugabyte_instance( "--base_dir=/home/foc-user/yb_base", "--ui=true", "--callhome=false", - "--advertise_address=0.0.0.0", "--master_flags=rpc_bind_addresses=0.0.0.0", "--tserver_flags=rpc_bind_addresses=0.0.0.0,pgsql_proxy_bind_address=0.0.0.0:5433,cql_proxy_bind_address=0.0.0.0:9042", "--daemon=false", diff --git a/src/commands/status/git/formatters.rs b/src/commands/status/git/formatters.rs index 20a92e58..d0422378 100644 --- a/src/commands/status/git/formatters.rs +++ b/src/commands/status/git/formatters.rs @@ -78,6 +78,9 @@ pub fn format_location_info( ) if expected_branch == actual_branch => true, (Location::GitBranch { .. }, GitInfo::Tag(_) | GitInfo::Commit(_)) => true, // Assume it's ready if we have some valid state (Location::GitBranch { .. }, _) => false, + + // LatestCommit / LatestTag are resolved at init time; treat as not ready if somehow present. + (Location::LatestCommit { .. } | Location::LatestTag { .. 
}, _) => false, }; let status = if is_ready { @@ -125,6 +128,17 @@ pub fn format_location_info( "Not found".to_string(), ), }, + // Resolved at init time; display as their underlying type if somehow still present. + Location::LatestCommit { .. } => ( + "Latest Commit".to_string(), + "(unresolved)".to_string(), + "".to_string(), + ), + Location::LatestTag { .. } => ( + "Latest Tag".to_string(), + "(unresolved)".to_string(), + "".to_string(), + ), }; (source_type, version, commit, status) diff --git a/src/commands/status/git/repo_paths.rs b/src/commands/status/git/repo_paths.rs index b6fd59e3..2b4571c9 100644 --- a/src/commands/status/git/repo_paths.rs +++ b/src/commands/status/git/repo_paths.rs @@ -34,8 +34,12 @@ pub fn get_repo_path_from_config(location: &Location, component: &str) -> std::p // For local sources, check the specified directory std::path::PathBuf::from(dir) } - Location::GitTag { .. } | Location::GitCommit { .. } | Location::GitBranch { .. } => { - // For git sources, check if it exists in the foc-devnet code directory + Location::GitTag { .. } + | Location::GitCommit { .. } + | Location::GitBranch { .. } + | Location::LatestCommit { .. } + | Location::LatestTag { .. } => { + // For git sources (including unresolved dynamic variants), use the foc-devnet code directory foc_devnet_code().join(component) } } diff --git a/src/config.rs b/src/config.rs index 85b3e822..f1ee24bd 100644 --- a/src/config.rs +++ b/src/config.rs @@ -36,26 +36,67 @@ pub enum Location { /// The `url` field is the Git repository URL, and `branch` is the specific /// branch (e.g., "main", "develop") to check out. GitBranch { url: String, branch: String }, + + /// Resolve to the latest commit on the given (or auto-detected default) branch at init time. + /// + /// `url` is the Git repository URL. `branch` pins a specific branch; when + /// `None` the default branch (`main` / `master`) is auto-detected from the + /// remote. 
At init time this is immediately resolved to a concrete `GitCommit`
+    /// so the stored config always records the exact SHA used.
+    ///
+    /// Example CLI usage: `--curio latestCommit` or `--curio latestCommit:main`
+    LatestCommit { url: String, branch: Option<String> },
+
+    /// Resolve to the latest stable semver tag reachable from the given (or
+    /// auto-detected default) branch at init time.
+    ///
+    /// `url` is the Git repository URL. `branch` pins a specific branch; when
+    /// `None` the default branch (`main` / `master`) is auto-detected from the
+    /// remote. At init time this is immediately resolved to a concrete `GitTag`
+    /// so the stored config always records the exact tag used.
+    ///
+    /// Example CLI usage: `--lotus latestTag` or `--lotus latestTag:release/v2`
+    LatestTag { url: String, branch: Option<String> },
 }
 
 impl Location {
-    /// Parse a location string in the format "type:value" or "type:url:value"
+    /// Parse a location string in the format "type" or "type:value".
     ///
     /// Supported formats:
-    /// - "gittag:tag" (uses default URL)
-    /// - "gitcommit:commit" (uses default URL)
-    /// - "gitbranch:branch" (uses default URL)
-    /// - "local:dir"
-    /// - "gittag:url:tag"
-    /// - "gitcommit:url:commit"
-    /// - "gitbranch:url:branch"
-    ///
-    /// Where url can contain colons (e.g., https://github.com/repo.git)
+    /// - `latestCommit` — auto-detects default branch (`main` / `master`)
+    /// - `latestCommit:<branch>` — uses specified branch (e.g. `latestCommit:release/v2`)
+    /// - `latestTag` — auto-detects default branch
+    /// - `latestTag:<branch>` — uses specified branch (e.g.
`latestTag:master`) + /// - `gittag:` — (uses default URL) + /// - `gitcommit:` — (uses default URL) + /// - `gitbranch:` — (uses default URL) + /// - `local:` + /// - `gittag::` + /// - `gitcommit::` + /// - `gitbranch::` pub fn parse_with_default(s: &str, default_url: &str) -> Result { + // Handle bare magic keywords (no colon) — auto-detect branch + match s { + "latestCommit" => { + return Ok(Location::LatestCommit { + url: default_url.to_string(), + branch: None, + }) + } + "latestTag" => { + return Ok(Location::LatestTag { + url: default_url.to_string(), + branch: None, + }) + } + _ => {} + } + let parts: Vec<&str> = s.split(':').collect(); if parts.len() < 2 { return Err(format!( - "Invalid location format: {}. Expected 'type:value' or 'type:url:value'", + "Invalid location format: '{}'. Expected 'latestCommit', 'latestTag', \ + 'latestCommit:', 'latestTag:', or 'gittag/gitcommit/gitbranch/local:...'", s )); } @@ -64,6 +105,15 @@ impl Location { let remaining = &parts[1..].join(":"); match location_type { + // latestCommit: and latestTag: + "latestCommit" => Ok(Location::LatestCommit { + url: default_url.to_string(), + branch: Some(remaining.to_string()), + }), + "latestTag" => Ok(Location::LatestTag { + url: default_url.to_string(), + branch: Some(remaining.to_string()), + }), "local" => Ok(Location::LocalSource { dir: remaining.to_string(), }), @@ -107,7 +157,7 @@ impl Location { } } _ => Err(format!( - "Unknown location type: {}. Supported types: local, gittag, gitcommit, gitbranch", + "Unknown location type: {}. 
Supported types: latestCommit, latestTag, local, gittag, gitcommit, gitbranch", location_type )), } diff --git a/src/external_api/devnet_info.rs b/src/external_api/devnet_info.rs index 7adfafcf..af5d4150 100644 --- a/src/external_api/devnet_info.rs +++ b/src/external_api/devnet_info.rs @@ -140,4 +140,6 @@ pub struct YugabyteInfo { pub master_rpc_port: u16, /// YSQL port for Postgres-compatible connections pub ysql_port: u16, + /// YCQL port for Cassandra-compatible connections + pub ycql_port: u16, } diff --git a/src/external_api/export.rs b/src/external_api/export.rs index 6e792654..203dbba4 100644 --- a/src/external_api/export.rs +++ b/src/external_api/export.rs @@ -286,10 +286,19 @@ fn build_yugabyte_info( provider_id ))?; + let ycql_port: u16 = ctx + .get(&format!("yugabyte_{}_ycql_port", provider_id)) + .and_then(|p| p.parse().ok()) + .ok_or(format!( + "yugabyte_{}_ycql_port not found or invalid in context", + provider_id + ))?; + Ok(YugabyteInfo { web_ui_url: format!("http://localhost:{}", web_ui_port), master_rpc_port, ysql_port, + ycql_port, }) } diff --git a/src/main_app/version.rs b/src/main_app/version.rs index a0c6a576..e42f75ae 100644 --- a/src/main_app/version.rs +++ b/src/main_app/version.rs @@ -70,5 +70,11 @@ fn print_location_info(label: &str, location: &Location) { Location::GitBranch { url, branch } => { info!("{}: {}, branch {}", label, url, branch); } + Location::LatestCommit { url, .. } => { + info!("{}: {}, latest commit (unresolved)", label, url); + } + Location::LatestTag { url, .. } => { + info!("{}: {}, latest tag (unresolved)", label, url); + } } }