From 878ca28f9ec41ce28f8353c7e833f41fae3f37c3 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Thu, 12 Mar 2026 13:52:47 +0200 Subject: [PATCH 01/16] Require token for mutating API endpoints in public mode Remove @public_access decorator from analyze_repo and switch_commit endpoints so that all three mutating endpoints (analyze_folder, analyze_repo, switch_commit) require a valid token even when CODE_GRAPH_PUBLIC=1. Read-only endpoints remain publicly accessible. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- api/index.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/api/index.py b/api/index.py index 354f5c6..b65a47e 100644 --- a/api/index.py +++ b/api/index.py @@ -412,7 +412,6 @@ def analyze_folder(): return jsonify(response), 200 @app.route('/api/analyze_repo', methods=['POST']) -@public_access # Apply public access decorator @token_required # Apply token authentication decorator def analyze_repo(): """ @@ -448,7 +447,6 @@ def analyze_repo(): return jsonify(response), 200 @app.route('/api/switch_commit', methods=['POST']) -@public_access # Apply public access decorator @token_required # Apply token authentication decorator def switch_commit(): """ From c47f93e27900379f93e1d8e68a2bc5d8e5cecb6a Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Thu, 12 Mar 2026 14:13:24 +0200 Subject: [PATCH 02/16] Migrate backend from Flask to FastAPI - Replace Flask with FastAPI + Uvicorn in pyproject.toml - Rewrite api/index.py: Pydantic request models, Depends() auth, FileResponse SPA serving, JSONResponse error responses - Auth decorators become dependency injection: public_or_auth (public endpoints), token_required (mutating endpoints) - Rewrite tests/index.py: FastAPI app factory, explicit imports - Update 7 endpoint test files: TestClient, .json() method, /api/ URL prefix - Update Makefile, start.sh, CI workflow: flask run -> uvicorn api.index:app - Endpoints remain synchronous (FastAPI auto-threads blocking calls) Co-authored-by: Copilot 
<223556219+Copilot@users.noreply.github.com> --- .github/workflows/playwright.yml | 8 +- Makefile | 4 +- api/index.py | 586 +++++++------------------ pyproject.toml | 4 +- start.sh | 6 +- tests/endpoints/test_auto_complete.py | 19 +- tests/endpoints/test_find_paths.py | 33 +- tests/endpoints/test_get_neighbors.py | 22 +- tests/endpoints/test_graph_entities.py | 15 +- tests/endpoints/test_list_commits.py | 22 +- tests/endpoints/test_list_repos.py | 14 +- tests/endpoints/test_repo_info.py | 21 +- tests/index.py | 559 +++++++---------------- uv.lock | 212 +++++++-- 14 files changed, 545 insertions(+), 980 deletions(-) diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml index 7e3441c..3652aca 100644 --- a/.github/workflows/playwright.yml +++ b/.github/workflows/playwright.yml @@ -81,7 +81,7 @@ jobs: npx playwright install chromium firefox npx playwright install-deps chromium firefox - - name: Start Flask server + - name: Start server id: start-server env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} @@ -89,9 +89,9 @@ jobs: CODE_GRAPH_PUBLIC: "1" MODEL_NAME: "openai/gpt-4.1-mini" run: | - uv run flask --app api/index.py run --host 0.0.0.0 --port 5000 & + uv run uvicorn api.index:app --host 0.0.0.0 --port 5000 & echo "pid=$!" >> "$GITHUB_OUTPUT" - # Wait for Flask to be ready + # Wait for server to be ready timeout 30 bash -c 'until curl -s http://localhost:5000/ > /dev/null 2>&1; do sleep 0.5; done' - name: Run Playwright tests @@ -102,7 +102,7 @@ jobs: MODEL_NAME: "openai/gpt-4.1-mini" run: npx playwright test --shard=${{ matrix.shard }}/2 --reporter=dot,list - - name: Stop Flask server + - name: Stop server if: always() run: kill ${{ steps.start-server.outputs.pid }} 2>/dev/null || true diff --git a/Makefile b/Makefile index b433e73..37aa58d 100644 --- a/Makefile +++ b/Makefile @@ -40,10 +40,10 @@ clean: ## Clean up build and test artifacts find . 
-name "*.pyo" -delete run-dev: build-dev ## Run development server (Python backend serving built frontend) - uv run flask --app api/index.py run --host $${HOST:-127.0.0.1} --port $${PORT:-5000} --debug + uv run uvicorn api.index:app --host $${HOST:-127.0.0.1} --port $${PORT:-5000} --reload run-prod: build-prod ## Run production server - uv run flask --app api/index.py run --host $${HOST:-0.0.0.0} --port $${PORT:-5000} + uv run uvicorn api.index:app --host $${HOST:-0.0.0.0} --port $${PORT:-5000} docker-falkordb: ## Start FalkorDB in Docker for testing docker run -d --name falkordb-test -p 6379:6379 falkordb/falkordb:latest diff --git a/api/index.py b/api/index.py index b65a47e..dd29cbf 100644 --- a/api/index.py +++ b/api/index.py @@ -1,9 +1,12 @@ """ Main API module for CodeGraph. """ import os +import logging from pathlib import Path -from functools import wraps + from dotenv import load_dotenv -from flask import Flask, request, jsonify +from fastapi import Depends, FastAPI, Header, HTTPException, Query +from fastapi.responses import FileResponse, JSONResponse +from pydantic import BaseModel from api.analyzers.source_analyzer import SourceAnalyzer from api.git_utils import git_utils @@ -18,501 +21,240 @@ load_dotenv() # Configure the logger -import logging logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') logger = logging.getLogger(__name__) +# --------------------------------------------------------------------------- +# Authentication helpers +# --------------------------------------------------------------------------- + SECRET_TOKEN = os.getenv('SECRET_TOKEN') -def verify_token(token): - """ Verify the token provided in the request """ + +def _verify_token(token: str | None) -> bool: + """Verify the token provided in the request.""" if token is not None and token.startswith("Bearer "): token = token[len("Bearer "):] return token == SECRET_TOKEN or (token is None and SECRET_TOKEN is None) -def 
token_required(f): - """ Decorator to protect routes with token authentication """ - @wraps(f) - def decorated_function(*args, **kwargs): - token = request.headers.get('Authorization') # Get token from header - if not verify_token(token): - return jsonify(message="Unauthorized"), 401 - return f(*args, **kwargs) - return decorated_function - -app = Flask(__name__, - static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'), - static_url_path='') +def public_or_auth(authorization: str | None = Header(None)): + """Dependency: skip auth when CODE_GRAPH_PUBLIC=1, otherwise require token.""" + if os.environ.get("CODE_GRAPH_PUBLIC", "0") == "1": + return + if not _verify_token(authorization): + raise HTTPException(status_code=401, detail="Unauthorized") +def token_required(authorization: str | None = Header(None)): + """Dependency: always require a valid token.""" + if not _verify_token(authorization): + raise HTTPException(status_code=401, detail="Unauthorized") -@app.route('/') -def serve_react_app(): - """Serve the React app's index.html for the root route.""" - return app.send_static_file('index.html') +# --------------------------------------------------------------------------- +# Pydantic request models +# --------------------------------------------------------------------------- +class RepoRequest(BaseModel): + repo: str -@app.errorhandler(404) -def not_found(e): - """Serve index.html for any unknown route (SPA catch-all).""" - try: - return app.send_static_file('index.html') - except Exception: - return jsonify({"error": "Not found"}), 404 - -def public_access(f): - """ Decorator that bypasses token_required when CODE_GRAPH_PUBLIC=1. - Place above @token_required on routes accessible to the frontend. 
""" - @wraps(f) - def decorated_function(*args, **kwargs): - public = os.environ.get("CODE_GRAPH_PUBLIC", "0") - if public == "1": - # Skip token_required by calling the original function directly - return f.__wrapped__(*args, **kwargs) - return f(*args, **kwargs) - return decorated_function - -@app.route('/api/graph_entities', methods=['GET']) -@public_access -@token_required -def graph_entities(): - """ - Endpoint to fetch sub-graph entities from a given repository. - The repository is specified via the 'repo' query parameter. - - Returns: - - 200: Successfully returns the sub-graph. - - 400: Missing or invalid 'repo' parameter. - - 500: Internal server error or database connection issue. - """ - - # Access the 'repo' parameter from the GET request - repo = request.args.get('repo') - - if not repo: - logging.error("Missing 'repo' parameter in request.") - return jsonify({"status": "Missing 'repo' parameter"}), 400 +class NeighborsRequest(BaseModel): + repo: str + node_ids: list[int] - if not graph_exists(repo): - logging.error("Missing project %s", repo) - return jsonify({"status": f"Missing project {repo}"}), 400 +class AutoCompleteRequest(BaseModel): + repo: str + prefix: str - try: - # Initialize the graph with the provided repo and credentials - g = Graph(repo) +class FindPathsRequest(BaseModel): + repo: str + src: int + dest: int - # Retrieve a sub-graph of up to 500 entities - sub_graph = g.get_sub_graph(500) +class ChatRequest(BaseModel): + repo: str + msg: str - logging.info("Successfully retrieved sub-graph for repo: %s", repo) - response = { - 'status': 'success', - 'entities': sub_graph - } +class AnalyzeFolderRequest(BaseModel): + path: str + ignore: list[str] = [] - return jsonify(response), 200 +class AnalyzeRepoRequest(BaseModel): + repo_url: str + ignore: list[str] = [] - except Exception as e: - logging.error("Error retrieving sub-graph for repo '%s': %s", repo, e) - return jsonify({"status": "Internal server error"}), 500 +class 
SwitchCommitRequest(BaseModel): + repo: str + commit: str +# --------------------------------------------------------------------------- +# Application +# --------------------------------------------------------------------------- -@app.route('/api/get_neighbors', methods=['POST']) -@public_access -@token_required -def get_neighbors(): - """ - Endpoint to get neighbors of a nodes list in the graph. - Expects 'repo' and 'node_ids' as body parameters. +STATIC_DIR = Path(__file__).resolve().parent.parent / "app" / "dist" - Returns: - JSON response containing neighbors or error messages. - """ +app = FastAPI() - # Get JSON data from the request - data = request.get_json() +# --------------------------------------------------------------------------- +# API routes +# --------------------------------------------------------------------------- - # Get query parameters - repo = data.get('repo') - node_ids = data.get('node_ids') +@app.get('/api/graph_entities') +def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): + """Fetch sub-graph entities from a given repository.""" - # Validate 'repo' parameter if not repo: - logging.error("Repository name is missing in the request.") - return jsonify({"status": "Repository name is required."}), 400 - - # Validate 'node_ids' parameter - if not node_ids: - logging.error("Node IDs is missing in the request.") - return jsonify({"status": "Node IDs is required."}), 400 + logging.error("Missing 'repo' parameter in request.") + return JSONResponse({"status": "Missing 'repo' parameter"}, status_code=400) - # Validate repo exists if not graph_exists(repo): logging.error("Missing project %s", repo) - return jsonify({"status": f"Missing project {repo}"}), 400 - - # Initialize the graph with the provided repository - g = Graph(repo) + return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) - # Fetch the neighbors of the specified node - neighbors = g.get_neighbors(node_ids) - - # Log and return the 
neighbors - logging.info("Successfully retrieved neighbors for node IDs %s in repo '%s'.", node_ids, repo) + try: + g = Graph(repo) + sub_graph = g.get_sub_graph(500) - response = { - 'status': 'success', - 'neighbors': neighbors - } + logging.info("Successfully retrieved sub-graph for repo: %s", repo) + return {"status": "success", "entities": sub_graph} - return jsonify(response), 200 + except Exception as e: + logging.error("Error retrieving sub-graph for repo '%s': %s", repo, e) + return JSONResponse({"status": "Internal server error"}, status_code=500) -@app.route('/api/auto_complete', methods=['POST']) -@public_access -@token_required -def auto_complete(): - """ - Endpoint to process auto-completion requests for a repository based on a prefix. - Returns: - JSON response with auto-completion suggestions or an error message. - """ +@app.post('/api/get_neighbors') +def get_neighbors(data: NeighborsRequest, _=Depends(public_or_auth)): + """Get neighbors of a nodes list in the graph.""" - # Get JSON data from the request - data = request.get_json() + if not graph_exists(data.repo): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - # Validate that 'repo' is provided - repo = data.get('repo') - if repo is None: - return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 + g = Graph(data.repo) + neighbors = g.get_neighbors(data.node_ids) - # Validate that 'prefix' is provided - prefix = data.get('prefix') - if prefix is None: - return jsonify({'status': 'Missing mandatory parameter "prefix"'}), 400 + logging.info("Successfully retrieved neighbors for node IDs %s in repo '%s'.", + data.node_ids, data.repo) + return {"status": "success", "neighbors": neighbors} - # Validate repo exists - if not graph_exists(repo): - return jsonify({'status': f'Missing project {repo}'}), 400 - # Fetch auto-completion results - completions = prefix_search(repo, prefix) 
+@app.post('/api/auto_complete') +def auto_complete(data: AutoCompleteRequest, _=Depends(public_or_auth)): + """Process auto-completion requests for a repository based on a prefix.""" - # Create a success response - response = { - 'status': 'success', - 'completions': completions - } + if not graph_exists(data.repo): + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - return jsonify(response), 200 + completions = prefix_search(data.repo, data.prefix) + return {"status": "success", "completions": completions} -@app.route('/api/list_repos', methods=['GET']) -@public_access -@token_required -def list_repos(): - """ - Endpoint to list all available repositories. - Returns: - JSON response with a list of repositories or an error message. - """ +@app.get('/api/list_repos') +def list_repos(_=Depends(public_or_auth)): + """List all available repositories.""" - # Fetch list of repositories repos = get_repos() + return {"status": "success", "repositories": repos} - # Create a success response with the list of repositories - response = { - 'status': 'success', - 'repositories': repos - } - - return jsonify(response), 200 -@app.route('/api/repo_info', methods=['POST']) -@public_access -@token_required -def repo_info(): - """ - Endpoint to retrieve information about a specific repository. +@app.post('/api/repo_info') +def repo_info(data: RepoRequest, _=Depends(public_or_auth)): + """Retrieve information about a specific repository.""" - Expected JSON payload: - { - "repo": - } - - Returns: - JSON: A response containing the status and graph statistics (node and edge counts). - - 'status': 'success' if successful, or an error message. - - 'info': A dictionary with the node and edge counts if the request is successful. 
- """ - - # Get JSON data from the request - data = request.get_json() - - # Validate the 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 - - # Initialize the graph with the provided repository name - g = Graph(repo) - - # Retrieve statistics from the graph + g = Graph(data.repo) stats = g.stats() - info = get_repo_info(repo) + info = get_repo_info(data.repo) if stats is None or info is None: - return jsonify({'status': f'Missing repository "{repo}"'}), 400 + return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) stats |= info + return {"status": "success", "info": stats} - # Create a response - response = { - 'status': 'success', - 'info': stats - } - - return jsonify(response), 200 - -@app.route('/api/find_paths', methods=['POST']) -@public_access -@token_required -def find_paths(): - """ - Finds all paths between a source node (src) and a destination node (dest) in the graph. - The graph is associated with the repository (repo) provided in the request. - - Request Body (JSON): - - repo (str): Name of the repository. - - src (int): ID of the source node. - - dest (int): ID of the destination node. - - Returns: - A JSON response with: - - status (str): Status of the request ("success" or "error"). - - paths (list): List of paths between the source and destination nodes. 
- """ - - # Get JSON data from the request - data = request.get_json() - - # Validate 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 - - # Validate 'src' parameter - src = data.get('src') - if src is None: - return jsonify({'status': 'Missing mandatory parameter "src"'}), 400 - if not isinstance(src, int): - return jsonify({'status': "src node id must be int"}), 400 - - # Validate 'dest' parameter - dest = data.get('dest') - if dest is None: - return jsonify({'status': 'Missing mandatory parameter "dest"'}), 400 - if not isinstance(dest, int): - return jsonify({'status': "dest node id must be int"}), 400 - if not graph_exists(repo): - logging.error("Missing project %s", repo) - return jsonify({"status": f"Missing project {repo}"}), 400 +@app.post('/api/find_paths') +def find_paths(data: FindPathsRequest, _=Depends(public_or_auth)): + """Find all paths between a source and destination node in the graph.""" - # Initialize graph with provided repo and credentials - g = Graph(repo) + if not graph_exists(data.repo): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - # Find paths between the source and destination nodes - paths = g.find_paths(src, dest) + g = Graph(data.repo) + paths = g.find_paths(data.src, data.dest) + return {"status": "success", "paths": paths} - # Create and return a successful response - response = { 'status': 'success', 'paths': paths } - return jsonify(response), 200 +@app.post('/api/chat') +def chat(data: ChatRequest, _=Depends(public_or_auth)): + """Chat with the CodeGraph language model.""" -@app.route('/api/chat', methods=['POST']) -@public_access -@token_required -def chat(): - """ Endpoint to chat with the CodeGraph language model. 
""" + try: + answer = ask(data.repo, data.msg) + except Exception as e: + return JSONResponse({"status": "error", "response": str(e)}, status_code=500) - # Get JSON data from the request - data = request.get_json() + return {"status": "success", "response": answer} - # Validate 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 - # Get optional 'label' and 'relation' parameters - msg = data.get('msg') - if msg is None: - return jsonify({'status': 'Missing mandatory parameter "msg"'}), 400 +@app.post('/api/analyze_folder') +def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): + """Analyze local source code. Always requires a valid token.""" - try: - answer = ask(repo, msg) - except Exception as e: - return jsonify({'status': 'error', 'response': str(e)}), 500 + if not os.path.isdir(data.path): + logging.error("Path '%s' does not exist or is not a directory", data.path) + return JSONResponse({"status": "Invalid path: must be an existing directory"}, + status_code=400) - # Create and return a successful response - response = { 'status': 'success', 'response': answer } + proj_name = Path(data.path).name + g = Graph(proj_name) - return jsonify(response), 200 + analyzer = SourceAnalyzer() + analyzer.analyze_local_folder(data.path, g, data.ignore) -@app.route('/api/analyze_folder', methods=['POST']) -@token_required # Apply token authentication decorator -def analyze_folder(): - """ - Endpoint to analyze local source code - Expects 'path' and optionally an ignore list. 
+ return {"status": "success", "project": proj_name} - Returns: - JSON response with status and error message if applicable - Status codes: - 200: Success - 400: Invalid input - 500: Internal server error - """ - # Get JSON data from the request - data = request.get_json() +@app.post('/api/analyze_repo') +def analyze_repo(data: AnalyzeRepoRequest, _=Depends(token_required)): + """Analyze a GitHub repository. Always requires a valid token.""" - # Get query parameters - path = data.get('path') - ignore = data.get('ignore', []) + logger.debug('Received repo_url: %s', data.repo_url) - # Validate input parameters - if not path: - logging.error("'path' is missing from the request.") - return jsonify({"status": "'path' is required."}), 400 + proj = Project.from_git_repository(data.repo_url) + proj.analyze_sources(data.ignore) + proj.process_git_history(data.ignore) - # Validate path exists and is a directory - if not os.path.isdir(path): - logging.error("Path '%s' does not exist or is not a directory", path) - return jsonify({"status": "Invalid path: must be an existing directory"}), 400 + return {"status": "success"} - # Validate ignore list contains valid paths - if not isinstance(ignore, list): - logging.error("'ignore' must be a list of paths") - return jsonify({"status": "'ignore' must be a list of paths"}), 400 - proj_name = Path(path).name +@app.post('/api/switch_commit') +def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): + """Switch a repository to a specific commit. 
Always requires a valid token.""" - # Initialize the graph with the provided project name - g = Graph(proj_name) + git_utils.switch_commit(data.repo, data.commit) + return {"status": "success"} - # Analyze source code within given folder - analyzer = SourceAnalyzer() - analyzer.analyze_local_folder(path, g, ignore) - - # Return response - response = { - 'status': 'success', - 'project': proj_name - } - return jsonify(response), 200 - -@app.route('/api/analyze_repo', methods=['POST']) -@token_required # Apply token authentication decorator -def analyze_repo(): - """ - Analyze a GitHub repository. - - Expected JSON payload: - { - "repo_url": "string", - "ignore": ["string"] # optional - } - - Returns: - JSON response with processing status - """ - - data = request.get_json() - url = data.get('repo_url') - if url is None: - return jsonify({'status': 'Missing mandatory parameter "url"'}), 400 - logger.debug('Received repo_url: %s', url) - - ignore = data.get('ignore', []) - - proj = Project.from_git_repository(url) - proj.analyze_sources(ignore) - proj.process_git_history(ignore) - - # Create a response - response = { - 'status': 'success', - } - - return jsonify(response), 200 - -@app.route('/api/switch_commit', methods=['POST']) -@token_required # Apply token authentication decorator -def switch_commit(): - """ - Endpoint to switch a repository to a specific commit. - - Returns: - JSON response with the change set or an error message. 
- """ - - # Get JSON data from the request - data = request.get_json() - - # Validate that 'repo' is provided - repo = data.get('repo') - if repo is None: - return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 - - # Validate that 'commit' is provided - commit = data.get('commit') - if commit is None: - return jsonify({'status': 'Missing mandatory parameter "commit"'}), 400 - - # Attempt to switch the repository to the specified commit - git_utils.switch_commit(repo, commit) - - # Create a success response - response = { - 'status': 'success' - } - - return jsonify(response), 200 - -@app.route('/api/list_commits', methods=['POST']) -@public_access # Apply public access decorator -@token_required # Apply token authentication decorator -def list_commits(): - """ - Endpoint to list all commits of a specified repository. - - Request JSON Structure: - { - "repo": "repository_name" - } - - Returns: - JSON response with a list of commits or an error message. - """ - - # Get JSON data from the request - data = request.get_json() - - # Validate the presence of the 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 - - # Initialize GitGraph object to interact with the repository - git_graph = GitGraph(git_utils.GitRepoName(repo)) - - # Fetch commits from the repository - commits = git_graph.list_commits() - # Return success response with the list of commits - response = { - 'status': 'success', - 'commits': commits - } +@app.post('/api/list_commits') +def list_commits(data: RepoRequest, _=Depends(public_or_auth)): + """List all commits of a specified repository.""" - return jsonify(response), 200 \ No newline at end of file + git_graph = GitGraph(git_utils.GitRepoName(data.repo)) + commits = git_graph.list_commits() + return {"status": "success", "commits": commits} + +# --------------------------------------------------------------------------- +# SPA static file serving 
(must come after API routes) +# --------------------------------------------------------------------------- + +INDEX_HTML = STATIC_DIR / "index.html" + +@app.get("/{full_path:path}") +def serve_spa(full_path: str): + """Serve React SPA — static assets or index.html catch-all.""" + file = STATIC_DIR / full_path + if full_path and file.is_file(): + return FileResponse(file) + if INDEX_HTML.is_file(): + return FileResponse(INDEX_HTML) + return JSONResponse({"error": "Not found"}, status_code=404) \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 07e1db5..49438fb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,8 @@ dependencies = [ "tree-sitter-python>=0.25.0,<0.26.0", "tree-sitter-java>=0.23.5,<0.24.0", "tree-sitter-c-sharp>=0.23.1,<0.24.0", - "flask>=3.1.0,<4.0.0", + "fastapi>=0.115.0,<1.0.0", + "uvicorn[standard]>=0.34.0,<1.0.0", "python-dotenv>=1.0.1,<2.0.0", "multilspy @ git+https://github.com/AviAvni/multilspy.git@python-init-params", "javatools>=1.6.0,<2.0.0", @@ -25,6 +26,7 @@ dependencies = [ test = [ "pytest>=9.0.2,<10.0.0", "ruff>=0.11.0,<1.0.0", + "httpx>=0.28.0,<1.0.0", ] [tool.setuptools.packages.find] diff --git a/start.sh b/start.sh index aa08b31..b01ffed 100755 --- a/start.sh +++ b/start.sh @@ -23,7 +23,7 @@ while ! nc -z "$FALKORDB_HOST" "$FALKORDB_PORT"; do sleep 0.5 done -echo "FalkorDB is up - launching Flask..." +echo "FalkorDB is up - launching server..." 
-# Start the Flask backend -exec flask --app api/index.py run --host "${HOST:-0.0.0.0}" --port "${PORT:-5000}" ${FLASK_DEBUG:+--debug} +# Start the backend +exec uvicorn api.index:app --host "${HOST:-0.0.0.0}" --port "${PORT:-5000}" ${APP_RELOAD:+--reload} diff --git a/tests/endpoints/test_auto_complete.py b/tests/endpoints/test_auto_complete.py index df38aa5..c9ea6fe 100644 --- a/tests/endpoints/test_auto_complete.py +++ b/tests/endpoints/test_auto_complete.py @@ -1,13 +1,12 @@ import redis import pytest -from pathlib import Path from tests.index import create_app from api import Project +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - app.config.update({ "TESTING": True }) # other setup can go here @@ -19,16 +18,12 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_auto_complete(client): # Start with an empty DB - response = client.post("/auto_complete", json={ "repo": "GraphRAG-SDK", "prefix": "set" }) - status = response.json["status"] + response = client.post("/api/auto_complete", json={ "repo": "GraphRAG-SDK", "prefix": "set" }) + status = response.json()["status"] # Expecting an empty response assert status == "Missing project GraphRAG-SDK" @@ -39,9 +34,9 @@ def test_auto_complete(client): proj.process_git_history() # Re-issue auto complete request - response = client.post("/auto_complete", json={ "repo": "GraphRAG-SDK", "prefix": "set" }) - status = response.json["status"] - completions = response.json["completions"] + response = client.post("/api/auto_complete", json={ "repo": "GraphRAG-SDK", "prefix": "set" }) + status = response.json()["status"] + completions = response.json()["completions"] # Expecting an empty response assert status == "success" diff --git a/tests/endpoints/test_find_paths.py b/tests/endpoints/test_find_paths.py index 93fbe06..b119007 100644 --- 
a/tests/endpoints/test_find_paths.py +++ b/tests/endpoints/test_find_paths.py @@ -1,17 +1,14 @@ import os import redis import pytest -from pathlib import Path from tests.index import create_app from api import Project -from falkordb import FalkorDB, Path, Node, QueryResult +from falkordb import FalkorDB +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - app.config.update({ - "TESTING": True, - }) # other setup can go here redis.Redis().flushall() @@ -22,15 +19,11 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_find_paths(client): # Start with an empty DB - response = client.post("/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": 0}).json + response = client.post("/api/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": 0}).json() status = response["status"] # Expecting an error @@ -40,15 +33,13 @@ def test_find_paths(client): proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") proj.analyze_sources() - # Re-issue with invalid src node id - response = client.post("/find_paths", json={"repo": "GraphRAG-SDK", "src": 'invalid', "dest": 0}).json - status = response["status"] - assert status == "src node id must be int" + # Re-issue with invalid src node id — Pydantic rejects non-int + response = client.post("/api/find_paths", json={"repo": "GraphRAG-SDK", "src": "invalid", "dest": 0}) + assert response.status_code == 422 - # Re-issue with invalid dest node id - response = client.post("/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": 'invalid'}).json - status = response["status"] - assert status == "dest node id must be int" + # Re-issue with invalid dest node id — Pydantic rejects non-int + response = client.post("/api/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": "invalid"}) + assert response.status_code == 422 # Find src 
and dest nodes that are at least 3 hops apart db = FalkorDB(host=os.getenv('FALKORDB_HOST', 'localhost'), @@ -65,10 +56,10 @@ def test_find_paths(client): dest_id = result_set[0][1] # Re-issue with none existing node id - response = client.post("/find_paths", json={ + response = client.post("/api/find_paths", json={ "repo": "GraphRAG-SDK", "src": src_id, - "dest": dest_id}).json + "dest": dest_id}).json() status = response["status"] paths = response["paths"] diff --git a/tests/endpoints/test_get_neighbors.py b/tests/endpoints/test_get_neighbors.py index eaf59d4..61ff625 100644 --- a/tests/endpoints/test_get_neighbors.py +++ b/tests/endpoints/test_get_neighbors.py @@ -1,15 +1,12 @@ import redis import pytest -from pathlib import Path from tests.index import create_app from api import Project +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - app.config.update({ - "TESTING": True, - }) # other setup can go here redis.Redis().flushall() @@ -20,15 +17,11 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_graph_entities(client): # Start with an empty DB - response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=0").json + response = client.post("/api/get_neighbors", json={"repo": "GraphRAG-SDK", "node_ids": [0]}).json() status = response["status"] # Expecting an error @@ -38,13 +31,8 @@ def test_graph_entities(client): proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") proj.analyze_sources() - # Re-issue with invalid node id - response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=invalid").json - status = response["status"] - assert status == "Invalid node ID. It must be an integer." 
- # Re-issue with none existing node id - response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=99999999").json + response = client.post("/api/get_neighbors", json={"repo": "GraphRAG-SDK", "node_ids": [99999999]}).json() status = response["status"] neighbors = response["neighbors"] @@ -53,7 +41,7 @@ def test_graph_entities(client): assert neighbors["edges"] == [] # Re-issue with valid node id - response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=0").json + response = client.post("/api/get_neighbors", json={"repo": "GraphRAG-SDK", "node_ids": [0]}).json() status = response["status"] neighbors = response["neighbors"] nodes = neighbors["nodes"] diff --git a/tests/endpoints/test_graph_entities.py b/tests/endpoints/test_graph_entities.py index 0297c96..12f6bdd 100644 --- a/tests/endpoints/test_graph_entities.py +++ b/tests/endpoints/test_graph_entities.py @@ -1,15 +1,12 @@ import redis import pytest -from pathlib import Path from tests.index import create_app from api import Project +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - app.config.update({ - "TESTING": True, - }) # other setup can go here redis.Redis().flushall() @@ -20,15 +17,11 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_graph_entities(client): # Start with an empty DB - response = client.get("/graph_entities?repo=GraphRAG-SDK").json + response = client.get("/api/graph_entities?repo=GraphRAG-SDK").json() status = response["status"] # Expecting an error @@ -39,7 +32,7 @@ def test_graph_entities(client): proj.analyze_sources() # Re-issue graph_entities request - response = client.get("/graph_entities?repo=GraphRAG-SDK").json + response = client.get("/api/graph_entities?repo=GraphRAG-SDK").json() status = response["status"] entities = response["entities"] nodes = entities["nodes"] diff --git 
a/tests/endpoints/test_list_commits.py b/tests/endpoints/test_list_commits.py index eee8a7e..c91dbda 100644 --- a/tests/endpoints/test_list_commits.py +++ b/tests/endpoints/test_list_commits.py @@ -3,13 +3,11 @@ from pathlib import Path from tests.index import create_app from api import Project +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - app.config.update({ - "TESTING": True, - }) # other setup can go here @@ -21,17 +19,13 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_list_commits(client): # Start with an empty DB - response = client.post("/list_commits", json={ "repo": "git_repo" }) - status = response.json["status"] - commits = response.json["commits"] + response = client.post("/api/list_commits", json={ "repo": "git_repo" }) + status = response.json()["status"] + commits = response.json()["commits"] # Expecting an empty response assert status == "success" @@ -47,9 +41,9 @@ def test_list_commits(client): proj.process_git_history() # Reissue list_commits request - response = client.post("/list_commits", json={ "repo": "git_repo" }) - status = response.json["status"] - commits = response.json["commits"] + response = client.post("/api/list_commits", json={ "repo": "git_repo" }) + status = response.json()["status"] + commits = response.json()["commits"] expected = [ {'author': 'Roi Lipman', 'date': 1729068452, 'hash': 'fac1698da4ee14c215316859e68841ae0b0275b0', 'message': 'Initial commit\n'}, diff --git a/tests/endpoints/test_list_repos.py b/tests/endpoints/test_list_repos.py index e6a3817..cabc471 100644 --- a/tests/endpoints/test_list_repos.py +++ b/tests/endpoints/test_list_repos.py @@ -3,13 +3,11 @@ from pathlib import Path from tests.index import create_app from api import Project +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - 
app.config.update({ - "TESTING": True, - }) # other setup can go here redis.Redis().flushall() @@ -20,15 +18,11 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_list_repos(client): # Start with an empty DB - response = client.get("/list_repos").json + response = client.get("/api/list_repos").json() status = response["status"] repositories = response["repositories"] @@ -46,7 +40,7 @@ def test_list_repos(client): proj.process_git_history() # Reissue list_repos request - response = client.get("/list_repos").json + response = client.get("/api/list_repos").json() status = response["status"] repositories = response["repositories"] diff --git a/tests/endpoints/test_repo_info.py b/tests/endpoints/test_repo_info.py index 682ed1a..7d71d1d 100644 --- a/tests/endpoints/test_repo_info.py +++ b/tests/endpoints/test_repo_info.py @@ -1,15 +1,12 @@ import redis import pytest -from pathlib import Path from tests.index import create_app from api import Project +from starlette.testclient import TestClient @pytest.fixture() def app(): app = create_app() - app.config.update({ - "TESTING": True, - }) # other setup can go here @@ -21,16 +18,12 @@ def app(): @pytest.fixture() def client(app): - return app.test_client() - -@pytest.fixture() -def runner(app): - return app.test_cli_runner() + return TestClient(app) def test_repo_info(client): # Start with an empty DB - response = client.post("/repo_info", json={ "repo": "GraphRAG-SDK" }) - status = response.json["status"] + response = client.post("/api/repo_info", json={ "repo": "GraphRAG-SDK" }) + status = response.json()["status"] # Expecting an empty response assert status == "Missing repository \"GraphRAG-SDK\"" @@ -41,9 +34,9 @@ def test_repo_info(client): proj.process_git_history() # Reissue list_commits request - response = client.post("/repo_info", json={ "repo": "GraphRAG-SDK" }) - status = 
response.json["status"] - info = response.json["info"] + response = client.post("/api/repo_info", json={ "repo": "GraphRAG-SDK" }) + status = response.json()["status"] + info = response.json()["info"] # Expecting an empty response assert status == "success" diff --git a/tests/index.py b/tests/index.py index 2dd31dd..dab9ef0 100644 --- a/tests/index.py +++ b/tests/index.py @@ -1,453 +1,200 @@ import os -from api import * +import logging from pathlib import Path -from functools import wraps + +from api.graph import Graph, get_repos, graph_exists +from api.info import get_repo_info from dotenv import load_dotenv +from fastapi import Depends, FastAPI, Header, HTTPException, Query +from fastapi.responses import JSONResponse +from pydantic import BaseModel from api.project import Project from api.auto_complete import prefix_search from api.git_utils import git_utils -from flask import Flask, request, jsonify # Load environment variables from .env file load_dotenv() # Configure the logger -import logging logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') logger = logging.getLogger(__name__) -# Function to verify the token +# --------------------------------------------------------------------------- +# Authentication helpers (mirrors production but test-specific public_access) +# --------------------------------------------------------------------------- + SECRET_TOKEN = os.getenv('SECRET_TOKEN') -def verify_token(token): + +def _verify_token(token: str | None) -> bool: if token is not None and token.startswith("Bearer "): token = token[len("Bearer "):] return token == SECRET_TOKEN or (token is None and SECRET_TOKEN is None) -# Decorator to protect routes with token authentication -def token_required(f): - @wraps(f) - def decorated_function(*args, **kwargs): - token = request.headers.get('Authorization') # Get token from header - if not verify_token(token): - return jsonify(message="Unauthorized"), 401 - return f(*args, 
**kwargs) - return decorated_function +def token_required(authorization: str | None = Header(None)): + if not _verify_token(authorization): + raise HTTPException(status_code=401, detail="Unauthorized") -def create_app(): - app = Flask(__name__) - - # Decorator to protect routes with public access - def public_access(f): - @wraps(f) - def decorated_function(*args, **kwargs): - public = os.environ.get("CODE_GRAPH_PUBLIC", "0") # Get public access setting - if public != "1": - return jsonify(message="Unauthorized"), 401 - return f(*args, **kwargs) - return decorated_function - @app.route('/api/graph_entities', methods=['GET']) - @token_required # Apply token authentication decorator - def graph_entities(): - """ - Endpoint to fetch sub-graph entities from a given repository. - The repository is specified via the 'repo' query parameter. - - Returns: - - 200: Successfully returns the sub-graph. - - 400: Missing or invalid 'repo' parameter. - - 500: Internal server error or database connection issue. 
- """ - - # Access the 'repo' parameter from the GET request - repo = request.args.get('repo') - - if not repo: - logging.error("Missing 'repo' parameter in request.") - return jsonify({"status": "Missing 'repo' parameter"}), 400 +def public_access(authorization: str | None = Header(None)): + """Test-specific: requires CODE_GRAPH_PUBLIC=1 (no token fallback).""" + public = os.environ.get("CODE_GRAPH_PUBLIC", "0") + if public != "1": + raise HTTPException(status_code=401, detail="Unauthorized") - if not graph_exists(repo): - logging.error(f"Missing project {repo}") - return jsonify({"status": f"Missing project {repo}"}), 400 +# --------------------------------------------------------------------------- +# Pydantic request models +# --------------------------------------------------------------------------- - try: - # Initialize the graph with the provided repo and credentials - g = Graph(repo) +class RepoRequest(BaseModel): + repo: str - # Retrieve a sub-graph of up to 500 entities - sub_graph = g.get_sub_graph(500) +class NeighborsRequest(BaseModel): + repo: str + node_ids: list[int] - logging.info(f"Successfully retrieved sub-graph for repo: {repo}") - response = { - 'status': 'success', - 'entities': sub_graph - } +class AutoCompleteRequest(BaseModel): + repo: str + prefix: str - return jsonify(response), 200 +class FindPathsRequest(BaseModel): + repo: str + src: int + dest: int - except Exception as e: - logging.error(f"Error retrieving sub-graph for repo '{repo}': {e}") - return jsonify({"status": "Internal server error"}), 500 +class ChatRequest(BaseModel): + repo: str + msg: str +class AnalyzeFolderRequest(BaseModel): + path: str + ignore: list[str] = [] - @app.route('/api/get_neighbors', methods=['POST']) - @token_required # Apply token authentication decorator - def get_neighbors(): - """ - Endpoint to get neighbors of a nodes list in the graph. - Expects 'repo' and 'node_ids' as body parameters. 
+class AnalyzeRepoRequest(BaseModel): + repo_url: str + ignore: list[str] = [] - Returns: - JSON response containing neighbors or error messages. - """ +class SwitchCommitRequest(BaseModel): + repo: str + commit: str - # Get JSON data from the request - data = request.get_json() +# --------------------------------------------------------------------------- +# App factory +# --------------------------------------------------------------------------- - # Get query parameters - repo = data.get('repo') - node_ids = data.get('node_ids') +def create_app(): + app = FastAPI() - # Validate 'repo' parameter + @app.get('/api/graph_entities') + def graph_entities(repo: str = Query(None), _=Depends(token_required)): if not repo: - logging.error("Repository name is missing in the request.") - return jsonify({"status": "Repository name is required."}), 400 - - # Validate 'node_ids' parameter - if not node_ids: - logging.error("Node IDs is missing in the request.") - return jsonify({"status": "Node IDs is required."}), 400 - - # Validate repo exists - if not graph_exists(repo): - logging.error(f"Missing project {repo}") - return jsonify({"status": f"Missing project {repo}"}), 400 - - # Initialize the graph with the provided repository - g = Graph(repo) - - # Fetch the neighbors of the specified node - neighbors = g.get_neighbors(node_ids) - - # Log and return the neighbors - logging.info(f"Successfully retrieved neighbors for node IDs {node_ids} in repo '{repo}'.") - - response = { - 'status': 'success', - 'neighbors': neighbors - } - - return jsonify(response), 200 - - @app.route('/api/auto_complete', methods=['POST']) - @token_required # Apply token authentication decorator - def auto_complete(): - """ - Endpoint to process auto-completion requests for a repository based on a prefix. - - Returns: - JSON response with auto-completion suggestions or an error message. 
- """ - - # Get JSON data from the request - data = request.get_json() - - # Validate that 'repo' is provided - repo = data.get('repo') - if repo is None: - return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 - - # Validate that 'prefix' is provided - prefix = data.get('prefix') - if prefix is None: - return jsonify({'status': f'Missing mandatory parameter "prefix"'}), 400 + logging.error("Missing 'repo' parameter in request.") + return JSONResponse({"status": "Missing 'repo' parameter"}, status_code=400) - # Validate repo exists if not graph_exists(repo): - return jsonify({'status': f'Missing project {repo}'}), 400 - - # Fetch auto-completion results - completions = prefix_search(repo, prefix) - - # Create a success response - response = { - 'status': 'success', - 'completions': completions - } - - return jsonify(response), 200 + logging.error("Missing project %s", repo) + return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) - @app.route('/api/list_repos', methods=['GET']) - @token_required # Apply token authentication decorator - def list_repos(): - """ - Endpoint to list all available repositories. - - Returns: - JSON response with a list of repositories or an error message. 
- """ - - # Fetch list of repositories + try: + g = Graph(repo) + sub_graph = g.get_sub_graph(500) + logging.info("Successfully retrieved sub-graph for repo: %s", repo) + return {"status": "success", "entities": sub_graph} + except Exception as e: + logging.error("Error retrieving sub-graph for repo '%s': %s", repo, e) + return JSONResponse({"status": "Internal server error"}, status_code=500) + + @app.post('/api/get_neighbors') + def get_neighbors(data: NeighborsRequest, _=Depends(token_required)): + if not graph_exists(data.repo): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + + g = Graph(data.repo) + neighbors = g.get_neighbors(data.node_ids) + logging.info("Successfully retrieved neighbors for node IDs %s in repo '%s'.", + data.node_ids, data.repo) + return {"status": "success", "neighbors": neighbors} + + @app.post('/api/auto_complete') + def auto_complete(data: AutoCompleteRequest, _=Depends(token_required)): + if not graph_exists(data.repo): + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + + completions = prefix_search(data.repo, data.prefix) + return {"status": "success", "completions": completions} + + @app.get('/api/list_repos') + def list_repos(_=Depends(token_required)): repos = get_repos() + return {"status": "success", "repositories": repos} - # Create a success response with the list of repositories - response = { - 'status': 'success', - 'repositories': repos - } - - return jsonify(response), 200 - - @app.route('/api/repo_info', methods=['POST']) - @token_required # Apply token authentication decorator - def repo_info(): - """ - Endpoint to retrieve information about a specific repository. - - Expected JSON payload: - { - "repo": - } - - Returns: - JSON: A response containing the status and graph statistics (node and edge counts). - - 'status': 'success' if successful, or an error message. 
- - 'info': A dictionary with the node and edge counts if the request is successful. - """ - - # Get JSON data from the request - data = request.get_json() - - # Validate the 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 - - # Initialize the graph with the provided repository name - g = Graph(repo) - - # Retrieve statistics from the graph + @app.post('/api/repo_info') + def repo_info(data: RepoRequest, _=Depends(token_required)): + g = Graph(data.repo) stats = g.stats() - info = get_repo_info(repo) - + info = get_repo_info(data.repo) if stats is None or info is None: - return jsonify({'status': f'Missing repository "{repo}"'}), 400 - - stats |= info - - # Create a response - response = { - 'status': 'success', - 'info': stats - } - - return jsonify(response), 200 - - @app.route('/api/find_paths', methods=['POST']) - @token_required # Apply token authentication decorator - def find_paths(): - """ - Finds all paths between a source node (src) and a destination node (dest) in the graph. - The graph is associated with the repository (repo) provided in the request. - - Request Body (JSON): - - repo (str): Name of the repository. - - src (int): ID of the source node. - - dest (int): ID of the destination node. - - Returns: - A JSON response with: - - status (str): Status of the request ("success" or "error"). - - paths (list): List of paths between the source and destination nodes. 
- """ - - # Get JSON data from the request - data = request.get_json() - - # Validate 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 - - # Validate 'src' parameter - src = data.get('src') - if src is None: - return jsonify({'status': f'Missing mandatory parameter "src"'}), 400 - if not isinstance(src, int): - return jsonify({'status': "src node id must be int"}), 400 - - # Validate 'dest' parameter - dest = data.get('dest') - if dest is None: - return jsonify({'status': f'Missing mandatory parameter "dest"'}), 400 - if not isinstance(dest, int): - return jsonify({'status': "dest node id must be int"}), 400 - - if not graph_exists(repo): - logging.error(f"Missing project {repo}") - return jsonify({"status": f"Missing project {repo}"}), 400 - - # Initialize graph with provided repo and credentials - g = Graph(repo) - - # Find paths between the source and destination nodes - paths = g.find_paths(src, dest) - - # Create and return a successful response - response = { 'status': 'success', 'paths': paths } - - return jsonify(response), 200 - - @app.route('/api/chat', methods=['POST']) - @token_required # Apply token authentication decorator - def chat(): - # Get JSON data from the request - data = request.get_json() - - # Validate 'repo' parameter - repo = data.get('repo') - if repo is None: - return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 - - # Get optional 'label' and 'relation' parameters - msg = data.get('msg') - if msg is None: - return jsonify({'status': f'Missing mandatory parameter "msg"'}), 400 - - answer = ask(repo, msg) - - # Create and return a successful response - response = { 'status': 'success', 'response': answer } - - return jsonify(response), 200 - - @app.route('/api/analyze_folder', methods=['POST']) - @token_required # Apply token authentication decorator - def analyze_folder(): - """ - Endpoint to analyze local source code - Expects 'path' 
and optionally an ignore list. - - Returns: - JSON response with status and error message if applicable - Status codes: - 200: Success - 400: Invalid input - 500: Internal server error - """ - - # Get JSON data from the request - data = request.get_json() - - # Get query parameters - path = data.get('path') - ignore = data.get('ignore', []) - - # Validate input parameters - if not path: - logging.error("'path' is missing from the request.") - return jsonify({"status": "'path' is required."}), 400 - - # Validate path exists and is a directory - if not os.path.isdir(path): - logging.error(f"Path '{path}' does not exist or is not a directory") - return jsonify({"status": "Invalid path: must be an existing directory"}), 400 - - # Validate ignore list contains valid paths - if not isinstance(ignore, list): - logging.error("'ignore' must be a list of paths") - return jsonify({"status": "'ignore' must be a list of paths"}), 400 - - proj_name = Path(path).name - - # Initialize the graph with the provided project name + return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) + stats |= info + return {"status": "success", "info": stats} + + @app.post('/api/find_paths') + def find_paths(data: FindPathsRequest, _=Depends(token_required)): + if not graph_exists(data.repo): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + + g = Graph(data.repo) + paths = g.find_paths(data.src, data.dest) + return {"status": "success", "paths": paths} + + @app.post('/api/chat') + def chat(data: ChatRequest, _=Depends(token_required)): + from api.llm import ask + try: + answer = ask(data.repo, data.msg) + except Exception as e: + return JSONResponse({"status": "error", "response": str(e)}, status_code=500) + return {"status": "success", "response": answer} + + @app.post('/api/analyze_folder') + def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): + if not 
os.path.isdir(data.path): + logging.error("Path '%s' does not exist or is not a directory", data.path) + return JSONResponse({"status": "Invalid path: must be an existing directory"}, + status_code=400) + + from api.analyzers.source_analyzer import SourceAnalyzer + proj_name = Path(data.path).name g = Graph(proj_name) - - # Analyze source code within given folder analyzer = SourceAnalyzer() - analyzer.analyze_local_folder(path, g, ignore) - - # Return response - response = { - 'status': 'success', - 'project': proj_name - } - return jsonify(response), 200 - - @app.route('/api/analyze_repo', methods=['POST']) - @public_access # Apply public access decorator - @token_required # Apply token authentication decorator - def analyze_repo(): - """ - Analyze a GitHub repository. - - Expected JSON payload: - { - "repo_url": "string", - "ignore": ["string"] # optional - } - - Returns: - JSON response with processing status - """ - - data = request.get_json() - url = data.get('repo_url') - if url is None: - return jsonify({'status': f'Missing mandatory parameter "url"'}), 400 - logger.debug(f'Received repo_url: {url}') - - ignore = data.get('ignore', []) - - proj = Project.from_git_repository(url) - proj.analyze_sources(ignore) - proj.process_git_history(ignore) - - # Create a response - response = { - 'status': 'success', - } - - return jsonify(response), 200 - - @app.route('/api/switch_commit', methods=['POST']) - @public_access # Apply public access decorator - @token_required # Apply token authentication decorator - def switch_commit(): - """ - Endpoint to switch a repository to a specific commit. - - Returns: - JSON response with the change set or an error message. 
- """ - - # Get JSON data from the request - data = request.get_json() - - # Validate that 'repo' is provided - repo = data.get('repo') - if repo is None: - return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 - - # Validate that 'commit' is provided - commit = data.get('commit') - if commit is None: - return jsonify({'status': f'Missing mandatory parameter "commit"'}), 400 - - # Attempt to switch the repository to the specified commit - change_set = git_utils.switch_commit(repo, commit) - - # Create a success response - response = { - 'status': 'success', - 'change_set': change_set - } - - return jsonify(response), 200 - + analyzer.analyze_local_folder(data.path, g, data.ignore) + return {"status": "success", "project": proj_name} + + @app.post('/api/analyze_repo') + def analyze_repo(data: AnalyzeRepoRequest, _=Depends(public_access)): + logger.debug('Received repo_url: %s', data.repo_url) + proj = Project.from_git_repository(data.repo_url) + proj.analyze_sources(data.ignore) + proj.process_git_history(data.ignore) + return {"status": "success"} + + @app.post('/api/switch_commit') + def switch_commit(data: SwitchCommitRequest, _=Depends(public_access)): + git_utils.switch_commit(data.repo, data.commit) + return {"status": "success"} + + @app.post('/api/list_commits') + def list_commits(data: RepoRequest, _=Depends(token_required)): + from api.git_utils.git_graph import GitGraph + git_graph = GitGraph(git_utils.GitRepoName(data.repo)) + commits = git_graph.list_commits() + return {"status": "success", "commits": commits} + return app if __name__ == '__main__': - app = create_app() - app.run(debug=False) \ No newline at end of file + import uvicorn + application = create_app() + uvicorn.run(application, host="127.0.0.1", port=5000) \ No newline at end of file diff --git a/uv.lock b/uv.lock index a97f6e2..44c773a 100644 --- a/uv.lock +++ b/uv.lock @@ -137,15 +137,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, ] -[[package]] -name = "blinker" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, -] - [[package]] name = "cattrs" version = "26.1.0" @@ -262,7 +253,7 @@ version = "0.4.2" source = { virtual = "." 
} dependencies = [ { name = "falkordb" }, - { name = "flask" }, + { name = "fastapi" }, { name = "graphrag-sdk" }, { name = "javatools" }, { name = "multilspy" }, @@ -273,11 +264,13 @@ dependencies = [ { name = "tree-sitter-c-sharp" }, { name = "tree-sitter-java" }, { name = "tree-sitter-python" }, + { name = "uvicorn", extra = ["standard"] }, { name = "validators" }, ] [package.optional-dependencies] test = [ + { name = "httpx" }, { name = "pytest" }, { name = "ruff" }, ] @@ -285,8 +278,9 @@ test = [ [package.metadata] requires-dist = [ { name = "falkordb", specifier = ">=1.1.3,<2.0.0" }, - { name = "flask", specifier = ">=3.1.0,<4.0.0" }, + { name = "fastapi", specifier = ">=0.115.0,<1.0.0" }, { name = "graphrag-sdk", specifier = ">=0.8.1,<0.9.0" }, + { name = "httpx", marker = "extra == 'test'", specifier = ">=0.28.0,<1.0.0" }, { name = "javatools", specifier = ">=1.6.0,<2.0.0" }, { name = "multilspy", git = "https://github.com/AviAvni/multilspy.git?rev=python-init-params" }, { name = "pygit2", specifier = ">=1.17.0,<2.0.0" }, @@ -298,6 +292,7 @@ requires-dist = [ { name = "tree-sitter-c-sharp", specifier = ">=0.23.1,<0.24.0" }, { name = "tree-sitter-java", specifier = ">=0.23.5,<0.24.0" }, { name = "tree-sitter-python", specifier = ">=0.25.0,<0.26.0" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.34.0,<1.0.0" }, { name = "validators", specifier = ">=0.35.0,<0.36.0" }, ] provides-extras = ["test"] @@ -362,6 +357,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/8b/59ec60885abd3b6b2b3a1e5917627c3cae656b4cff7f847c5217ec3dc952/falkordb-1.6.0-py3-none-any.whl", hash = "sha256:0f190e9d6104595fd51ece4f1e7b5d49d62cfee346d94151d7986a138fd90d89", size = 37378, upload-time = "2026-02-21T06:36:17.769Z" }, ] +[[package]] +name = "fastapi" +version = "0.135.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" 
}, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/7b/f8e0211e9380f7195ba3f3d40c292594fd81ba8ec4629e3854c353aaca45/fastapi-0.135.1.tar.gz", hash = "sha256:d04115b508d936d254cea545b7312ecaa58a7b3a0f84952535b4c9afae7668cd", size = 394962, upload-time = "2026-03-01T18:18:29.369Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl", hash = "sha256:46e2fc5745924b7c840f71ddd277382af29ce1cdb7d5eab5bf697e3fb9999c9e", size = 116999, upload-time = "2026-03-01T18:18:30.831Z" }, +] + [[package]] name = "fastuuid" version = "0.14.0" @@ -410,23 +421,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/60/dd88b9688821079e92a0ed015779f11a65576218d525948be3148b81b86e/fix_busted_json-0.0.18-py3-none-any.whl", hash = "sha256:fdce0e02c9a810b3aa28e1c3c32c24b21b44e89f6315ec25d2b963bd52a6ef03", size = 7358, upload-time = "2024-04-22T08:26:35.946Z" }, ] -[[package]] -name = "flask" -version = "3.1.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "blinker" }, - { name = "click" }, - { name = "itsdangerous" }, - { name = "jinja2" }, - { name = "markupsafe" }, - { name = "werkzeug" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/26/00/35d85dcce6c57fdc871f3867d465d780f302a175ea360f62533f12b27e2b/flask-3.1.3.tar.gz", hash = "sha256:0ef0e52b8a9cd932855379197dd8f94047b359ca0a78695144304cb45f87c9eb", size = 759004, upload-time = "2026-02-19T05:00:57.678Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/9c/34f6962f9b9e9c71f6e5ed806e0d0ff03c9d1b0b2340088a0cf4bce09b18/flask-3.1.3-py3-none-any.whl", hash = "sha256:f4bcbefc124291925f1a26446da31a5178f9483862233b23c0c96a20701f670c", size = 103424, upload-time = "2026-02-19T05:00:56.027Z" }, -] - [[package]] name = "frozenlist" version = "1.8.0" @@ -562,6 +556,28 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time 
= "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" }, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, +] + [[package]] name = "httpx" version = "0.28.1" @@ -627,15 +643,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] -[[package]] -name = "itsdangerous" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", 
size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, -] - [[package]] name = "javatools" version = "1.6.0" @@ -1469,6 +1476,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, ] +[[package]] +name = "starlette" +version = "0.52.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, +] + [[package]] name = "tiktoken" version = "0.12.0" @@ -1668,6 +1688,50 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] +[[package]] +name = "uvicorn" +version = "0.41.0" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/ce/eeb58ae4ac36fe09e3842eb02e0eb676bf2c53ae062b98f1b2531673efdd/uvicorn-0.41.0.tar.gz", hash = "sha256:09d11cf7008da33113824ee5a1c6422d89fbc2ff476540d69a34c87fab8b571a", size = 82633, upload-time = "2026-02-16T23:07:24.1Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/e4/d04a086285c20886c0daad0e026f250869201013d18f81d9ff5eada73a88/uvicorn-0.41.0-py3-none-any.whl", hash = "sha256:29e35b1d2c36a04b9e180d4007ede3bcb32a85fbdfd6c6aeb3f26839de088187", size = 68783, upload-time = "2026-02-16T23:07:22.357Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, +] + [[package]] name = "validators" version = "0.35.0" @@ -1678,15 +1742,77 @@ wheels = [ ] [[package]] -name = "werkzeug" -version = "3.1.6" +name = "watchfiles" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "markupsafe" }, + { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/f1/ee81806690a87dab5f5653c1f146c92bc066d7f4cebc603ef88eb9e13957/werkzeug-3.1.6.tar.gz", hash = 
"sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25", size = 864736, upload-time = "2026-02-19T15:17:18.884Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/ec/d58832f89ede95652fd01f4f24236af7d32b70cab2196dfcc2d2fd13c5c2/werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131", size = 225166, upload-time = "2026-02-19T15:17:17.475Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, 
upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = 
"2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = 
"2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = 
"2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] [[package]] From 0704421cccc6a251e303492cd68dd9769699510e Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Thu, 12 Mar 2026 14:36:05 +0200 Subject: [PATCH 03/16] Fix path traversal in SPA serving and stop leaking exception details - serve_spa: resolve path and verify it stays within STATIC_DIR - chat endpoint: log exception server-side, return generic error to client - Apply same fixes to test app Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- api/index.py | 9 +++++++-- tests/index.py | 4 +++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/api/index.py b/api/index.py index dd29cbf..3f15aed 100644 --- a/api/index.py +++ b/api/index.py @@ -191,7 +191,9 @@ def chat(data: ChatRequest, _=Depends(public_or_auth)): try: answer = ask(data.repo, 
data.msg) except Exception as e: - return JSONResponse({"status": "error", "response": str(e)}, status_code=500) + logging.error("Chat error for repo '%s': %s", data.repo, e) + return JSONResponse({"status": "error", "response": "Internal server error"}, + status_code=500) return {"status": "success", "response": answer} @@ -252,7 +254,10 @@ def list_commits(data: RepoRequest, _=Depends(public_or_auth)): @app.get("/{full_path:path}") def serve_spa(full_path: str): """Serve React SPA — static assets or index.html catch-all.""" - file = STATIC_DIR / full_path + file = (STATIC_DIR / full_path).resolve() + # Prevent path traversal outside the static directory + if not str(file).startswith(str(STATIC_DIR)): + return JSONResponse({"error": "Not found"}, status_code=404) if full_path and file.is_file(): return FileResponse(file) if INDEX_HTML.is_file(): diff --git a/tests/index.py b/tests/index.py index dab9ef0..eea4680 100644 --- a/tests/index.py +++ b/tests/index.py @@ -155,7 +155,9 @@ def chat(data: ChatRequest, _=Depends(token_required)): try: answer = ask(data.repo, data.msg) except Exception as e: - return JSONResponse({"status": "error", "response": str(e)}, status_code=500) + logging.error("Chat error for repo '%s': %s", data.repo, e) + return JSONResponse({"status": "error", "response": "Internal server error"}, + status_code=500) return {"status": "success", "response": answer} @app.post('/api/analyze_folder') From 70bc326178249704e1732c4b3e3e853d73b95fdb Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Thu, 12 Mar 2026 14:37:37 +0200 Subject: [PATCH 04/16] Fix test app: use token_required for mutating endpoints Match production auth by using token_required (not public_access) for analyze_repo and switch_commit in the test app. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/index.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/index.py b/tests/index.py index eea4680..2753ee6 100644 --- a/tests/index.py +++ b/tests/index.py @@ -175,7 +175,7 @@ def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): return {"status": "success", "project": proj_name} @app.post('/api/analyze_repo') - def analyze_repo(data: AnalyzeRepoRequest, _=Depends(public_access)): + def analyze_repo(data: AnalyzeRepoRequest, _=Depends(token_required)): logger.debug('Received repo_url: %s', data.repo_url) proj = Project.from_git_repository(data.repo_url) proj.analyze_sources(data.ignore) @@ -183,7 +183,7 @@ def analyze_repo(data: AnalyzeRepoRequest, _=Depends(public_access)): return {"status": "success"} @app.post('/api/switch_commit') - def switch_commit(data: SwitchCommitRequest, _=Depends(public_access)): + def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): git_utils.switch_commit(data.repo, data.commit) return {"status": "success"} From f5d12b6216fce7573793ab09762b4ae0a03de52e Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 12:16:47 +0200 Subject: [PATCH 05/16] Fix path traversal vulnerabilities, harden auth tests, and improve logging - serve_spa: replace str().startswith() with Path.is_relative_to() to prevent prefix-collision path traversal (e.g. 
dist_evil matching dist) - analyze_folder: add ALLOWED_ANALYSIS_DIR containment check to prevent traversal outside allowed base directory - Upgrade logging.error to logging.exception in graph_entities and chat handlers for full traceback capture - Remove unused public_access function from tests/index.py - Mirror analyze_folder path validation in test app - Add auth tests for repo_info (public_or_auth) and list_repos endpoints - Tighten test_find_paths 422 assertions to verify field-specific validation - Parse response.json() once per response in test_list_commits Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- api/index.py | 29 ++++++++++++----- tests/endpoints/test_find_paths.py | 4 +++ tests/endpoints/test_list_commits.py | 11 ++++--- tests/endpoints/test_list_repos.py | 22 +++++++++++++ tests/endpoints/test_repo_info.py | 47 +++++++++++++++++++--------- tests/index.py | 28 +++++++++++------ 6 files changed, 103 insertions(+), 38 deletions(-) diff --git a/api/index.py b/api/index.py index 3f15aed..fe25fc1 100644 --- a/api/index.py +++ b/api/index.py @@ -89,7 +89,13 @@ class SwitchCommitRequest(BaseModel): # Application # --------------------------------------------------------------------------- -STATIC_DIR = Path(__file__).resolve().parent.parent / "app" / "dist" +STATIC_DIR = (Path(__file__).resolve().parent.parent / "app" / "dist").resolve() + +# Allowed base directory for local folder analysis (defaults to project root) +ALLOWED_ANALYSIS_DIR = Path( + os.getenv("ALLOWED_ANALYSIS_DIR", + str(Path(__file__).resolve().parent.parent)) +).resolve() app = FastAPI() @@ -117,7 +123,7 @@ def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): return {"status": "success", "entities": sub_graph} except Exception as e: - logging.error("Error retrieving sub-graph for repo '%s': %s", repo, e) + logging.exception("Error retrieving sub-graph for repo '%s': %s", repo, e) return JSONResponse({"status": "Internal server error"}, 
status_code=500) @@ -191,7 +197,7 @@ def chat(data: ChatRequest, _=Depends(public_or_auth)): try: answer = ask(data.repo, data.msg) except Exception as e: - logging.error("Chat error for repo '%s': %s", data.repo, e) + logging.exception("Chat error for repo '%s': %s", data.repo, e) return JSONResponse({"status": "error", "response": "Internal server error"}, status_code=500) @@ -202,16 +208,24 @@ def chat(data: ChatRequest, _=Depends(public_or_auth)): def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): """Analyze local source code. Always requires a valid token.""" - if not os.path.isdir(data.path): + resolved_path = Path(data.path).resolve() + + if not resolved_path.is_relative_to(ALLOWED_ANALYSIS_DIR): + logging.error("Path '%s' is outside the allowed directory", data.path) + return JSONResponse( + {"status": "Invalid path: must be within the allowed analysis directory"}, + status_code=400) + + if not resolved_path.is_dir(): logging.error("Path '%s' does not exist or is not a directory", data.path) return JSONResponse({"status": "Invalid path: must be an existing directory"}, status_code=400) - proj_name = Path(data.path).name + proj_name = resolved_path.name g = Graph(proj_name) analyzer = SourceAnalyzer() - analyzer.analyze_local_folder(data.path, g, data.ignore) + analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) return {"status": "success", "project": proj_name} @@ -255,8 +269,7 @@ def list_commits(data: RepoRequest, _=Depends(public_or_auth)): def serve_spa(full_path: str): """Serve React SPA — static assets or index.html catch-all.""" file = (STATIC_DIR / full_path).resolve() - # Prevent path traversal outside the static directory - if not str(file).startswith(str(STATIC_DIR)): + if not file.is_relative_to(STATIC_DIR): return JSONResponse({"error": "Not found"}, status_code=404) if full_path and file.is_file(): return FileResponse(file) diff --git a/tests/endpoints/test_find_paths.py 
b/tests/endpoints/test_find_paths.py index b119007..46a037c 100644 --- a/tests/endpoints/test_find_paths.py +++ b/tests/endpoints/test_find_paths.py @@ -36,10 +36,14 @@ def test_find_paths(client): # Re-issue with invalid src node id — Pydantic rejects non-int response = client.post("/api/find_paths", json={"repo": "GraphRAG-SDK", "src": "invalid", "dest": 0}) assert response.status_code == 422 + errors = response.json()["detail"] + assert any(e for e in errors if e["loc"][-1] == "src") # Re-issue with invalid dest node id — Pydantic rejects non-int response = client.post("/api/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": "invalid"}) assert response.status_code == 422 + errors = response.json()["detail"] + assert any(e for e in errors if e["loc"][-1] == "dest") # Find src and dest nodes that are at least 3 hops apart db = FalkorDB(host=os.getenv('FALKORDB_HOST', 'localhost'), diff --git a/tests/endpoints/test_list_commits.py b/tests/endpoints/test_list_commits.py index c91dbda..dd31672 100644 --- a/tests/endpoints/test_list_commits.py +++ b/tests/endpoints/test_list_commits.py @@ -24,8 +24,9 @@ def client(app): def test_list_commits(client): # Start with an empty DB response = client.post("/api/list_commits", json={ "repo": "git_repo" }) - status = response.json()["status"] - commits = response.json()["commits"] + data = response.json() + status = data["status"] + commits = data["commits"] # Expecting an empty response assert status == "success" @@ -40,10 +41,10 @@ def test_list_commits(client): proj.analyze_sources() proj.process_git_history() - # Reissue list_commits request response = client.post("/api/list_commits", json={ "repo": "git_repo" }) - status = response.json()["status"] - commits = response.json()["commits"] + data = response.json() + status = data["status"] + commits = data["commits"] expected = [ {'author': 'Roi Lipman', 'date': 1729068452, 'hash': 'fac1698da4ee14c215316859e68841ae0b0275b0', 'message': 'Initial commit\n'}, diff 
--git a/tests/endpoints/test_list_repos.py b/tests/endpoints/test_list_repos.py index cabc471..9d05fe2 100644 --- a/tests/endpoints/test_list_repos.py +++ b/tests/endpoints/test_list_repos.py @@ -1,7 +1,9 @@ import redis import pytest +import api.index from pathlib import Path from tests.index import create_app +from api.index import app as production_app from api import Project from starlette.testclient import TestClient @@ -47,3 +49,23 @@ def test_list_repos(client): # Expecting an empty response assert status == "success" assert repositories == ['git_repo'] + + +def test_list_repos_with_auth(monkeypatch): + """Authenticated request succeeds when SECRET_TOKEN is set.""" + monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") + monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) + client = TestClient(production_app, raise_server_exceptions=False) + response = client.get("/api/list_repos", + headers={"Authorization": "Bearer test-secret"}) + # Auth passed (not 401); endpoint may error without a database backend + assert response.status_code != 401 + + +def test_list_repos_unauthorized(monkeypatch): + """Request without auth gets 401 when SECRET_TOKEN is set.""" + monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") + monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) + client = TestClient(production_app) + response = client.get("/api/list_repos") + assert response.status_code == 401 diff --git a/tests/endpoints/test_repo_info.py b/tests/endpoints/test_repo_info.py index 7d71d1d..9b68066 100644 --- a/tests/endpoints/test_repo_info.py +++ b/tests/endpoints/test_repo_info.py @@ -1,23 +1,13 @@ import redis import pytest -from tests.index import create_app +import api.index +from api.index import app from api import Project from starlette.testclient import TestClient @pytest.fixture() -def app(): - app = create_app() - - # other setup can go here - +def client(): redis.Redis().flushall() - - yield app - - # clean up / reset resources here - 
-@pytest.fixture() -def client(app): return TestClient(app) def test_repo_info(client): @@ -35,8 +25,9 @@ def test_repo_info(client): # Reissue list_commits request response = client.post("/api/repo_info", json={ "repo": "GraphRAG-SDK" }) - status = response.json()["status"] - info = response.json()["info"] + data = response.json() + status = data["status"] + info = data["info"] # Expecting an empty response assert status == "success" @@ -44,3 +35,29 @@ def test_repo_info(client): assert 'node_count' in info assert info['repo_url'] == 'https://github.com/FalkorDB/GraphRAG-SDK' + +def test_repo_info_public_access(monkeypatch): + """Public access is granted when CODE_GRAPH_PUBLIC=1.""" + monkeypatch.setenv("CODE_GRAPH_PUBLIC", "1") + client = TestClient(app, raise_server_exceptions=False) + response = client.post("/api/repo_info", json={"repo": "nonexistent"}) + # Auth passed (not 401); endpoint may error without a database backend + assert response.status_code != 401 + + +def test_repo_info_token_required(monkeypatch): + """When SECRET_TOKEN is set and CODE_GRAPH_PUBLIC != 1, auth is enforced.""" + monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") + monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) + + # Without auth header → 401 + client = TestClient(app) + response = client.post("/api/repo_info", json={"repo": "nonexistent"}) + assert response.status_code == 401 + + # With valid auth header → not 401 + client = TestClient(app, raise_server_exceptions=False) + response = client.post("/api/repo_info", json={"repo": "nonexistent"}, + headers={"Authorization": "Bearer test-secret"}) + assert response.status_code != 401 + diff --git a/tests/index.py b/tests/index.py index 2753ee6..a1c4752 100644 --- a/tests/index.py +++ b/tests/index.py @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) # --------------------------------------------------------------------------- -# Authentication helpers (mirrors production but test-specific public_access) +# 
Authentication helpers # --------------------------------------------------------------------------- SECRET_TOKEN = os.getenv('SECRET_TOKEN') @@ -36,11 +36,11 @@ def token_required(authorization: str | None = Header(None)): if not _verify_token(authorization): raise HTTPException(status_code=401, detail="Unauthorized") -def public_access(authorization: str | None = Header(None)): - """Test-specific: requires CODE_GRAPH_PUBLIC=1 (no token fallback).""" - public = os.environ.get("CODE_GRAPH_PUBLIC", "0") - if public != "1": - raise HTTPException(status_code=401, detail="Unauthorized") +# Allowed base directory for local folder analysis (defaults to project root) +ALLOWED_ANALYSIS_DIR = Path( + os.getenv("ALLOWED_ANALYSIS_DIR", + str(Path(__file__).resolve().parent.parent)) +).resolve() # --------------------------------------------------------------------------- # Pydantic request models @@ -155,23 +155,31 @@ def chat(data: ChatRequest, _=Depends(token_required)): try: answer = ask(data.repo, data.msg) except Exception as e: - logging.error("Chat error for repo '%s': %s", data.repo, e) + logging.exception("Chat error for repo '%s': %s", data.repo, e) return JSONResponse({"status": "error", "response": "Internal server error"}, status_code=500) return {"status": "success", "response": answer} @app.post('/api/analyze_folder') def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): - if not os.path.isdir(data.path): + resolved_path = Path(data.path).resolve() + + if not resolved_path.is_relative_to(ALLOWED_ANALYSIS_DIR): + logging.error("Path '%s' is outside the allowed directory", data.path) + return JSONResponse( + {"status": "Invalid path: must be within the allowed analysis directory"}, + status_code=400) + + if not resolved_path.is_dir(): logging.error("Path '%s' does not exist or is not a directory", data.path) return JSONResponse({"status": "Invalid path: must be an existing directory"}, status_code=400) from api.analyzers.source_analyzer 
import SourceAnalyzer - proj_name = Path(data.path).name + proj_name = resolved_path.name g = Graph(proj_name) analyzer = SourceAnalyzer() - analyzer.analyze_local_folder(data.path, g, data.ignore) + analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) return {"status": "success", "project": proj_name} @app.post('/api/analyze_repo') From 3b92198fbb581c0ebd29fd32cf2ffad610a528cb Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 12:21:50 +0200 Subject: [PATCH 06/16] Fix dual import style for api.index module Use 'import api.index' consistently instead of mixing with 'from api.index import'. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/endpoints/test_list_repos.py | 5 ++--- tests/endpoints/test_repo_info.py | 9 ++++----- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/tests/endpoints/test_list_repos.py b/tests/endpoints/test_list_repos.py index 9d05fe2..103311a 100644 --- a/tests/endpoints/test_list_repos.py +++ b/tests/endpoints/test_list_repos.py @@ -3,7 +3,6 @@ import api.index from pathlib import Path from tests.index import create_app -from api.index import app as production_app from api import Project from starlette.testclient import TestClient @@ -55,7 +54,7 @@ def test_list_repos_with_auth(monkeypatch): """Authenticated request succeeds when SECRET_TOKEN is set.""" monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) - client = TestClient(production_app, raise_server_exceptions=False) + client = TestClient(api.index.app, raise_server_exceptions=False) response = client.get("/api/list_repos", headers={"Authorization": "Bearer test-secret"}) # Auth passed (not 401); endpoint may error without a database backend @@ -66,6 +65,6 @@ def test_list_repos_unauthorized(monkeypatch): """Request without auth gets 401 when SECRET_TOKEN is set.""" monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") 
monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) - client = TestClient(production_app) + client = TestClient(api.index.app) response = client.get("/api/list_repos") assert response.status_code == 401 diff --git a/tests/endpoints/test_repo_info.py b/tests/endpoints/test_repo_info.py index 9b68066..21478a2 100644 --- a/tests/endpoints/test_repo_info.py +++ b/tests/endpoints/test_repo_info.py @@ -1,14 +1,13 @@ import redis import pytest import api.index -from api.index import app from api import Project from starlette.testclient import TestClient @pytest.fixture() def client(): redis.Redis().flushall() - return TestClient(app) + return TestClient(api.index.app) def test_repo_info(client): # Start with an empty DB @@ -39,7 +38,7 @@ def test_repo_info(client): def test_repo_info_public_access(monkeypatch): """Public access is granted when CODE_GRAPH_PUBLIC=1.""" monkeypatch.setenv("CODE_GRAPH_PUBLIC", "1") - client = TestClient(app, raise_server_exceptions=False) + client = TestClient(api.index.app, raise_server_exceptions=False) response = client.post("/api/repo_info", json={"repo": "nonexistent"}) # Auth passed (not 401); endpoint may error without a database backend assert response.status_code != 401 @@ -51,12 +50,12 @@ def test_repo_info_token_required(monkeypatch): monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) # Without auth header → 401 - client = TestClient(app) + client = TestClient(api.index.app) response = client.post("/api/repo_info", json={"repo": "nonexistent"}) assert response.status_code == 401 # With valid auth header → not 401 - client = TestClient(app, raise_server_exceptions=False) + client = TestClient(api.index.app, raise_server_exceptions=False) response = client.post("/api/repo_info", json={"repo": "nonexistent"}, headers={"Authorization": "Bearer test-secret"}) assert response.status_code != 401 From 11b92ab89a1bf60ed0fb46e01e153ea6eb3e7040 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 12:27:44 +0200 Subject: 
[PATCH 07/16] Strengthen auth test assertions and fix trailing whitespace - test_list_repos_with_auth: monkeypatch get_repos for deterministic result, assert 200 status and exact payload instead of just != 401 - test_repo_info.py: remove trailing whitespace on status assignments - test_list_repos.py: remove trailing whitespace on status assignment Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/endpoints/test_list_repos.py | 11 +++++++---- tests/endpoints/test_repo_info.py | 4 ++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/tests/endpoints/test_list_repos.py b/tests/endpoints/test_list_repos.py index 103311a..a2f430c 100644 --- a/tests/endpoints/test_list_repos.py +++ b/tests/endpoints/test_list_repos.py @@ -24,7 +24,7 @@ def client(app): def test_list_repos(client): # Start with an empty DB response = client.get("/api/list_repos").json() - status = response["status"] + status = response["status"] repositories = response["repositories"] # Expecting an empty response @@ -54,11 +54,14 @@ def test_list_repos_with_auth(monkeypatch): """Authenticated request succeeds when SECRET_TOKEN is set.""" monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) - client = TestClient(api.index.app, raise_server_exceptions=False) + monkeypatch.setattr(api.index, "get_repos", lambda: ["fake-repo"]) + client = TestClient(api.index.app) response = client.get("/api/list_repos", headers={"Authorization": "Bearer test-secret"}) - # Auth passed (not 401); endpoint may error without a database backend - assert response.status_code != 401 + assert response.status_code == 200 + data = response.json() + assert data["status"] == "success" + assert data["repositories"] == ["fake-repo"] def test_list_repos_unauthorized(monkeypatch): diff --git a/tests/endpoints/test_repo_info.py b/tests/endpoints/test_repo_info.py index 21478a2..afe2196 100644 --- a/tests/endpoints/test_repo_info.py +++ 
b/tests/endpoints/test_repo_info.py @@ -12,7 +12,7 @@ def client(): def test_repo_info(client): # Start with an empty DB response = client.post("/api/repo_info", json={ "repo": "GraphRAG-SDK" }) - status = response.json()["status"] + status = response.json()["status"] # Expecting an empty response assert status == "Missing repository \"GraphRAG-SDK\"" @@ -25,7 +25,7 @@ def test_repo_info(client): # Reissue list_commits request response = client.post("/api/repo_info", json={ "repo": "GraphRAG-SDK" }) data = response.json() - status = data["status"] + status = data["status"] info = data["info"] # Expecting an empty response From 814689e30ed39be719d29af21b1799cf4898df04 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 13:58:19 +0200 Subject: [PATCH 08/16] Migrate FastAPI endpoints from sync to async for better concurrency Convert all 12 endpoints to async def using native async drivers (falkordb.asyncio, redis.asyncio) for read endpoints, and run_in_executor for CPU-bound/sync-only operations (source analysis, git clone, LLM chat). Add AsyncGraphQuery, AsyncGitGraph wrapper classes and async standalone functions while keeping all sync code intact for the analysis pipeline. Co-Authored-By: Claude Opus 4.6 --- api/auto_complete.py | 12 ++- api/git_utils/git_graph.py | 32 ++++++- api/graph.py | 136 +++++++++++++++++++++++++++++ api/index.py | 110 ++++++++++++++--------- api/info.py | 33 +++++++ api/llm.py | 10 ++- tests/endpoints/test_list_repos.py | 6 +- tests/index.py | 109 ++++++++++++++--------- 8 files changed, 360 insertions(+), 88 deletions(-) diff --git a/api/auto_complete.py b/api/auto_complete.py index 0ca41d9..deebc82 100644 --- a/api/auto_complete.py +++ b/api/auto_complete.py @@ -1,6 +1,16 @@ -from .graph import Graph +from .graph import Graph, AsyncGraphQuery + def prefix_search(repo: str, prefix: str) -> str: """ Returns a list of all entities in the repository that start with the given prefix. 
""" g = Graph(repo) return g.prefix_search(prefix) + + +async def async_prefix_search(repo: str, prefix: str) -> list: + """Async version of prefix_search using AsyncGraphQuery.""" + g = AsyncGraphQuery(repo) + try: + return await g.prefix_search(prefix) + finally: + await g.close() diff --git a/api/git_utils/git_graph.py b/api/git_utils/git_graph.py index 1352a1d..9769485 100644 --- a/api/git_utils/git_graph.py +++ b/api/git_utils/git_graph.py @@ -1,6 +1,7 @@ import os import logging from falkordb import FalkorDB, Node +from falkordb.asyncio import FalkorDB as AsyncFalkorDB from typing import List, Optional from pygit2 import Commit @@ -167,7 +168,7 @@ def get_child_transitions(self, child: str, parent: str) -> tuple[list[str], lis WITH path LIMIT 1 UNWIND relationships(path) AS e - WITH e + With e WHERE e.queries is not NULL RETURN collect(e.queries), collect(e.params) """ @@ -176,3 +177,32 @@ def get_child_transitions(self, child: str, parent: str) -> tuple[list[str], lis return (res[0][0], res[0][1]) + +class AsyncGitGraph: + """Async read-only git graph for endpoint use.""" + + def __init__(self, name: str): + self.db = AsyncFalkorDB( + host=os.getenv('FALKORDB_HOST', 'localhost'), + port=int(os.getenv('FALKORDB_PORT', 6379)), + username=os.getenv('FALKORDB_USERNAME', None), + password=os.getenv('FALKORDB_PASSWORD', None), + ) + self.g = self.db.select_graph(name) + + def _commit_from_node(self, node: Node) -> dict: + return { + 'hash': node.properties['hash'], + 'date': node.properties['date'], + 'author': node.properties['author'], + 'message': node.properties['message'], + } + + async def list_commits(self) -> List[dict]: + q = "MATCH (c:Commit) RETURN c ORDER BY c.date" + result_set = (await self.g.query(q)).result_set + return [self._commit_from_node(row[0]) for row in result_set] + + async def close(self) -> None: + await self.db.aclose() + diff --git a/api/graph.py b/api/graph.py index a9aa57f..b884f0a 100644 --- a/api/graph.py +++ b/api/graph.py @@ 
-3,6 +3,7 @@ from .entities import * from typing import Optional from falkordb import FalkorDB, Path, Node, QueryResult +from falkordb.asyncio import FalkorDB as AsyncFalkorDB # Configure the logger import logging @@ -627,3 +628,138 @@ def unreachable_entities(self, lbl: Optional[str], rel: Optional[str]) -> list[d return unreachables + +# --------------------------------------------------------------------------- +# Async helpers and read-only async graph wrapper +# --------------------------------------------------------------------------- + +def _async_db() -> AsyncFalkorDB: + """Create an async FalkorDB connection using environment config.""" + return AsyncFalkorDB( + host=os.getenv('FALKORDB_HOST', 'localhost'), + port=int(os.getenv('FALKORDB_PORT', 6379)), + username=os.getenv('FALKORDB_USERNAME', None), + password=os.getenv('FALKORDB_PASSWORD', None), + ) + + +async def async_graph_exists(name: str) -> bool: + db = _async_db() + try: + graphs = await db.list_graphs() + return name in graphs + finally: + await db.aclose() + + +async def async_get_repos() -> list[str]: + """List processed repositories (async version).""" + db = _async_db() + try: + graphs = await db.list_graphs() + return [g for g in graphs if not (g.endswith('_git') or g.endswith('_schema'))] + finally: + await db.aclose() + + +class AsyncGraphQuery: + """Read-only async wrapper for endpoint use. + + Uses falkordb.asyncio under the hood. No index creation or backlog — + indexes already exist from the sync Graph used during analysis. 
+ """ + + def __init__(self, name: str) -> None: + self.db = _async_db() + self.g = self.db.select_graph(name) + + async def _query(self, q: str, params: Optional[dict] = None): + return await self.g.query(q, params) + + async def get_sub_graph(self, l: int) -> dict: + q = """MATCH (src) + OPTIONAL MATCH (src)-[e]->(dest) + RETURN src, e, dest + LIMIT $limit""" + + sub_graph = {'nodes': [], 'edges': []} + result_set = (await self._query(q, {'limit': l})).result_set + for row in result_set: + src = row[0] + e = row[1] + dest = row[2] + sub_graph['nodes'].append(encode_node(src)) + if e is not None: + sub_graph['edges'].append(encode_edge(e)) + sub_graph['nodes'].append(encode_node(dest)) + return sub_graph + + async def get_neighbors(self, node_ids: list[int], rel: Optional[str] = None, lbl: Optional[str] = None) -> dict: + if not all(isinstance(node_id, int) for node_id in node_ids): + raise ValueError("node_ids must be an integer list") + + rel_query = f":{rel}" if rel else "" + lbl_query = f":{lbl}" if lbl else "" + + query = f""" + MATCH (n)-[e{rel_query}]->(dest{lbl_query}) + WHERE ID(n) IN $node_ids + RETURN e, dest + """ + + neighbors = {'nodes': [], 'edges': []} + try: + result_set = (await self._query(query, {'node_ids': node_ids})).result_set + for edge, destination_node in result_set: + neighbors['nodes'].append(encode_node(destination_node)) + neighbors['edges'].append(encode_edge(edge)) + return neighbors + except Exception as e: + logging.error(f"Error fetching neighbors for node {node_ids}: {e}") + return {'nodes': [], 'edges': []} + + async def prefix_search(self, prefix: str) -> list: + search_prefix = f"{prefix}*" + query = """ + CALL db.idx.fulltext.queryNodes('Searchable', $prefix) + YIELD node + WITH node + RETURN node + LIMIT 10 + """ + result_set = (await self._query(query, {'prefix': search_prefix})).result_set + return [encode_node(row[0]) for row in result_set] + + async def find_paths(self, src: int, dest: int) -> list: + q = """MATCH 
(src), (dest) + WHERE ID(src) = $src_id AND ID(dest) = $dest_id + WITH src, dest + MATCH p = (src)-[:CALLS*]->(dest) + RETURN p + """ + result_set = (await self._query(q, {'src_id': src, 'dest_id': dest})).result_set + paths = [] + for row in result_set: + path = [] + p = row[0] + nodes = p.nodes() + edges = p.edges() + for n, e in zip(nodes, edges): + path.append(encode_node(n)) + path.append(encode_edge(e)) + path.append(encode_node(nodes[-1])) + paths.append(path) + return paths + + async def stats(self) -> dict: + q = "MATCH (n) RETURN count(n)" + node_count = (await self._query(q)).result_set[0][0] + + q = "MATCH ()-[e]->() RETURN count(e)" + edge_count = (await self._query(q)).result_set[0][0] + + return {'node_count': node_count, 'edge_count': edge_count} + + async def close(self) -> None: + await self.db.aclose() + diff --git a/api/index.py b/api/index.py index fe25fc1..b0469bf 100644 --- a/api/index.py +++ b/api/index.py @@ -1,7 +1,9 @@ """ Main API module for CodeGraph. """ import os +import asyncio import logging from pathlib import Path +from functools import partial from dotenv import load_dotenv from fastapi import Depends, FastAPI, Header, HTTPException, Query @@ -10,12 +12,12 @@ from api.analyzers.source_analyzer import SourceAnalyzer from api.git_utils import git_utils -from api.git_utils.git_graph import GitGraph -from api.graph import Graph, get_repos, graph_exists -from api.info import get_repo_info +from api.git_utils.git_graph import GitGraph, AsyncGitGraph +from api.graph import Graph, get_repos, graph_exists, AsyncGraphQuery, async_get_repos, async_graph_exists +from api.info import get_repo_info, async_get_repo_info from api.llm import ask from api.project import Project -from .auto_complete import prefix_search +from .auto_complete import prefix_search, async_prefix_search # Load environment variables from .env file load_dotenv() @@ -104,20 +106,23 @@ class SwitchCommitRequest(BaseModel): # 
--------------------------------------------------------------------------- @app.get('/api/graph_entities') -def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): +async def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): """Fetch sub-graph entities from a given repository.""" if not repo: logging.error("Missing 'repo' parameter in request.") return JSONResponse({"status": "Missing 'repo' parameter"}, status_code=400) - if not graph_exists(repo): + if not await async_graph_exists(repo): logging.error("Missing project %s", repo) return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) try: - g = Graph(repo) - sub_graph = g.get_sub_graph(500) + g = AsyncGraphQuery(repo) + try: + sub_graph = await g.get_sub_graph(500) + finally: + await g.close() logging.info("Successfully retrieved sub-graph for repo: %s", repo) return {"status": "success", "entities": sub_graph} @@ -128,15 +133,18 @@ def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): @app.post('/api/get_neighbors') -def get_neighbors(data: NeighborsRequest, _=Depends(public_or_auth)): +async def get_neighbors(data: NeighborsRequest, _=Depends(public_or_auth)): """Get neighbors of a nodes list in the graph.""" - if not graph_exists(data.repo): + if not await async_graph_exists(data.repo): logging.error("Missing project %s", data.repo) return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = Graph(data.repo) - neighbors = g.get_neighbors(data.node_ids) + g = AsyncGraphQuery(data.repo) + try: + neighbors = await g.get_neighbors(data.node_ids) + finally: + await g.close() logging.info("Successfully retrieved neighbors for node IDs %s in repo '%s'.", data.node_ids, data.repo) @@ -144,31 +152,34 @@ def get_neighbors(data: NeighborsRequest, _=Depends(public_or_auth)): @app.post('/api/auto_complete') -def auto_complete(data: AutoCompleteRequest, _=Depends(public_or_auth)): +async def auto_complete(data: 
AutoCompleteRequest, _=Depends(public_or_auth)): """Process auto-completion requests for a repository based on a prefix.""" - if not graph_exists(data.repo): + if not await async_graph_exists(data.repo): return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - completions = prefix_search(data.repo, data.prefix) + completions = await async_prefix_search(data.repo, data.prefix) return {"status": "success", "completions": completions} @app.get('/api/list_repos') -def list_repos(_=Depends(public_or_auth)): +async def list_repos(_=Depends(public_or_auth)): """List all available repositories.""" - repos = get_repos() + repos = await async_get_repos() return {"status": "success", "repositories": repos} @app.post('/api/repo_info') -def repo_info(data: RepoRequest, _=Depends(public_or_auth)): +async def repo_info(data: RepoRequest, _=Depends(public_or_auth)): """Retrieve information about a specific repository.""" - g = Graph(data.repo) - stats = g.stats() - info = get_repo_info(data.repo) + g = AsyncGraphQuery(data.repo) + try: + stats = await g.stats() + finally: + await g.close() + info = await async_get_repo_info(data.repo) if stats is None or info is None: return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) @@ -178,24 +189,27 @@ def repo_info(data: RepoRequest, _=Depends(public_or_auth)): @app.post('/api/find_paths') -def find_paths(data: FindPathsRequest, _=Depends(public_or_auth)): +async def find_paths(data: FindPathsRequest, _=Depends(public_or_auth)): """Find all paths between a source and destination node in the graph.""" - if not graph_exists(data.repo): + if not await async_graph_exists(data.repo): logging.error("Missing project %s", data.repo) return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = Graph(data.repo) - paths = g.find_paths(data.src, data.dest) + g = AsyncGraphQuery(data.repo) + try: + paths = await g.find_paths(data.src, data.dest) + finally: + await 
g.close() return {"status": "success", "paths": paths} @app.post('/api/chat') -def chat(data: ChatRequest, _=Depends(public_or_auth)): +async def chat(data: ChatRequest, _=Depends(public_or_auth)): """Chat with the CodeGraph language model.""" try: - answer = ask(data.repo, data.msg) + answer = await ask(data.repo, data.msg) except Exception as e: logging.exception("Chat error for repo '%s': %s", data.repo, e) return JSONResponse({"status": "error", "response": "Internal server error"}, @@ -205,7 +219,7 @@ def chat(data: ChatRequest, _=Depends(public_or_auth)): @app.post('/api/analyze_folder') -def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): +async def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): """Analyze local source code. Always requires a valid token.""" resolved_path = Path(data.path).resolve() @@ -222,41 +236,53 @@ def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): status_code=400) proj_name = resolved_path.name - g = Graph(proj_name) - analyzer = SourceAnalyzer() - analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) + def _analyze(): + g = Graph(proj_name) + analyzer = SourceAnalyzer() + analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) + + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, _analyze) return {"status": "success", "project": proj_name} @app.post('/api/analyze_repo') -def analyze_repo(data: AnalyzeRepoRequest, _=Depends(token_required)): +async def analyze_repo(data: AnalyzeRepoRequest, _=Depends(token_required)): """Analyze a GitHub repository. 
Always requires a valid token.""" logger.debug('Received repo_url: %s', data.repo_url) - proj = Project.from_git_repository(data.repo_url) - proj.analyze_sources(data.ignore) - proj.process_git_history(data.ignore) + def _analyze(): + proj = Project.from_git_repository(data.repo_url) + proj.analyze_sources(data.ignore) + proj.process_git_history(data.ignore) + + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, _analyze) return {"status": "success"} @app.post('/api/switch_commit') -def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): +async def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): """Switch a repository to a specific commit. Always requires a valid token.""" - git_utils.switch_commit(data.repo, data.commit) + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, git_utils.switch_commit, data.repo, data.commit) return {"status": "success"} @app.post('/api/list_commits') -def list_commits(data: RepoRequest, _=Depends(public_or_auth)): +async def list_commits(data: RepoRequest, _=Depends(public_or_auth)): """List all commits of a specified repository.""" - git_graph = GitGraph(git_utils.GitRepoName(data.repo)) - commits = git_graph.list_commits() + git_graph = AsyncGitGraph(git_utils.GitRepoName(data.repo)) + try: + commits = await git_graph.list_commits() + finally: + await git_graph.close() return {"status": "success", "commits": commits} # --------------------------------------------------------------------------- @@ -266,7 +292,7 @@ def list_commits(data: RepoRequest, _=Depends(public_or_auth)): INDEX_HTML = STATIC_DIR / "index.html" @app.get("/{full_path:path}") -def serve_spa(full_path: str): +async def serve_spa(full_path: str): """Serve React SPA — static assets or index.html catch-all.""" file = (STATIC_DIR / full_path).resolve() if not file.is_relative_to(STATIC_DIR): @@ -275,4 +301,4 @@ def serve_spa(full_path: str): return FileResponse(file) if INDEX_HTML.is_file(): return 
FileResponse(INDEX_HTML) - return JSONResponse({"error": "Not found"}, status_code=404) \ No newline at end of file + return JSONResponse({"error": "Not found"}, status_code=404) diff --git a/api/info.py b/api/info.py index 91c1413..b1d9ea7 100644 --- a/api/info.py +++ b/api/info.py @@ -1,5 +1,6 @@ import os import redis +import redis.asyncio as aioredis import logging from typing import Optional, Dict @@ -115,3 +116,35 @@ def get_repo_info(repo_name: str) -> Optional[Dict[str, str]]: logging.error(f"Error retrieving repo info for '{repo_name}': {e}") raise + +# --------------------------------------------------------------------------- +# Async versions (for async endpoints) +# --------------------------------------------------------------------------- + +async def async_get_redis_connection() -> aioredis.Redis: + return aioredis.Redis( + host=os.getenv('FALKORDB_HOST', "localhost"), + port=int(os.getenv('FALKORDB_PORT', "6379")), + username=os.getenv('FALKORDB_USERNAME'), + password=os.getenv('FALKORDB_PASSWORD'), + decode_responses=True, + ) + + +async def async_get_repo_info(repo_name: str) -> Optional[Dict[str, str]]: + try: + r = await async_get_redis_connection() + try: + key = _repo_info_key(repo_name) + repo_info = await r.hgetall(key) + if not repo_info: + logging.warning(f"No repository info found for {repo_name}") + return None + logging.info(f"Repository info retrieved for {repo_name}") + return repo_info + finally: + await r.aclose() + except Exception as e: + logging.error(f"Error retrieving repo info for '{repo_name}': {e}") + raise + diff --git a/api/llm.py b/api/llm.py index 8c3330e..6901c9d 100644 --- a/api/llm.py +++ b/api/llm.py @@ -1,4 +1,5 @@ import os +import asyncio import logging from graphrag_sdk.models.litellm import LiteModel @@ -256,7 +257,7 @@ def _create_kg_agent(repo_name: str): return code_graph_kg.chat_session() -def ask(repo_name: str, question: str) -> str: +def _ask_sync(repo_name: str, question: str) -> str: chat = 
_create_kg_agent(repo_name) logging.debug(f"Question: {question}") @@ -264,4 +265,9 @@ def ask(repo_name: str, question: str) -> str: response = chat.send_message(question) logging.debug(f"Response: {response}") print(f"Response: {response['response']}") - return response['response'] \ No newline at end of file + return response['response'] + + +async def ask(repo_name: str, question: str) -> str: + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, _ask_sync, repo_name, question) \ No newline at end of file diff --git a/tests/endpoints/test_list_repos.py b/tests/endpoints/test_list_repos.py index a2f430c..198f7c2 100644 --- a/tests/endpoints/test_list_repos.py +++ b/tests/endpoints/test_list_repos.py @@ -54,7 +54,11 @@ def test_list_repos_with_auth(monkeypatch): """Authenticated request succeeds when SECRET_TOKEN is set.""" monkeypatch.setattr(api.index, "SECRET_TOKEN", "test-secret") monkeypatch.delenv("CODE_GRAPH_PUBLIC", raising=False) - monkeypatch.setattr(api.index, "get_repos", lambda: ["fake-repo"]) + + async def _fake_get_repos(): + return ["fake-repo"] + + monkeypatch.setattr(api.index, "async_get_repos", _fake_get_repos) client = TestClient(api.index.app) response = client.get("/api/list_repos", headers={"Authorization": "Bearer test-secret"}) diff --git a/tests/index.py b/tests/index.py index a1c4752..23301b9 100644 --- a/tests/index.py +++ b/tests/index.py @@ -1,16 +1,17 @@ import os +import asyncio import logging from pathlib import Path -from api.graph import Graph, get_repos, graph_exists -from api.info import get_repo_info +from api.graph import Graph, get_repos, graph_exists, AsyncGraphQuery, async_get_repos, async_graph_exists +from api.info import get_repo_info, async_get_repo_info from dotenv import load_dotenv from fastapi import Depends, FastAPI, Header, HTTPException, Query from fastapi.responses import JSONResponse from pydantic import BaseModel from api.project import Project -from api.auto_complete import 
prefix_search +from api.auto_complete import prefix_search, async_prefix_search from api.git_utils import git_utils # Load environment variables from .env file @@ -86,18 +87,21 @@ def create_app(): app = FastAPI() @app.get('/api/graph_entities') - def graph_entities(repo: str = Query(None), _=Depends(token_required)): + async def graph_entities(repo: str = Query(None), _=Depends(token_required)): if not repo: logging.error("Missing 'repo' parameter in request.") return JSONResponse({"status": "Missing 'repo' parameter"}, status_code=400) - if not graph_exists(repo): + if not await async_graph_exists(repo): logging.error("Missing project %s", repo) return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) try: - g = Graph(repo) - sub_graph = g.get_sub_graph(500) + g = AsyncGraphQuery(repo) + try: + sub_graph = await g.get_sub_graph(500) + finally: + await g.close() logging.info("Successfully retrieved sub-graph for repo: %s", repo) return {"status": "success", "entities": sub_graph} except Exception as e: @@ -105,55 +109,64 @@ def graph_entities(repo: str = Query(None), _=Depends(token_required)): return JSONResponse({"status": "Internal server error"}, status_code=500) @app.post('/api/get_neighbors') - def get_neighbors(data: NeighborsRequest, _=Depends(token_required)): - if not graph_exists(data.repo): + async def get_neighbors(data: NeighborsRequest, _=Depends(token_required)): + if not await async_graph_exists(data.repo): logging.error("Missing project %s", data.repo) return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = Graph(data.repo) - neighbors = g.get_neighbors(data.node_ids) + g = AsyncGraphQuery(data.repo) + try: + neighbors = await g.get_neighbors(data.node_ids) + finally: + await g.close() logging.info("Successfully retrieved neighbors for node IDs %s in repo '%s'.", data.node_ids, data.repo) return {"status": "success", "neighbors": neighbors} @app.post('/api/auto_complete') - def auto_complete(data: 
AutoCompleteRequest, _=Depends(token_required)): - if not graph_exists(data.repo): + async def auto_complete(data: AutoCompleteRequest, _=Depends(token_required)): + if not await async_graph_exists(data.repo): return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - completions = prefix_search(data.repo, data.prefix) + completions = await async_prefix_search(data.repo, data.prefix) return {"status": "success", "completions": completions} @app.get('/api/list_repos') - def list_repos(_=Depends(token_required)): - repos = get_repos() + async def list_repos(_=Depends(token_required)): + repos = await async_get_repos() return {"status": "success", "repositories": repos} @app.post('/api/repo_info') - def repo_info(data: RepoRequest, _=Depends(token_required)): - g = Graph(data.repo) - stats = g.stats() - info = get_repo_info(data.repo) + async def repo_info(data: RepoRequest, _=Depends(token_required)): + g = AsyncGraphQuery(data.repo) + try: + stats = await g.stats() + finally: + await g.close() + info = await async_get_repo_info(data.repo) if stats is None or info is None: return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) stats |= info return {"status": "success", "info": stats} @app.post('/api/find_paths') - def find_paths(data: FindPathsRequest, _=Depends(token_required)): - if not graph_exists(data.repo): + async def find_paths(data: FindPathsRequest, _=Depends(token_required)): + if not await async_graph_exists(data.repo): logging.error("Missing project %s", data.repo) return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = Graph(data.repo) - paths = g.find_paths(data.src, data.dest) + g = AsyncGraphQuery(data.repo) + try: + paths = await g.find_paths(data.src, data.dest) + finally: + await g.close() return {"status": "success", "paths": paths} @app.post('/api/chat') - def chat(data: ChatRequest, _=Depends(token_required)): + async def chat(data: ChatRequest, 
_=Depends(token_required)): from api.llm import ask try: - answer = ask(data.repo, data.msg) + answer = await ask(data.repo, data.msg) except Exception as e: logging.exception("Chat error for repo '%s': %s", data.repo, e) return JSONResponse({"status": "error", "response": "Internal server error"}, @@ -161,7 +174,7 @@ def chat(data: ChatRequest, _=Depends(token_required)): return {"status": "success", "response": answer} @app.post('/api/analyze_folder') - def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): + async def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): resolved_path = Path(data.path).resolve() if not resolved_path.is_relative_to(ALLOWED_ANALYSIS_DIR): @@ -177,29 +190,43 @@ def analyze_folder(data: AnalyzeFolderRequest, _=Depends(token_required)): from api.analyzers.source_analyzer import SourceAnalyzer proj_name = resolved_path.name - g = Graph(proj_name) - analyzer = SourceAnalyzer() - analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) + + def _analyze(): + g = Graph(proj_name) + analyzer = SourceAnalyzer() + analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) + + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, _analyze) return {"status": "success", "project": proj_name} @app.post('/api/analyze_repo') - def analyze_repo(data: AnalyzeRepoRequest, _=Depends(token_required)): + async def analyze_repo(data: AnalyzeRepoRequest, _=Depends(token_required)): logger.debug('Received repo_url: %s', data.repo_url) - proj = Project.from_git_repository(data.repo_url) - proj.analyze_sources(data.ignore) - proj.process_git_history(data.ignore) + + def _analyze(): + proj = Project.from_git_repository(data.repo_url) + proj.analyze_sources(data.ignore) + proj.process_git_history(data.ignore) + + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, _analyze) return {"status": "success"} @app.post('/api/switch_commit') - def switch_commit(data: SwitchCommitRequest, 
_=Depends(token_required)): - git_utils.switch_commit(data.repo, data.commit) + async def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, git_utils.switch_commit, data.repo, data.commit) return {"status": "success"} @app.post('/api/list_commits') - def list_commits(data: RepoRequest, _=Depends(token_required)): - from api.git_utils.git_graph import GitGraph - git_graph = GitGraph(git_utils.GitRepoName(data.repo)) - commits = git_graph.list_commits() + async def list_commits(data: RepoRequest, _=Depends(token_required)): + from api.git_utils.git_graph import AsyncGitGraph + git_graph = AsyncGitGraph(git_utils.GitRepoName(data.repo)) + try: + commits = await git_graph.list_commits() + finally: + await git_graph.close() return {"status": "success", "commits": commits} return app @@ -207,4 +234,4 @@ def list_commits(data: RepoRequest, _=Depends(token_required)): if __name__ == '__main__': import uvicorn application = create_app() - uvicorn.run(application, host="127.0.0.1", port=5000) \ No newline at end of file + uvicorn.run(application, host="127.0.0.1", port=5000) From fbdbecb762b5b9de705174f5a459923da3d88349 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 14:35:14 +0200 Subject: [PATCH 09/16] Address PR review feedback - Replace deprecated asyncio.get_event_loop() with get_running_loop() in all coroutines (api/index.py, api/llm.py, tests/index.py) - Remove unused `from functools import partial` import (api/index.py) - Fix inconsistent Cypher keyword casing: "With e" -> "WITH e" (api/git_utils/git_graph.py) - Rename ambiguous parameter `l` to `limit` in AsyncGraphQuery.get_sub_graph (api/graph.py) - Add strict=True to zip(nodes, edges) in AsyncGraphQuery.find_paths to catch length mismatches early (api/graph.py) Co-Authored-By: Claude Opus 4.6 --- api/git_utils/git_graph.py | 2 +- api/graph.py | 6 +++--- api/index.py | 7 +++---- api/llm.py | 2 +- 
tests/index.py | 6 +++--- 5 files changed, 11 insertions(+), 12 deletions(-) diff --git a/api/git_utils/git_graph.py b/api/git_utils/git_graph.py index 9769485..52de8da 100644 --- a/api/git_utils/git_graph.py +++ b/api/git_utils/git_graph.py @@ -168,7 +168,7 @@ def get_child_transitions(self, child: str, parent: str) -> tuple[list[str], lis WITH path LIMIT 1 UNWIND relationships(path) AS e - With e + WITH e WHERE e.queries is not NULL RETURN collect(e.queries), collect(e.params) """ diff --git a/api/graph.py b/api/graph.py index b884f0a..0a90d30 100644 --- a/api/graph.py +++ b/api/graph.py @@ -676,14 +676,14 @@ def __init__(self, name: str) -> None: async def _query(self, q: str, params: Optional[dict] = None): return await self.g.query(q, params) - async def get_sub_graph(self, l: int) -> dict: + async def get_sub_graph(self, limit: int) -> dict: q = """MATCH (src) OPTIONAL MATCH (src)-[e]->(dest) RETURN src, e, dest LIMIT $limit""" sub_graph = {'nodes': [], 'edges': []} - result_set = (await self._query(q, {'limit': l})).result_set + result_set = (await self._query(q, {'limit': limit})).result_set for row in result_set: src = row[0] e = row[1] @@ -744,7 +744,7 @@ async def find_paths(self, src: int, dest: int) -> list: p = row[0] nodes = p.nodes() edges = p.edges() - for n, e in zip(nodes, edges): + for n, e in zip(nodes, edges, strict=True): path.append(encode_node(n)) path.append(encode_edge(e)) path.append(encode_node(nodes[-1])) diff --git a/api/index.py b/api/index.py index b0469bf..5a99ec6 100644 --- a/api/index.py +++ b/api/index.py @@ -3,7 +3,6 @@ import asyncio import logging from pathlib import Path -from functools import partial from dotenv import load_dotenv from fastapi import Depends, FastAPI, Header, HTTPException, Query @@ -242,7 +241,7 @@ def _analyze(): analyzer = SourceAnalyzer() analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() await 
loop.run_in_executor(None, _analyze) return {"status": "success", "project": proj_name} @@ -259,7 +258,7 @@ def _analyze(): proj.analyze_sources(data.ignore) proj.process_git_history(data.ignore) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() await loop.run_in_executor(None, _analyze) return {"status": "success"} @@ -269,7 +268,7 @@ def _analyze(): async def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): """Switch a repository to a specific commit. Always requires a valid token.""" - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() await loop.run_in_executor(None, git_utils.switch_commit, data.repo, data.commit) return {"status": "success"} diff --git a/api/llm.py b/api/llm.py index 6901c9d..1f84c0d 100644 --- a/api/llm.py +++ b/api/llm.py @@ -269,5 +269,5 @@ def _ask_sync(repo_name: str, question: str) -> str: async def ask(repo_name: str, question: str) -> str: - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() return await loop.run_in_executor(None, _ask_sync, repo_name, question) \ No newline at end of file diff --git a/tests/index.py b/tests/index.py index 23301b9..9721289 100644 --- a/tests/index.py +++ b/tests/index.py @@ -196,7 +196,7 @@ def _analyze(): analyzer = SourceAnalyzer() analyzer.analyze_local_folder(str(resolved_path), g, data.ignore) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() await loop.run_in_executor(None, _analyze) return {"status": "success", "project": proj_name} @@ -209,13 +209,13 @@ def _analyze(): proj.analyze_sources(data.ignore) proj.process_git_history(data.ignore) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() await loop.run_in_executor(None, _analyze) return {"status": "success"} @app.post('/api/switch_commit') async def switch_commit(data: SwitchCommitRequest, _=Depends(token_required)): - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() await loop.run_in_executor(None, 
git_utils.switch_commit, data.repo, data.commit) return {"status": "success"} From e893b66bfaef548b9f0cfb7483d34425e517ab0c Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 14:41:49 +0200 Subject: [PATCH 10/16] Remove unused sync imports and add async wrapper unit tests - Remove unused sync imports from api/index.py (GitGraph, get_repos, graph_exists, get_repo_info, prefix_search) and tests/index.py (get_repos, graph_exists, get_repo_info, prefix_search) - Add tests/test_async_graph.py with 7 unit tests covering async_graph_exists, async_get_repos, and AsyncGraphQuery (stats, close, error cleanup) using mocked FalkorDB client Co-Authored-By: Claude Opus 4.6 --- api/index.py | 8 +-- tests/index.py | 6 +- tests/test_async_graph.py | 128 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 135 insertions(+), 7 deletions(-) create mode 100644 tests/test_async_graph.py diff --git a/api/index.py b/api/index.py index 5a99ec6..fa71e32 100644 --- a/api/index.py +++ b/api/index.py @@ -11,12 +11,12 @@ from api.analyzers.source_analyzer import SourceAnalyzer from api.git_utils import git_utils -from api.git_utils.git_graph import GitGraph, AsyncGitGraph -from api.graph import Graph, get_repos, graph_exists, AsyncGraphQuery, async_get_repos, async_graph_exists -from api.info import get_repo_info, async_get_repo_info +from api.git_utils.git_graph import AsyncGitGraph +from api.graph import Graph, AsyncGraphQuery, async_get_repos, async_graph_exists +from api.info import async_get_repo_info from api.llm import ask from api.project import Project -from .auto_complete import prefix_search, async_prefix_search +from .auto_complete import async_prefix_search # Load environment variables from .env file load_dotenv() diff --git a/tests/index.py b/tests/index.py index 9721289..f524c92 100644 --- a/tests/index.py +++ b/tests/index.py @@ -3,15 +3,15 @@ import logging from pathlib import Path -from api.graph import Graph, get_repos, graph_exists, AsyncGraphQuery, 
async_get_repos, async_graph_exists -from api.info import get_repo_info, async_get_repo_info +from api.graph import Graph, AsyncGraphQuery, async_get_repos, async_graph_exists +from api.info import async_get_repo_info from dotenv import load_dotenv from fastapi import Depends, FastAPI, Header, HTTPException, Query from fastapi.responses import JSONResponse from pydantic import BaseModel from api.project import Project -from api.auto_complete import prefix_search, async_prefix_search +from api.auto_complete import async_prefix_search from api.git_utils import git_utils # Load environment variables from .env file diff --git a/tests/test_async_graph.py b/tests/test_async_graph.py new file mode 100644 index 0000000..bea1ede --- /dev/null +++ b/tests/test_async_graph.py @@ -0,0 +1,128 @@ +"""Unit tests for async graph wrappers (AsyncGraphQuery, async_graph_exists, async_get_repos). + +These tests mock the underlying falkordb.asyncio client so they run without +a live FalkorDB instance. +""" + +import pytest +from unittest.mock import AsyncMock, MagicMock, patch + +from api.graph import AsyncGraphQuery, async_graph_exists, async_get_repos + + +# --------------------------------------------------------------------------- +# async_graph_exists +# --------------------------------------------------------------------------- + +@pytest.mark.anyio +async def test_async_graph_exists_true(): + mock_db = MagicMock() + mock_db.list_graphs = AsyncMock(return_value=["my_repo", "other"]) + mock_db.aclose = AsyncMock() + + with patch("api.graph._async_db", return_value=mock_db): + result = await async_graph_exists("my_repo") + + assert result is True + mock_db.aclose.assert_awaited_once() + + +@pytest.mark.anyio +async def test_async_graph_exists_false(): + mock_db = MagicMock() + mock_db.list_graphs = AsyncMock(return_value=["other"]) + mock_db.aclose = AsyncMock() + + with patch("api.graph._async_db", return_value=mock_db): + result = await async_graph_exists("missing") + + assert 
result is False + mock_db.aclose.assert_awaited_once() + + +@pytest.mark.anyio +async def test_async_graph_exists_closes_on_error(): + mock_db = MagicMock() + mock_db.list_graphs = AsyncMock(side_effect=RuntimeError("conn failed")) + mock_db.aclose = AsyncMock() + + with patch("api.graph._async_db", return_value=mock_db): + with pytest.raises(RuntimeError, match="conn failed"): + await async_graph_exists("any") + + mock_db.aclose.assert_awaited_once() + + +# --------------------------------------------------------------------------- +# async_get_repos +# --------------------------------------------------------------------------- + +@pytest.mark.anyio +async def test_async_get_repos_filters_suffixes(): + mock_db = MagicMock() + mock_db.list_graphs = AsyncMock( + return_value=["repo1", "repo1_git", "repo1_schema", "repo2"] + ) + mock_db.aclose = AsyncMock() + + with patch("api.graph._async_db", return_value=mock_db): + repos = await async_get_repos() + + assert repos == ["repo1", "repo2"] + mock_db.aclose.assert_awaited_once() + + +@pytest.mark.anyio +async def test_async_get_repos_empty(): + mock_db = MagicMock() + mock_db.list_graphs = AsyncMock(return_value=[]) + mock_db.aclose = AsyncMock() + + with patch("api.graph._async_db", return_value=mock_db): + repos = await async_get_repos() + + assert repos == [] + + +# --------------------------------------------------------------------------- +# AsyncGraphQuery +# --------------------------------------------------------------------------- + +def _make_mock_graph_query(): + """Create a mock AsyncGraphQuery with a mocked db and graph.""" + mock_db = MagicMock() + mock_db.aclose = AsyncMock() + mock_graph = MagicMock() + mock_graph.query = AsyncMock() + mock_db.select_graph = MagicMock(return_value=mock_graph) + return mock_db, mock_graph + + +@pytest.mark.anyio +async def test_async_graph_query_stats(): + mock_db, mock_graph = _make_mock_graph_query() + + node_result = MagicMock() + node_result.result_set = [[42]] + 
edge_result = MagicMock() + edge_result.result_set = [[7]] + mock_graph.query = AsyncMock(side_effect=[node_result, edge_result]) + + with patch("api.graph._async_db", return_value=mock_db): + gq = AsyncGraphQuery("test_repo") + stats = await gq.stats() + await gq.close() + + assert stats == {"node_count": 42, "edge_count": 7} + mock_db.aclose.assert_awaited_once() + + +@pytest.mark.anyio +async def test_async_graph_query_close(): + mock_db, _ = _make_mock_graph_query() + + with patch("api.graph._async_db", return_value=mock_db): + gq = AsyncGraphQuery("test_repo") + await gq.close() + + mock_db.aclose.assert_awaited_once() From 92fa424f1dabfc5daaffb6d4051d9bb71c60cede Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 14:45:20 +0200 Subject: [PATCH 11/16] Add strict=True to zip in sync find_paths for consistency Matches the async find_paths which already uses strict=True, so both versions raise immediately on a nodes/edges length mismatch. Co-Authored-By: Claude Opus 4.6 --- api/graph.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/graph.py b/api/graph.py index 0a90d30..c5aea2f 100644 --- a/api/graph.py +++ b/api/graph.py @@ -581,7 +581,7 @@ def find_paths(self, src: int, dest: int) -> list[Path]: nodes = p.nodes() edges = p.edges() - for n, e in zip(nodes, edges): + for n, e in zip(nodes, edges, strict=True): path.append(encode_node(n)) path.append(encode_edge(e)) From 20a02d532c867e8e6dc59cb16b04f85d282dd925 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 15:06:10 +0200 Subject: [PATCH 12/16] Address review: fix strict=True zip, revert serve_spa to sync, eliminate double connections MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove strict=True from zip(nodes, edges) in find_paths (both sync and async) — nodes intentionally has one more element than edges - Revert serve_spa to sync def so FastAPI offloads filesystem I/O to thread pool instead of blocking the 
event loop - Add graph_exists() method to AsyncGraphQuery to reuse the connection instead of opening a separate one via async_graph_exists() - Update all endpoints in api/index.py and tests/index.py to use g.graph_exists() on the query instance - Fix missing trailing newline in api/llm.py Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- api/graph.py | 10 ++++++++-- api/index.py | 49 ++++++++++++++++++++++++++----------------------- api/llm.py | 2 +- tests/index.py | 46 ++++++++++++++++++++++++---------------------- 4 files changed, 59 insertions(+), 48 deletions(-) diff --git a/api/graph.py b/api/graph.py index c5aea2f..085dfde 100644 --- a/api/graph.py +++ b/api/graph.py @@ -581,7 +581,7 @@ def find_paths(self, src: int, dest: int) -> list[Path]: nodes = p.nodes() edges = p.edges() - for n, e in zip(nodes, edges, strict=True): + for n, e in zip(nodes, edges): path.append(encode_node(n)) path.append(encode_edge(e)) @@ -670,9 +670,15 @@ class AsyncGraphQuery: """ def __init__(self, name: str) -> None: + self.name = name self.db = _async_db() self.g = self.db.select_graph(name) + async def graph_exists(self) -> bool: + """Check if this graph exists, reusing the current connection.""" + graphs = await self.db.list_graphs() + return self.name in graphs + async def _query(self, q: str, params: Optional[dict] = None): return await self.g.query(q, params) @@ -744,7 +750,7 @@ async def find_paths(self, src: int, dest: int) -> list: p = row[0] nodes = p.nodes() edges = p.edges() - for n, e in zip(nodes, edges, strict=True): + for n, e in zip(nodes, edges): path.append(encode_node(n)) path.append(encode_edge(e)) path.append(encode_node(nodes[-1])) diff --git a/api/index.py b/api/index.py index fa71e32..b26ba00 100644 --- a/api/index.py +++ b/api/index.py @@ -12,11 +12,11 @@ from api.analyzers.source_analyzer import SourceAnalyzer from api.git_utils import git_utils from api.git_utils.git_graph import AsyncGitGraph -from api.graph import Graph, 
AsyncGraphQuery, async_get_repos, async_graph_exists +from api.graph import Graph, AsyncGraphQuery, async_get_repos from api.info import async_get_repo_info from api.llm import ask from api.project import Project -from .auto_complete import async_prefix_search + # Load environment variables from .env file load_dotenv() @@ -112,16 +112,13 @@ async def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): logging.error("Missing 'repo' parameter in request.") return JSONResponse({"status": "Missing 'repo' parameter"}, status_code=400) - if not await async_graph_exists(repo): - logging.error("Missing project %s", repo) - return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) - + g = AsyncGraphQuery(repo) try: - g = AsyncGraphQuery(repo) - try: - sub_graph = await g.get_sub_graph(500) - finally: - await g.close() + if not await g.graph_exists(): + logging.error("Missing project %s", repo) + return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) + + sub_graph = await g.get_sub_graph(500) logging.info("Successfully retrieved sub-graph for repo: %s", repo) return {"status": "success", "entities": sub_graph} @@ -129,18 +126,20 @@ async def graph_entities(repo: str = Query(None), _=Depends(public_or_auth)): except Exception as e: logging.exception("Error retrieving sub-graph for repo '%s': %s", repo, e) return JSONResponse({"status": "Internal server error"}, status_code=500) + finally: + await g.close() @app.post('/api/get_neighbors') async def get_neighbors(data: NeighborsRequest, _=Depends(public_or_auth)): """Get neighbors of a nodes list in the graph.""" - if not await async_graph_exists(data.repo): - logging.error("Missing project %s", data.repo) - return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = AsyncGraphQuery(data.repo) try: + if not await g.graph_exists(): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, 
status_code=400) + neighbors = await g.get_neighbors(data.node_ids) finally: await g.close() @@ -154,10 +153,14 @@ async def get_neighbors(data: NeighborsRequest, _=Depends(public_or_auth)): async def auto_complete(data: AutoCompleteRequest, _=Depends(public_or_auth)): """Process auto-completion requests for a repository based on a prefix.""" - if not await async_graph_exists(data.repo): - return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + g = AsyncGraphQuery(data.repo) + try: + if not await g.graph_exists(): + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - completions = await async_prefix_search(data.repo, data.prefix) + completions = await g.prefix_search(data.prefix) + finally: + await g.close() return {"status": "success", "completions": completions} @@ -191,12 +194,12 @@ async def repo_info(data: RepoRequest, _=Depends(public_or_auth)): async def find_paths(data: FindPathsRequest, _=Depends(public_or_auth)): """Find all paths between a source and destination node in the graph.""" - if not await async_graph_exists(data.repo): - logging.error("Missing project %s", data.repo) - return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = AsyncGraphQuery(data.repo) try: + if not await g.graph_exists(): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + paths = await g.find_paths(data.src, data.dest) finally: await g.close() @@ -291,7 +294,7 @@ async def list_commits(data: RepoRequest, _=Depends(public_or_auth)): INDEX_HTML = STATIC_DIR / "index.html" @app.get("/{full_path:path}") -async def serve_spa(full_path: str): +def serve_spa(full_path: str): """Serve React SPA — static assets or index.html catch-all.""" file = (STATIC_DIR / full_path).resolve() if not file.is_relative_to(STATIC_DIR): diff --git a/api/llm.py b/api/llm.py index 1f84c0d..7c586fa 100644 --- a/api/llm.py +++ b/api/llm.py 
@@ -270,4 +270,4 @@ def _ask_sync(repo_name: str, question: str) -> str: async def ask(repo_name: str, question: str) -> str: loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, _ask_sync, repo_name, question) \ No newline at end of file + return await loop.run_in_executor(None, _ask_sync, repo_name, question) diff --git a/tests/index.py b/tests/index.py index f524c92..4eff250 100644 --- a/tests/index.py +++ b/tests/index.py @@ -3,7 +3,7 @@ import logging from pathlib import Path -from api.graph import Graph, AsyncGraphQuery, async_get_repos, async_graph_exists +from api.graph import Graph, AsyncGraphQuery, async_get_repos from api.info import async_get_repo_info from dotenv import load_dotenv from fastapi import Depends, FastAPI, Header, HTTPException, Query @@ -11,7 +11,6 @@ from pydantic import BaseModel from api.project import Project -from api.auto_complete import async_prefix_search from api.git_utils import git_utils # Load environment variables from .env file @@ -92,30 +91,29 @@ async def graph_entities(repo: str = Query(None), _=Depends(token_required)): logging.error("Missing 'repo' parameter in request.") return JSONResponse({"status": "Missing 'repo' parameter"}, status_code=400) - if not await async_graph_exists(repo): - logging.error("Missing project %s", repo) - return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) - + g = AsyncGraphQuery(repo) try: - g = AsyncGraphQuery(repo) - try: - sub_graph = await g.get_sub_graph(500) - finally: - await g.close() + if not await g.graph_exists(): + logging.error("Missing project %s", repo) + return JSONResponse({"status": f"Missing project {repo}"}, status_code=400) + + sub_graph = await g.get_sub_graph(500) logging.info("Successfully retrieved sub-graph for repo: %s", repo) return {"status": "success", "entities": sub_graph} except Exception as e: logging.error("Error retrieving sub-graph for repo '%s': %s", repo, e) return JSONResponse({"status": "Internal server 
error"}, status_code=500) + finally: + await g.close() @app.post('/api/get_neighbors') async def get_neighbors(data: NeighborsRequest, _=Depends(token_required)): - if not await async_graph_exists(data.repo): - logging.error("Missing project %s", data.repo) - return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = AsyncGraphQuery(data.repo) try: + if not await g.graph_exists(): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + neighbors = await g.get_neighbors(data.node_ids) finally: await g.close() @@ -125,10 +123,14 @@ async def get_neighbors(data: NeighborsRequest, _=Depends(token_required)): @app.post('/api/auto_complete') async def auto_complete(data: AutoCompleteRequest, _=Depends(token_required)): - if not await async_graph_exists(data.repo): - return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + g = AsyncGraphQuery(data.repo) + try: + if not await g.graph_exists(): + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - completions = await async_prefix_search(data.repo, data.prefix) + completions = await g.prefix_search(data.prefix) + finally: + await g.close() return {"status": "success", "completions": completions} @app.get('/api/list_repos') @@ -151,12 +153,12 @@ async def repo_info(data: RepoRequest, _=Depends(token_required)): @app.post('/api/find_paths') async def find_paths(data: FindPathsRequest, _=Depends(token_required)): - if not await async_graph_exists(data.repo): - logging.error("Missing project %s", data.repo) - return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) - g = AsyncGraphQuery(data.repo) try: + if not await g.graph_exists(): + logging.error("Missing project %s", data.repo) + return JSONResponse({"status": f"Missing project {data.repo}"}, status_code=400) + paths = await g.find_paths(data.src, data.dest) finally: await g.close() From 
24eb501f6d330631a16d12f424c4604c7d850ced Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 15:25:09 +0200 Subject: [PATCH 13/16] Add graph_exists check to repo_info endpoints, fix test cleanup assertion - Replace ineffective 'if stats is None' guard in repo_info with g.graph_exists() check in both api/index.py and tests/index.py - Add missing mock_db.aclose assertion in test_async_get_repos_empty Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- api/index.py | 5 ++++- tests/index.py | 5 ++++- tests/test_async_graph.py | 1 + 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/api/index.py b/api/index.py index b26ba00..38dfb61 100644 --- a/api/index.py +++ b/api/index.py @@ -178,12 +178,15 @@ async def repo_info(data: RepoRequest, _=Depends(public_or_auth)): g = AsyncGraphQuery(data.repo) try: + if not await g.graph_exists(): + return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) + stats = await g.stats() finally: await g.close() info = await async_get_repo_info(data.repo) - if stats is None or info is None: + if info is None: return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) stats |= info diff --git a/tests/index.py b/tests/index.py index 4eff250..9fd020d 100644 --- a/tests/index.py +++ b/tests/index.py @@ -142,11 +142,14 @@ async def list_repos(_=Depends(token_required)): async def repo_info(data: RepoRequest, _=Depends(token_required)): g = AsyncGraphQuery(data.repo) try: + if not await g.graph_exists(): + return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) + stats = await g.stats() finally: await g.close() info = await async_get_repo_info(data.repo) - if stats is None or info is None: + if info is None: return JSONResponse({"status": f'Missing repository "{data.repo}"'}, status_code=400) stats |= info return {"status": "success", "info": stats} diff --git a/tests/test_async_graph.py b/tests/test_async_graph.py 
index bea1ede..ccd1e00 100644 --- a/tests/test_async_graph.py +++ b/tests/test_async_graph.py @@ -82,6 +82,7 @@ async def test_async_get_repos_empty(): repos = await async_get_repos() assert repos == [] + mock_db.aclose.assert_awaited_once() # --------------------------------------------------------------------------- From c46427e975f38702e7f12d40e5066b2e53c8dcb6 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 16:09:14 +0200 Subject: [PATCH 14/16] docs: sync project docs with FastAPI implementation Update the README, environment template, and reference docs so they match the current FastAPI/Uvicorn runtime, /api endpoints, supported analyzers, and auth/env behavior. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .env.template | 29 +++-- ARCHITECTURE_ANALYSIS.md | 248 +++++++++++++++++++++++++++++++++++++++ QUICK_REFERENCE.txt | 97 +++++++++++++++ README.md | 220 +++++++++++++++++++--------------- 4 files changed, 489 insertions(+), 105 deletions(-) create mode 100644 ARCHITECTURE_ANALYSIS.md create mode 100644 QUICK_REFERENCE.txt diff --git a/.env.template b/.env.template index 7606777..b5047c9 100644 --- a/.env.template +++ b/.env.template @@ -2,15 +2,28 @@ FALKORDB_HOST=localhost FALKORDB_PORT=6379 -# OpenAI API key for LLM features -OPENAI_API_KEY= +# Optional FalkorDB authentication +FALKORDB_USERNAME= +FALKORDB_PASSWORD= -# Secret token for API authentication +# Token checked by authenticated endpoints. If left empty, the current +# implementation accepts requests without an Authorization header. SECRET_TOKEN= -# Flask server settings -FLASK_RUN_HOST=0.0.0.0 -FLASK_RUN_PORT=5000 - -# Set to 1 to enable public access for analyze_repo/switch_commit endpoints +# Set to 1 to make read-only endpoints public. CODE_GRAPH_PUBLIC=0 + +# Limit /api/analyze_folder to this directory tree. Leave commented to use +# the repository root as the default allowed directory. 
+# ALLOWED_ANALYSIS_DIR=/absolute/path/to/projects + +# LiteLLM model used by /api/chat +MODEL_NAME=gemini/gemini-flash-lite-latest + +# Provider credential for the default Gemini model. Change this to the +# appropriate provider key if you change MODEL_NAME. +GEMINI_API_KEY= + +# Optional Uvicorn bind settings used by start.sh / make run-* +HOST=0.0.0.0 +PORT=5000 diff --git a/ARCHITECTURE_ANALYSIS.md b/ARCHITECTURE_ANALYSIS.md new file mode 100644 index 0000000..596c068 --- /dev/null +++ b/ARCHITECTURE_ANALYSIS.md @@ -0,0 +1,248 @@ +# CodeGraph FastAPI Backend - Architecture Analysis + +## Executive Summary + +CodeGraph currently exposes a **FastAPI** backend from `api.index:app` and serves the built React UI from the same process when `app/dist` exists. + +The framework-specific HTTP code is concentrated in `api/index.py`. Most of the backend domain modules (`graph.py`, `project.py`, `analyzers/`, `git_utils/`, `info.py`, `llm.py`) are reusable Python components that do not depend on FastAPI. + +## 1. Backend Layout + +``` +api/ +├── __init__.py # Public package exports +├── index.py # FastAPI app, auth dependencies, routes, SPA serving +├── graph.py # FalkorDB graph access (sync + async helpers) +├── llm.py # GraphRAG + LiteLLM chat integration +├── info.py # Repository metadata stored in Redis/FalkorDB +├── project.py # Clone/local repo analysis orchestration +├── auto_complete.py # Prefix search helper +├── prompts.py # Chat/Cypher prompt templates +│ +├── analyzers/ +│ ├── analyzer.py # Abstract analyzer base class +│ ├── source_analyzer.py # File scanning + analyzer dispatch +│ ├── python/analyzer.py # Python analyzer +│ ├── java/analyzer.py # Java analyzer +│ ├── csharp/analyzer.py # C# analyzer +│ └── c/analyzer.py # Present in tree, but not registered +│ +├── entities/ # Entity/File wrappers and encoders +├── git_utils/ # Git history graph and repo utilities +└── code_coverage/ # Coverage helpers +``` + +## 2. 
HTTP Layer (`api/index.py`) + +### 2.1 Application and routing + +- The backend app is `FastAPI()`. +- All API routes are mounted under `/api/...`. +- A catch-all route serves static files from `app/dist` and falls back to `index.html` for the React SPA. + +### 2.2 Authentication dependencies + +`api/index.py` defines two FastAPI dependencies: + +- `public_or_auth`: used by read-only endpoints. If `CODE_GRAPH_PUBLIC=1`, the request is allowed without auth; otherwise it checks the `Authorization` header against `SECRET_TOKEN`. +- `token_required`: used by mutating endpoints and always checks the `Authorization` header against `SECRET_TOKEN`. + +The current `_verify_token()` helper also treats a missing `SECRET_TOKEN` as allowing requests with no `Authorization` header. + +### 2.3 Request models + +The API uses Pydantic request models for POST bodies, including: + +- `RepoRequest` +- `NeighborsRequest` +- `AutoCompleteRequest` +- `FindPathsRequest` +- `ChatRequest` +- `AnalyzeFolderRequest` +- `AnalyzeRepoRequest` +- `SwitchCommitRequest` + +### 2.4 Endpoint inventory + +**Read endpoints** (`public_or_auth`): + +- `GET /api/graph_entities` +- `POST /api/get_neighbors` +- `POST /api/auto_complete` +- `GET /api/list_repos` +- `POST /api/repo_info` +- `POST /api/find_paths` +- `POST /api/chat` +- `POST /api/list_commits` + +**Mutating endpoints** (`token_required`): + +- `POST /api/analyze_folder` +- `POST /api/analyze_repo` +- `POST /api/switch_commit` + +### 2.5 Async behavior + +The FastAPI handlers are `async def`, but several heavy operations are still blocking and are moved off the event loop with `asyncio.get_running_loop().run_in_executor(...)`: + +- local folder analysis +- repository clone + analysis +- LLM chat work +- commit switching + +## 3. Domain Modules + +### 3.1 `graph.py` + +`Graph` is the core FalkorDB interface used for code-graph mutations and queries. 
It also exposes helpers such as: + +- `get_sub_graph()` +- `get_neighbors()` +- `add_entity()` +- `connect_entities()` +- `find_paths()` +- `stats()` +- backlog helpers used during git-history processing + +Async route handlers use `AsyncGraphQuery` and `async_get_repos()` for non-blocking access patterns. + +### 3.2 `project.py` + +`Project` represents either: + +- a cloned git repository via `Project.from_git_repository(url)`, or +- a local repository via `Project.from_local_repository(path)`. + +Its two main orchestration steps are: + +- `analyze_sources(ignore)` +- `process_git_history(ignore)` + +### 3.3 `analyzers/source_analyzer.py` + +`SourceAnalyzer` walks the repository tree, picks a registered analyzer by file extension, and builds the code graph. + +Registered analyzers in the current code: + +- `.py` -> `PythonAnalyzer` +- `.java` -> `JavaAnalyzer` +- `.cs` -> `CSharpAnalyzer` + +The C analyzer source exists, but `.c` and `.h` registrations are commented out. + +### 3.4 `git_utils/` + +Git history is modeled as a separate FalkorDB graph per repository (for example `{repo_name}_git`). + +Key pieces: + +- `GitGraph` / `AsyncGitGraph` +- `build_commit_graph(...)` +- `switch_commit(...)` +- helper functions for diff classification and ignore checks + +### 3.5 `info.py` + +Repository metadata is stored via Redis-compatible access backed by FalkorDB connection settings. Stored fields include: + +- `repo_url` +- `commit` + +### 3.6 `llm.py` + +Chat requests use GraphRAG-SDK with LiteLLM: + +- default `MODEL_NAME` is `gemini/gemini-flash-lite-latest` +- the backend creates a `KnowledgeGraph` bound to the repository graph +- `ask()` offloads the synchronous chat session call to a worker thread + +## 4. 
Runtime and Environment + +### 4.1 Local development + +Typical backend dev command: + +```bash +uv run uvicorn api.index:app --host 127.0.0.1 --port 5000 --reload +``` + +Typical frontend dev command: + +```bash +cd app && npm run dev +``` + +`app/vite.config.ts` proxies `/api` requests to `http://127.0.0.1:5000` during frontend development. + +### 4.2 Production/container startup + +The checked-in production entrypoints use Uvicorn, not Flask: + +- `make run-prod` +- `start.sh` +- Docker image entrypoint (`/start.sh`) + +### 4.3 Important environment variables + +- `FALKORDB_HOST` +- `FALKORDB_PORT` +- `FALKORDB_USERNAME` +- `FALKORDB_PASSWORD` +- `SECRET_TOKEN` +- `CODE_GRAPH_PUBLIC` +- `ALLOWED_ANALYSIS_DIR` +- `MODEL_NAME` +- provider-specific LiteLLM credential(s), such as `GEMINI_API_KEY` for the default model + +## 5. Storage Model + +### 5.1 Code graph + +The main repository graph lives in FalkorDB and contains entities such as: + +- `File` +- `Class` +- `Function` +- `Interface` + +Relationships include: + +- `DEFINES` +- `CALLS` +- `EXTENDS` +- `IMPLEMENTS` + +### 5.2 Git graph + +Commit history is stored in a second graph named `{repo_name}_git`, with commit metadata and parent/child edges. + +### 5.3 Repository metadata + +Repository URL and current commit are stored in Redis-style hashes keyed as `{repo_name}_info`. + +## 6. Request Flows + +### 6.1 `POST /api/analyze_repo` + +1. FastAPI validates the request body with `AnalyzeRepoRequest`. +2. `token_required` checks the `Authorization` header. +3. `Project.from_git_repository()` clones the repo locally. +4. `analyze_sources()` builds the code graph. +5. `process_git_history()` builds the repository's git graph. +6. The endpoint returns `{"status": "success"}`. + +### 6.2 `POST /api/chat` + +1. FastAPI validates `repo` and `msg`. +2. `public_or_auth` enforces auth/public rules. +3. `ask()` creates a GraphRAG chat session for the repository graph. +4. 
LiteLLM generates Cypher and a natural-language response. +5. The endpoint returns `{"status": "success", "response": ...}`. + +## 7. Key Takeaways + +- The backend is now FastAPI + Uvicorn, not Flask. +- All public API paths are under `/api/...`. +- The React app can be served by the backend from `app/dist`. +- Most backend logic remains framework-agnostic and reusable. +- Supported analyzers are currently Python, Java, and C#. diff --git a/QUICK_REFERENCE.txt b/QUICK_REFERENCE.txt new file mode 100644 index 0000000..9003cf3 --- /dev/null +++ b/QUICK_REFERENCE.txt @@ -0,0 +1,97 @@ +╔════════════════════════════════════════════════════════════════════════════╗ +║ CodeGraph FastAPI Backend - QUICK REFERENCE ║ +╚════════════════════════════════════════════════════════════════════════════╝ + +BACKEND SNAPSHOT +───────────────────────────────────────────────────────────────────────────── +Backend app: api.index:app +Framework: FastAPI +Server: Uvicorn +Frontend: React + Vite in app/ +Built UI serving: FastAPI serves app/dist when it exists +Supported analyzers: Python, Java, C# +C analyzer: Present in source tree, but currently disabled + +KEY PATHS +───────────────────────────────────────────────────────────────────────────── +api/index.py FastAPI app, auth deps, API routes, SPA serving +api/project.py Repo clone/local analysis orchestration +api/analyzers/source_analyzer.py + Registered analyzers (.py, .java, .cs) +api/llm.py GraphRAG + LiteLLM chat integration +api/graph.py FalkorDB graph access +api/git_utils/ Git history graph + commit switching +app/vite.config.ts Dev proxy from /api -> http://127.0.0.1:5000 +Makefile Common install/build/run/test/lint commands +start.sh Container entrypoint (starts uvicorn) + +COMMON COMMANDS +───────────────────────────────────────────────────────────────────────────── +Install backend deps: uv sync --all-extras +Install frontend deps: npm install --prefix ./app +Install e2e deps: npm install + +Backend dev server: uv run 
uvicorn api.index:app --host 127.0.0.1 --port 5000 --reload +Frontend dev server: cd app && npm run dev +Prod-style serve: npm --prefix ./app run build && uv run uvicorn api.index:app --host 0.0.0.0 --port 5000 + +Make targets: make install | build-dev | build-prod | run-dev | + run-prod | test | lint | e2e | clean + +AUTHENTICATION RULES +───────────────────────────────────────────────────────────────────────────── +Header format: Authorization: Bearer + (raw token string also works) +Read endpoints: public_or_auth dependency +Mutating endpoints: token_required dependency +Public mode: CODE_GRAPH_PUBLIC=1 skips auth on read endpoints +Important detail: If SECRET_TOKEN is unset, requests without an + Authorization header are accepted by the current + implementation + +IMPORTANT ENV VARS +───────────────────────────────────────────────────────────────────────────── +FALKORDB_HOST FalkorDB hostname (default: localhost) +FALKORDB_PORT FalkorDB port (default: 6379) +FALKORDB_USERNAME Optional FalkorDB username +FALKORDB_PASSWORD Optional FalkorDB password +SECRET_TOKEN Token checked by protected endpoints +CODE_GRAPH_PUBLIC Set to 1 to make read endpoints public +ALLOWED_ANALYSIS_DIR Limits /api/analyze_folder to one directory tree +MODEL_NAME LiteLLM model for /api/chat + default: gemini/gemini-flash-lite-latest +GEMINI_API_KEY Example provider credential for the default model +HOST / PORT Optional bind settings for start.sh / make run-* + +API ENDPOINTS +───────────────────────────────────────────────────────────────────────────── +GET /api/list_repos +GET /api/graph_entities?repo= +POST /api/get_neighbors +POST /api/auto_complete +POST /api/repo_info +POST /api/find_paths +POST /api/chat +POST /api/list_commits +POST /api/analyze_folder (requires token dependency) +POST /api/analyze_repo (requires token dependency) +POST /api/switch_commit (requires token dependency) + +ANALYSIS NOTES +───────────────────────────────────────────────────────────────────────────── 
+/api/analyze_folder The requested path must be inside ALLOWED_ANALYSIS_DIR +/api/analyze_repo Clones the repository, analyzes sources, then builds + git history +/api/chat Uses GraphRAG-SDK + LiteLLM against the repo graph +/api/list_commits Reads from the separate {repo_name}_git graph + +DEVELOPMENT NOTES +───────────────────────────────────────────────────────────────────────────── +Vite dev URL: http://localhost:3000 +Backend API URL: http://127.0.0.1:5000/api/... +Built app URL: http://localhost:5000 +Root package.json: Playwright only +app/package.json: Frontend build/lint/dev scripts +Tests: make test runs pytest, but some legacy analyzer/ + git-history tests still need maintenance on a + clean checkout diff --git a/README.md b/README.md index a24fa74..533b32e 100644 --- a/README.md +++ b/README.md @@ -19,45 +19,42 @@ Connect and ask questions: [![Discord](https://img.shields.io/badge/Discord-%235 ``` code-graph/ -├── api/ # Python backend (Flask) -│ ├── index.py # Main Flask app with API routes -│ ├── graph.py # Graph operations (FalkorDB) -│ ├── llm.py # LLM integration for chat -│ ├── project.py # Project management -│ ├── info.py # Repository info -│ ├── prompts.py # LLM prompts -│ ├── auto_complete.py # Auto-completion -│ ├── analyzers/ # Source code analyzers (Python, Java, C, C#) -│ ├── entities/ # Entity models -│ ├── git_utils/ # Git operations -│ └── code_coverage/ # Code coverage utilities +├── api/ # Python backend (FastAPI) +│ ├── index.py # FastAPI app, auth deps, API routes, SPA serving +│ ├── graph.py # FalkorDB graph operations +│ ├── llm.py # GraphRAG + LiteLLM chat integration +│ ├── project.py # Repository cloning and analysis orchestration +│ ├── info.py # Repository metadata stored in Redis/FalkorDB +│ ├── prompts.py # LLM system and prompt templates +│ ├── auto_complete.py # Prefix search helper +│ ├── analyzers/ # Source analyzers (Python, Java, C#) +│ ├── entities/ # Graph/entity models +│ ├── git_utils/ # Git history graph 
utilities +│ └── code_coverage/ # Coverage utilities ├── app/ # React frontend (Vite) │ ├── src/ # Frontend source code -│ │ ├── App.tsx # Main application component -│ │ ├── main.tsx # Entry point -│ │ ├── components/ # React components -│ │ └── lib/ # Utility functions │ ├── public/ # Static assets -│ ├── package.json # Frontend dependencies -│ ├── vite.config.ts # Vite configuration -│ └── tsconfig.json # TypeScript configuration -├── tests/ # Backend tests -├── e2e/ # End-to-end tests (Playwright) -├── Dockerfile # Unified Docker build -├── docker-compose.yml # Docker Compose setup -├── Makefile # Development commands -├── start.sh # Container startup script -├── pyproject.toml # Python project configuration -└── .env.template # Environment variables template +│ ├── package.json # Frontend dependencies and scripts +│ ├── vite.config.ts # Vite config and /api proxy for dev mode +│ └── tsconfig*.json # TypeScript config +├── tests/ # Backend/unit and endpoint tests +├── e2e/ # End-to-end helpers and Playwright assets +├── Dockerfile # Unified container image +├── docker-compose.yml # Local FalkorDB + app stack +├── Makefile # Common dev/build/test commands +├── start.sh # Container entrypoint +├── pyproject.toml # Python package and dependency config +└── .env.template # Example environment variables ``` ## Running Locally ### Prerequisites -- Python 3.12+ +- Python `>=3.12,<3.14` - Node.js 20+ -- FalkorDB instance (local or cloud) +- [`uv`](https://docs.astral.sh/uv/) +- A FalkorDB instance (local or cloud) ### 1. Start FalkorDB @@ -69,9 +66,9 @@ code-graph/ docker run -p 6379:6379 -it --rm falkordb/falkordb ``` -### 2. Set Up Environment Variables +### 2. 
Configure environment variables -Copy the template and configure: +Copy the template and adjust it for your setup: ```bash cp .env.template .env @@ -79,86 +76,98 @@ cp .env.template .env | Variable | Description | Required | Default | |----------|-------------|----------|---------| -| `OPENAI_API_KEY` | Your OpenAI API key for code analysis | Yes | - | -| `SECRET_TOKEN` | User-defined token for request authorization | Yes | - | -| `FALKORDB_HOST` | FalkorDB server hostname | No | localhost | -| `FALKORDB_PORT` | FalkorDB server port | No | 6379 | +| `FALKORDB_HOST` | FalkorDB hostname | No | `localhost` | +| `FALKORDB_PORT` | FalkorDB port | No | `6379` | +| `FALKORDB_USERNAME` | Optional FalkorDB username | No | empty | +| `FALKORDB_PASSWORD` | Optional FalkorDB password | No | empty | +| `SECRET_TOKEN` | Token checked by protected endpoints | No | empty | +| `CODE_GRAPH_PUBLIC` | Set `1` to skip auth on read-only endpoints | No | `0` | +| `ALLOWED_ANALYSIS_DIR` | Root path allowed for `/api/analyze_folder` | No | repository root | +| `MODEL_NAME` | LiteLLM model used by `/api/chat` | No | `gemini/gemini-flash-lite-latest` | +| `HOST` | Optional Uvicorn bind host for `start.sh`/`make run-*` | No | `0.0.0.0` or `127.0.0.1` depending on command | +| `PORT` | Optional Uvicorn bind port for `start.sh`/`make run-*` | No | `5000` | -Edit `.env` with your values: +The chat endpoint also needs the provider credential expected by your chosen `MODEL_NAME`. The default model is Gemini, so set `GEMINI_API_KEY` unless you switch to a different LiteLLM provider/model. -```bash -FALKORDB_HOST=localhost -FALKORDB_PORT=6379 -OPENAI_API_KEY= -SECRET_TOKEN= -``` +### Authentication behavior + +- Send `Authorization: Bearer ` (or the raw token string) when `SECRET_TOKEN` is configured. +- Read endpoints use the `public_or_auth` dependency. +- Mutating endpoints (`/api/analyze_folder`, `/api/analyze_repo`, `/api/switch_commit`) use the `token_required` dependency. 
+- If `SECRET_TOKEN` is unset, the current implementation accepts requests without an `Authorization` header. +- Setting `CODE_GRAPH_PUBLIC=1` makes the read-only endpoints public even when `SECRET_TOKEN` is configured. -### 3. Install Dependencies +### 3. Install dependencies ```bash # Install backend dependencies -pip install -e ".[test]" +uv sync --all-extras # Install frontend dependencies npm install --prefix ./app + +# Optional: install Playwright dependencies from the repo root +npm install ``` -Or using Make: +If you do not use `uv`, `pip install -e ".[test]"` also installs the backend package and test dependencies. + +### 4. Run the app + +**Backend API with auto-reload:** ```bash -make install +uv run uvicorn api.index:app --host 127.0.0.1 --port 5000 --reload ``` -### 4. Build & Start +**Frontend hot-reload with Vite:** ```bash -# Build the frontend -npm --prefix ./app run build +# Terminal 1: backend API +uv run uvicorn api.index:app --host 127.0.0.1 --port 5000 --reload -# Start the backend (serves both API and frontend) -flask --app api/index.py run --debug +# Terminal 2: Vite dev server +cd app && npm run dev ``` -The application will be available at [http://localhost:5000](http://localhost:5000). - -### Development Mode +The Vite dev server runs on `http://localhost:3000` and proxies `/api/*` requests to `http://127.0.0.1:5000`. -For frontend development with hot-reload: +**Single-process built frontend + backend:** ```bash -# Terminal 1: Start the Python backend -flask --app api/index.py run --debug --port 5000 - -# Terminal 2: Start the Vite dev server (proxies API calls to backend) -cd app && npm run dev +npm --prefix ./app run build +uv run uvicorn api.index:app --host 0.0.0.0 --port 5000 ``` -The Vite dev server runs on `http://localhost:3000` and proxies API requests to the Flask backend on port 5000. +In this mode, the FastAPI app serves the built React SPA from `app/dist` on `http://localhost:5000`. 
### Using Make ```bash -make install # Install all dependencies -make build-dev # Build frontend (development) -make build-prod # Build frontend (production) -make run-dev # Build + start dev server -make run-prod # Build + start production server -make test # Run backend tests -make lint # Run frontend linting -make clean # Clean build artifacts +make install # Install backend + frontend dependencies +make build-dev # Build frontend in development mode +make build-prod # Build frontend for production +make run-dev # Build dev frontend + run Uvicorn with reload +make run-prod # Build prod frontend + run Uvicorn +make test # Run backend pytest suite +make lint # Run Ruff + frontend type-check +make e2e # Run Playwright tests from repo root +make clean # Remove build/test artifacts ``` +`make test` currently points at the right backend test entrypoint, but some legacy analyzer/git-history tests still need maintenance before the suite passes on a clean checkout. ## Running with Docker ### Using Docker Compose ```bash -docker-compose up +docker compose up --build ``` -This starts both FalkorDB and the CodeGraph application. +This starts FalkorDB and the CodeGraph app together. The checked-in compose file sets `CODE_GRAPH_PUBLIC=1` for the app service. -### Using Docker Directly +### Using Docker directly ```bash docker build -t code-graph . @@ -166,55 +175,72 @@ docker build -t code-graph . docker run -p 5000:5000 \ -e FALKORDB_HOST=host.docker.internal \ -e FALKORDB_PORT=6379 \ - -e OPENAI_API_KEY=<your-openai-api-key> \ - -e SECRET_TOKEN=<your-secret-token> \ + -e MODEL_NAME=gemini/gemini-flash-lite-latest \ + -e GEMINI_API_KEY=<your-gemini-api-key> \ + -e SECRET_TOKEN=<your-secret-token> \ code-graph ``` ## Creating a Code Graph -### Analyze a Local Folder +### Analyze a local folder + +`analyze_folder` only accepts paths under `ALLOWED_ANALYSIS_DIR` (defaults to the repository root unless you override it).
```bash -curl -X POST http://127.0.0.1:5000/analyze_folder \ +curl -X POST http://127.0.0.1:5000/api/analyze_folder \ -H "Content-Type: application/json" \ - -H "Authorization: <SECRET_TOKEN>" \ + -H "Authorization: Bearer <SECRET_TOKEN>" \ -d '{"path": "<path-to-local-folder>", "ignore": [".github", ".git"]}' ``` -### Analyze a GitHub Repository +### Analyze a Git repository ```bash -curl -X POST http://127.0.0.1:5000/analyze_repo \ +curl -X POST http://127.0.0.1:5000/api/analyze_repo \ -H "Content-Type: application/json" \ - -H "Authorization: <SECRET_TOKEN>" \ - -d '{"repo_url": "https://github.com/user/repo"}' + -H "Authorization: Bearer <SECRET_TOKEN>" \ + -d '{"repo_url": "https://github.com/user/repo", "ignore": [".github", ".git"]}' +``` + +### List indexed repositories + +```bash +curl http://127.0.0.1:5000/api/list_repos ``` ## Supported Languages -- Python -- Java -- C -- C# `api/analyzers/source_analyzer.py` currently enables these analyzers: -Support for additional languages is planned. +- Python (`.py`) +- Java (`.java`) +- C# (`.cs`) + +A C analyzer exists in the source tree, but it is commented out and is not currently registered.
## API Endpoints +### Read endpoints + +| Method | Endpoint | Description | +|--------|----------|-------------| +| GET | `/api/list_repos` | List all indexed repositories | +| GET | `/api/graph_entities?repo=` | Fetch a subgraph for a repository | +| POST | `/api/get_neighbors` | Return neighboring nodes for the provided IDs | +| POST | `/api/auto_complete` | Prefix-search indexed entities | +| POST | `/api/repo_info` | Return repository stats and saved metadata | +| POST | `/api/find_paths` | Find paths between two graph nodes | +| POST | `/api/chat` | Ask questions over the code graph via GraphRAG | +| POST | `/api/list_commits` | List commits from the repository's git graph | + +### Mutating endpoints + | Method | Endpoint | Description | |--------|----------|-------------| -| GET | `/list_repos` | List all available repositories | -| GET | `/graph_entities?repo=` | Get graph entities for a repository | -| POST | `/get_neighbors` | Get neighbors of specified nodes | -| POST | `/auto_complete` | Auto-complete entity names | -| POST | `/repo_info` | Get repository information | -| POST | `/find_paths` | Find paths between two nodes | -| POST | `/chat` | Chat with the code graph using natural language | -| POST | `/analyze_folder` | Analyze a local source folder | -| POST | `/analyze_repo` | Analyze a GitHub repository | -| POST | `/list_commits` | List commits of a repository | -| POST | `/switch_commit` | Switch to a specific commit | +| POST | `/api/analyze_folder` | Analyze a local source folder | +| POST | `/api/analyze_repo` | Clone and analyze a git repository | +| POST | `/api/switch_commit` | Switch the indexed repository to a specific commit | ## License From 4f1a8c25794320439a6c4e2e18ae6e28109a24a0 Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 17:47:35 +0200 Subject: [PATCH 15/16] Delete ARCHITECTURE_ANALYSIS.md --- ARCHITECTURE_ANALYSIS.md | 248 --------------------------------------- 1 file changed, 248 deletions(-) delete mode 
100644 ARCHITECTURE_ANALYSIS.md diff --git a/ARCHITECTURE_ANALYSIS.md b/ARCHITECTURE_ANALYSIS.md deleted file mode 100644 index 596c068..0000000 --- a/ARCHITECTURE_ANALYSIS.md +++ /dev/null @@ -1,248 +0,0 @@ -# CodeGraph FastAPI Backend - Architecture Analysis - -## Executive Summary - -CodeGraph currently exposes a **FastAPI** backend from `api.index:app` and serves the built React UI from the same process when `app/dist` exists. - -The framework-specific HTTP code is concentrated in `api/index.py`. Most of the backend domain modules (`graph.py`, `project.py`, `analyzers/`, `git_utils/`, `info.py`, `llm.py`) are reusable Python components that do not depend on FastAPI. - -## 1. Backend Layout - -``` -api/ -├── __init__.py # Public package exports -├── index.py # FastAPI app, auth dependencies, routes, SPA serving -├── graph.py # FalkorDB graph access (sync + async helpers) -├── llm.py # GraphRAG + LiteLLM chat integration -├── info.py # Repository metadata stored in Redis/FalkorDB -├── project.py # Clone/local repo analysis orchestration -├── auto_complete.py # Prefix search helper -├── prompts.py # Chat/Cypher prompt templates -│ -├── analyzers/ -│ ├── analyzer.py # Abstract analyzer base class -│ ├── source_analyzer.py # File scanning + analyzer dispatch -│ ├── python/analyzer.py # Python analyzer -│ ├── java/analyzer.py # Java analyzer -│ ├── csharp/analyzer.py # C# analyzer -│ └── c/analyzer.py # Present in tree, but not registered -│ -├── entities/ # Entity/File wrappers and encoders -├── git_utils/ # Git history graph and repo utilities -└── code_coverage/ # Coverage helpers -``` - -## 2. HTTP Layer (`api/index.py`) - -### 2.1 Application and routing - -- The backend app is `FastAPI()`. -- All API routes are mounted under `/api/...`. -- A catch-all route serves static files from `app/dist` and falls back to `index.html` for the React SPA. 
- -### 2.2 Authentication dependencies - -`api/index.py` defines two FastAPI dependencies: - -- `public_or_auth`: used by read-only endpoints. If `CODE_GRAPH_PUBLIC=1`, the request is allowed without auth; otherwise it checks the `Authorization` header against `SECRET_TOKEN`. -- `token_required`: used by mutating endpoints and always checks the `Authorization` header against `SECRET_TOKEN`. - -The current `_verify_token()` helper also treats a missing `SECRET_TOKEN` as allowing requests with no `Authorization` header. - -### 2.3 Request models - -The API uses Pydantic request models for POST bodies, including: - -- `RepoRequest` -- `NeighborsRequest` -- `AutoCompleteRequest` -- `FindPathsRequest` -- `ChatRequest` -- `AnalyzeFolderRequest` -- `AnalyzeRepoRequest` -- `SwitchCommitRequest` - -### 2.4 Endpoint inventory - -**Read endpoints** (`public_or_auth`): - -- `GET /api/graph_entities` -- `POST /api/get_neighbors` -- `POST /api/auto_complete` -- `GET /api/list_repos` -- `POST /api/repo_info` -- `POST /api/find_paths` -- `POST /api/chat` -- `POST /api/list_commits` - -**Mutating endpoints** (`token_required`): - -- `POST /api/analyze_folder` -- `POST /api/analyze_repo` -- `POST /api/switch_commit` - -### 2.5 Async behavior - -The FastAPI handlers are `async def`, but several heavy operations are still blocking and are moved off the event loop with `asyncio.get_running_loop().run_in_executor(...)`: - -- local folder analysis -- repository clone + analysis -- LLM chat work -- commit switching - -## 3. Domain Modules - -### 3.1 `graph.py` - -`Graph` is the core FalkorDB interface used for code-graph mutations and queries. It also exposes helpers such as: - -- `get_sub_graph()` -- `get_neighbors()` -- `add_entity()` -- `connect_entities()` -- `find_paths()` -- `stats()` -- backlog helpers used during git-history processing - -Async route handlers use `AsyncGraphQuery` and `async_get_repos()` for non-blocking access patterns. 
- -### 3.2 `project.py` - -`Project` represents either: - -- a cloned git repository via `Project.from_git_repository(url)`, or -- a local repository via `Project.from_local_repository(path)`. - -Its two main orchestration steps are: - -- `analyze_sources(ignore)` -- `process_git_history(ignore)` - -### 3.3 `analyzers/source_analyzer.py` - -`SourceAnalyzer` walks the repository tree, picks a registered analyzer by file extension, and builds the code graph. - -Registered analyzers in the current code: - -- `.py` -> `PythonAnalyzer` -- `.java` -> `JavaAnalyzer` -- `.cs` -> `CSharpAnalyzer` - -The C analyzer source exists, but `.c` and `.h` registrations are commented out. - -### 3.4 `git_utils/` - -Git history is modeled as a separate FalkorDB graph per repository (for example `{repo_name}_git`). - -Key pieces: - -- `GitGraph` / `AsyncGitGraph` -- `build_commit_graph(...)` -- `switch_commit(...)` -- helper functions for diff classification and ignore checks - -### 3.5 `info.py` - -Repository metadata is stored via Redis-compatible access backed by FalkorDB connection settings. Stored fields include: - -- `repo_url` -- `commit` - -### 3.6 `llm.py` - -Chat requests use GraphRAG-SDK with LiteLLM: - -- default `MODEL_NAME` is `gemini/gemini-flash-lite-latest` -- the backend creates a `KnowledgeGraph` bound to the repository graph -- `ask()` offloads the synchronous chat session call to a worker thread - -## 4. Runtime and Environment - -### 4.1 Local development - -Typical backend dev command: - -```bash -uv run uvicorn api.index:app --host 127.0.0.1 --port 5000 --reload -``` - -Typical frontend dev command: - -```bash -cd app && npm run dev -``` - -`app/vite.config.ts` proxies `/api` requests to `http://127.0.0.1:5000` during frontend development. 
- -### 4.2 Production/container startup - -The checked-in production entrypoints use Uvicorn, not Flask: - -- `make run-prod` -- `start.sh` -- Docker image entrypoint (`/start.sh`) - -### 4.3 Important environment variables - -- `FALKORDB_HOST` -- `FALKORDB_PORT` -- `FALKORDB_USERNAME` -- `FALKORDB_PASSWORD` -- `SECRET_TOKEN` -- `CODE_GRAPH_PUBLIC` -- `ALLOWED_ANALYSIS_DIR` -- `MODEL_NAME` -- provider-specific LiteLLM credential(s), such as `GEMINI_API_KEY` for the default model - -## 5. Storage Model - -### 5.1 Code graph - -The main repository graph lives in FalkorDB and contains entities such as: - -- `File` -- `Class` -- `Function` -- `Interface` - -Relationships include: - -- `DEFINES` -- `CALLS` -- `EXTENDS` -- `IMPLEMENTS` - -### 5.2 Git graph - -Commit history is stored in a second graph named `{repo_name}_git`, with commit metadata and parent/child edges. - -### 5.3 Repository metadata - -Repository URL and current commit are stored in Redis-style hashes keyed as `{repo_name}_info`. - -## 6. Request Flows - -### 6.1 `POST /api/analyze_repo` - -1. FastAPI validates the request body with `AnalyzeRepoRequest`. -2. `token_required` checks the `Authorization` header. -3. `Project.from_git_repository()` clones the repo locally. -4. `analyze_sources()` builds the code graph. -5. `process_git_history()` builds the repository's git graph. -6. The endpoint returns `{"status": "success"}`. - -### 6.2 `POST /api/chat` - -1. FastAPI validates `repo` and `msg`. -2. `public_or_auth` enforces auth/public rules. -3. `ask()` creates a GraphRAG chat session for the repository graph. -4. LiteLLM generates Cypher and a natural-language response. -5. The endpoint returns `{"status": "success", "response": ...}`. - -## 7. Key Takeaways - -- The backend is now FastAPI + Uvicorn, not Flask. -- All public API paths are under `/api/...`. -- The React app can be served by the backend from `app/dist`. -- Most backend logic remains framework-agnostic and reusable. 
-- Supported analyzers are currently Python, Java, and C#. From e99d718acd2632598df9dd0f021e4948af54094e Mon Sep 17 00:00:00 2001 From: Guy Korland Date: Fri, 13 Mar 2026 17:47:56 +0200 Subject: [PATCH 16/16] Delete QUICK_REFERENCE.txt --- QUICK_REFERENCE.txt | 97 --------------------------------------------- 1 file changed, 97 deletions(-) delete mode 100644 QUICK_REFERENCE.txt diff --git a/QUICK_REFERENCE.txt b/QUICK_REFERENCE.txt deleted file mode 100644 index 9003cf3..0000000 --- a/QUICK_REFERENCE.txt +++ /dev/null @@ -1,97 +0,0 @@ -╔════════════════════════════════════════════════════════════════════════════╗ -║ CodeGraph FastAPI Backend - QUICK REFERENCE ║ -╚════════════════════════════════════════════════════════════════════════════╝ - -BACKEND SNAPSHOT -───────────────────────────────────────────────────────────────────────────── -Backend app: api.index:app -Framework: FastAPI -Server: Uvicorn -Frontend: React + Vite in app/ -Built UI serving: FastAPI serves app/dist when it exists -Supported analyzers: Python, Java, C# -C analyzer: Present in source tree, but currently disabled - -KEY PATHS -───────────────────────────────────────────────────────────────────────────── -api/index.py FastAPI app, auth deps, API routes, SPA serving -api/project.py Repo clone/local analysis orchestration -api/analyzers/source_analyzer.py - Registered analyzers (.py, .java, .cs) -api/llm.py GraphRAG + LiteLLM chat integration -api/graph.py FalkorDB graph access -api/git_utils/ Git history graph + commit switching -app/vite.config.ts Dev proxy from /api -> http://127.0.0.1:5000 -Makefile Common install/build/run/test/lint commands -start.sh Container entrypoint (starts uvicorn) - -COMMON COMMANDS -───────────────────────────────────────────────────────────────────────────── -Install backend deps: uv sync --all-extras -Install frontend deps: npm install --prefix ./app -Install e2e deps: npm install - -Backend dev server: uv run uvicorn api.index:app --host 127.0.0.1 --port 5000 
--reload -Frontend dev server: cd app && npm run dev -Prod-style serve: npm --prefix ./app run build && uv run uvicorn api.index:app --host 0.0.0.0 --port 5000 - -Make targets: make install | build-dev | build-prod | run-dev | - run-prod | test | lint | e2e | clean - -AUTHENTICATION RULES -───────────────────────────────────────────────────────────────────────────── -Header format: Authorization: Bearer - (raw token string also works) -Read endpoints: public_or_auth dependency -Mutating endpoints: token_required dependency -Public mode: CODE_GRAPH_PUBLIC=1 skips auth on read endpoints -Important detail: If SECRET_TOKEN is unset, requests without an - Authorization header are accepted by the current - implementation - -IMPORTANT ENV VARS -───────────────────────────────────────────────────────────────────────────── -FALKORDB_HOST FalkorDB hostname (default: localhost) -FALKORDB_PORT FalkorDB port (default: 6379) -FALKORDB_USERNAME Optional FalkorDB username -FALKORDB_PASSWORD Optional FalkorDB password -SECRET_TOKEN Token checked by protected endpoints -CODE_GRAPH_PUBLIC Set to 1 to make read endpoints public -ALLOWED_ANALYSIS_DIR Limits /api/analyze_folder to one directory tree -MODEL_NAME LiteLLM model for /api/chat - default: gemini/gemini-flash-lite-latest -GEMINI_API_KEY Example provider credential for the default model -HOST / PORT Optional bind settings for start.sh / make run-* - -API ENDPOINTS -───────────────────────────────────────────────────────────────────────────── -GET /api/list_repos -GET /api/graph_entities?repo= -POST /api/get_neighbors -POST /api/auto_complete -POST /api/repo_info -POST /api/find_paths -POST /api/chat -POST /api/list_commits -POST /api/analyze_folder (requires token dependency) -POST /api/analyze_repo (requires token dependency) -POST /api/switch_commit (requires token dependency) - -ANALYSIS NOTES -───────────────────────────────────────────────────────────────────────────── -/api/analyze_folder The requested path must be inside 
ALLOWED_ANALYSIS_DIR -/api/analyze_repo Clones the repository, analyzes sources, then builds - git history -/api/chat Uses GraphRAG-SDK + LiteLLM against the repo graph -/api/list_commits Reads from the separate {repo_name}_git graph - -DEVELOPMENT NOTES -───────────────────────────────────────────────────────────────────────────── -Vite dev URL: http://localhost:3000 -Backend API URL: http://127.0.0.1:5000/api/... -Built app URL: http://localhost:5000 -Root package.json: Playwright only -app/package.json: Frontend build/lint/dev scripts -Tests: make test runs pytest, but some legacy analyzer/ - git-history tests still need maintenance on a - clean checkout