diff --git a/.env b/.env.example
similarity index 78%
rename from .env
rename to .env.example
index 1d44286e2..fcb506112 100644
--- a/.env
+++ b/.env.example
@@ -1,28 +1,39 @@
# Domain
+
# This would be set to the production domain with an env var on deployment
+
# used by Traefik to transmit traffic and aqcuire TLS certificates
+
DOMAIN=localhost
+
# To test the local Traefik config
+
# DOMAIN=localhost.tiangolo.com
# Used by the backend to generate links in emails to the frontend
+
FRONTEND_HOST=http://localhost:5173
+
# In staging and production, set this env var to the frontend host, e.g.
+
# FRONTEND_HOST=https://dashboard.example.com
# Environment: local, staging, production
+
ENVIRONMENT=local
-PROJECT_NAME="Full Stack FastAPI Project"
-STACK_NAME=full-stack-fastapi-project
+PROJECT_NAME="AI Platform"
+STACK_NAME=ai-platform
# Backend
-BACKEND_CORS_ORIGINS="http://localhost,http://localhost:5173,https://localhost,https://localhost:5173,http://localhost.tiangolo.com"
+
+BACKEND_CORS_ORIGINS="http://localhost:5173"
SECRET_KEY=changethis
FIRST_SUPERUSER=admin@example.com
FIRST_SUPERUSER_PASSWORD=changethis
# Emails
+
SMTP_HOST=
SMTP_USER=
SMTP_PASSWORD=
@@ -32,14 +43,19 @@ SMTP_SSL=False
SMTP_PORT=587
# Postgres
+
POSTGRES_SERVER=localhost
POSTGRES_PORT=5432
-POSTGRES_DB=app
+POSTGRES_DB=ai_platform
POSTGRES_USER=postgres
-POSTGRES_PASSWORD=changethis
+POSTGRES_PASSWORD=postgres
SENTRY_DSN=
# Configure these with your own Docker registry images
+
DOCKER_IMAGE_BACKEND=backend
DOCKER_IMAGE_FRONTEND=frontend
+
+CI=""
+OPENAI_API_KEY="this_is_not_a_secret"
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 51e011339..8b548a424 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,13 +1,13 @@
## Summary
Target issue is #_PLEASE_TYPE_ISSUE_NUMBER_
- Explain the **motivation** for making this change. What existing problem does the pull request solve?
+Explain the **motivation** for making this change. What existing problem does the pull request solve?
## Checklist
Before submitting a pull request, please ensure that you mark these task.
-- [ ] Ran `poetry run uvicorn src.app.main:app --reload` in the repository root and test.
+- [ ] Ran `fastapi run --reload app/main.py` or `docker compose up` in the repository root and tested.
- [ ] If you've fixed a bug or added code that is tested and has test cases.
## Notes
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
deleted file mode 100644
index dccea83f3..000000000
--- a/.github/workflows/add-to-project.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: Add to Project
-
-on:
- pull_request_target:
- issues:
- types:
- - opened
- - reopened
-
-jobs:
- add-to-project:
- name: Add to project
- runs-on: ubuntu-latest
- steps:
- - uses: actions/add-to-project@v1.0.2
- with:
- project-url: https://github.com/orgs/fastapi/projects/2
- github-token: ${{ secrets.PROJECTS_TOKEN }}
diff --git a/.github/workflows/cd-production.yml b/.github/workflows/cd-production.yml
new file mode 100644
index 000000000..8764cf0d8
--- /dev/null
+++ b/.github/workflows/cd-production.yml
@@ -0,0 +1,46 @@
+name: Deploy AI Platform to ECS Production
+
+on:
+ push:
+ tags:
+ - 'v*' # Deploy only when tags like v1.0.0, v2.1.0, etc., are created
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ permissions:
+ packages: write
+ contents: read
+ attestations: write
+ id-token: write
+
+ steps:
+ - name: Checkout the repo
+ uses: actions/checkout@v4
+
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4 # More information on this action can be found below in the 'AWS Credentials' section
+ with:
+ role-to-assume: arn:aws:iam::024209611402:role/github-action-role
+ aws-region: ap-south-1
+
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v2
+
+ - name: Build and Push Docker Image
+ env:
+ REGISTRY: ${{ steps.login-ecr.outputs.registry }}
+ REPOSITORY: ${{ github.event.repository.name }}-repo
+ TAG: ${{ github.ref_name }}
+ run: |
+ docker build -t $REGISTRY/$REPOSITORY:$TAG -t $REGISTRY/$REPOSITORY:latest ./backend
+ docker push --all-tags $REGISTRY/$REPOSITORY
+
+ - name: Deploy to ECS
+ run: |
+ aws ecs update-service \
+ --cluster ${{ github.event.repository.name }}-cluster \
+ --service ${{ github.event.repository.name }}-service \
+ --force-new-deployment
diff --git a/.github/workflows/cd-staging.yml b/.github/workflows/cd-staging.yml
new file mode 100644
index 000000000..898b464ce
--- /dev/null
+++ b/.github/workflows/cd-staging.yml
@@ -0,0 +1,44 @@
+name: Deploy AI Platform to ECS
+
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ permissions:
+ packages: write
+ contents: read
+ attestations: write
+ id-token: write
+
+
+ steps:
+ - name: checkout the repo
+ uses: actions/checkout@v4
+
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4 # More information on this action can be found below in the 'AWS Credentials' section
+ with:
+ role-to-assume: arn:aws:iam::024209611402:role/github-action-role
+ aws-region: ap-south-1
+
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v2
+
+
+ - name: Build and Push Docker Image
+ env:
+ REGISTRY: ${{ steps.login-ecr.outputs.registry }}
+ REPOSITORY: ${{ github.event.repository.name }}-staging-repo
+ run: |
+ docker build -t $REGISTRY/$REPOSITORY:latest ./backend
+ docker push $REGISTRY/$REPOSITORY:latest
+
+ - name: Deploy to ECS
+ run: |
+ aws ecs update-service --cluster ${{ github.event.repository.name }}-staging-cluster --service ${{ github.event.repository.name }}-staging-service --force-new-deployment
diff --git a/.github/workflows/continuous_integration.yml b/.github/workflows/continuous_integration.yml
new file mode 100644
index 000000000..4c096882c
--- /dev/null
+++ b/.github/workflows/continuous_integration.yml
@@ -0,0 +1,69 @@
+name: AI Platform CI
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+jobs:
+ checks:
+ runs-on: ubuntu-latest
+ services:
+ postgres:
+ image: postgres:16
+ env:
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: ai_platform
+ ports:
+ - 5432:5432
+ options: --health-cmd "pg_isready -U postgres" --health-interval 10s --health-timeout 5s --health-retries 5
+
+ strategy:
+ matrix:
+ python-version: ["3.11.7"]
+ redis-version: [6]
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Making env file
+ run: cp .env.example .env
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+ with:
+ version: "0.4.15"
+ enable-cache: true
+
+ - name: Install dependencies
+ run: uv sync
+ working-directory: backend
+
+ - name: Activate virtual environment and run Alembic migrations
+ run: |
+ source .venv/bin/activate
+ alembic upgrade head
+ working-directory: backend
+
+ - name: Run tests
+ run: uv run bash scripts/tests-start.sh "Coverage for ${{ github.sha }}"
+ working-directory: backend
+
+ - name: Upload coverage reports to codecov
+ uses: codecov/codecov-action@v5
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ fail_ci_if_error: true
+
+ - name: Check coverage percentage
+ run: |
+ source .venv/bin/activate
+ coverage report --fail-under=70
+ working-directory: backend
diff --git a/.github/workflows/deploy-production.yml b/.github/workflows/deploy-production.yml
deleted file mode 100644
index a64d02a15..000000000
--- a/.github/workflows/deploy-production.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: Deploy to Production
-
-on:
- release:
- types:
- - published
-
-jobs:
- deploy:
- # Do not deploy in the main repository, only in user projects
- if: github.repository_owner != 'fastapi'
- runs-on:
- - self-hosted
- - production
- env:
- ENVIRONMENT: production
- DOMAIN: ${{ secrets.DOMAIN_PRODUCTION }}
- STACK_NAME: ${{ secrets.STACK_NAME_PRODUCTION }}
- SECRET_KEY: ${{ secrets.SECRET_KEY }}
- FIRST_SUPERUSER: ${{ secrets.FIRST_SUPERUSER }}
- FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}
- SMTP_HOST: ${{ secrets.SMTP_HOST }}
- SMTP_USER: ${{ secrets.SMTP_USER }}
- SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
- EMAILS_FROM_EMAIL: ${{ secrets.EMAILS_FROM_EMAIL }}
- POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
- SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - run: docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_PRODUCTION }} build
- - run: docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_PRODUCTION }} up -d
diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml
deleted file mode 100644
index 26bd692fd..000000000
--- a/.github/workflows/deploy-staging.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: Deploy to Staging
-
-on:
- push:
- branches:
- - master
-
-jobs:
- deploy:
- # Do not deploy in the main repository, only in user projects
- if: github.repository_owner != 'fastapi'
- runs-on:
- - self-hosted
- - staging
- env:
- ENVIRONMENT: staging
- DOMAIN: ${{ secrets.DOMAIN_STAGING }}
- STACK_NAME: ${{ secrets.STACK_NAME_STAGING }}
- SECRET_KEY: ${{ secrets.SECRET_KEY }}
- FIRST_SUPERUSER: ${{ secrets.FIRST_SUPERUSER }}
- FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}
- SMTP_HOST: ${{ secrets.SMTP_HOST }}
- SMTP_USER: ${{ secrets.SMTP_USER }}
- SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
- EMAILS_FROM_EMAIL: ${{ secrets.EMAILS_FROM_EMAIL }}
- POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
- SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - run: docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_STAGING }} build
- - run: docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_STAGING }} up -d
diff --git a/.github/workflows/generate-client.yml b/.github/workflows/generate-client.yml
deleted file mode 100644
index 304363ce9..000000000
--- a/.github/workflows/generate-client.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-name: Generate Client
-
-on:
- pull_request:
- types:
- - opened
- - synchronize
-
-jobs:
- generate-client:
- permissions:
- contents: write
- runs-on: ubuntu-latest
- steps:
- # For PRs from forks
- - uses: actions/checkout@v4
- # For PRs from the same repo
- - uses: actions/checkout@v4
- if: ( github.event_name != 'pull_request' || github.secret_source == 'Actions' )
- with:
- ref: ${{ github.head_ref }}
- token: ${{ secrets.FULL_STACK_FASTAPI_TEMPLATE_REPO_TOKEN }}
- - uses: actions/setup-node@v4
- with:
- node-version: lts/*
- - uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- - name: Install uv
- uses: astral-sh/setup-uv@v5
- with:
- version: "0.4.15"
- enable-cache: true
- - name: Install dependencies
- run: npm ci
- working-directory: frontend
- - run: uv sync
- working-directory: backend
- - run: uv run bash scripts/generate-client.sh
- env:
- VIRTUAL_ENV: backend/.venv
- ENVIRONMENT: production
- SECRET_KEY: just-for-generating-client
- POSTGRES_PASSWORD: just-for-generating-client
- FIRST_SUPERUSER_PASSWORD: just-for-generating-client
- - name: Add changes to git
- run: |
- git config --local user.email "github-actions@github.com"
- git config --local user.name "github-actions"
- git add frontend/src/client
- # Same repo PRs
- - name: Push changes
- if: ( github.event_name != 'pull_request' || github.secret_source == 'Actions' )
- run: |
- git diff --staged --quiet || git commit -m "β¨ Autogenerate frontend client"
- git push
- # Fork PRs
- - name: Check changes
- if: ( github.event_name == 'pull_request' && github.secret_source != 'Actions' )
- run: |
- git diff --staged --quiet || (echo "Changes detected in generated client, run scripts/generate-client.sh and commit the changes" && exit 1)
diff --git a/.github/workflows/issue-manager.yml b/.github/workflows/issue-manager.yml
deleted file mode 100644
index 109ac0e98..000000000
--- a/.github/workflows/issue-manager.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-name: Issue Manager
-
-on:
- schedule:
- - cron: "21 17 * * *"
- issue_comment:
- types:
- - created
- issues:
- types:
- - labeled
- pull_request_target:
- types:
- - labeled
- workflow_dispatch:
-
-permissions:
- issues: write
- pull-requests: write
-
-jobs:
- issue-manager:
- if: github.repository_owner == 'fastapi'
- runs-on: ubuntu-latest
- steps:
- - name: Dump GitHub context
- env:
- GITHUB_CONTEXT: ${{ toJson(github) }}
- run: echo "$GITHUB_CONTEXT"
- - uses: tiangolo/issue-manager@0.5.1
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- config: >
- {
- "answered": {
- "delay": 864000,
- "message": "Assuming the original need was handled, this will be automatically closed now. But feel free to add more comments or create new issues or PRs."
- },
- "waiting": {
- "delay": 2628000,
- "message": "As this PR has been waiting for the original user for a while but seems to be inactive, it's now going to be closed. But if there's anyone interested, feel free to create a new PR."
- },
- "invalid": {
- "delay": 0,
- "message": "This was marked as invalid and will be closed now. If this is an error, please provide additional details."
- }
- }
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
deleted file mode 100644
index e8e58015a..000000000
--- a/.github/workflows/labeler.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-name: Labels
-on:
- pull_request_target:
- types:
- - opened
- - synchronize
- - reopened
- # For label-checker
- - labeled
- - unlabeled
-
-jobs:
- labeler:
- permissions:
- contents: read
- pull-requests: write
- runs-on: ubuntu-latest
- steps:
- - uses: actions/labeler@v5
- if: ${{ github.event.action != 'labeled' && github.event.action != 'unlabeled' }}
- - run: echo "Done adding labels"
- # Run this after labeler applied labels
- check-labels:
- needs:
- - labeler
- permissions:
- pull-requests: read
- runs-on: ubuntu-latest
- steps:
- - uses: docker://agilepathway/pull-request-label-checker:latest
- with:
- one_of: breaking,security,feature,bug,refactor,upgrade,docs,lang-all,internal
- repo_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/latest-changes.yml b/.github/workflows/latest-changes.yml
deleted file mode 100644
index 607c5243b..000000000
--- a/.github/workflows/latest-changes.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: Latest Changes
-
-on:
- pull_request_target:
- branches:
- - master
- types:
- - closed
- workflow_dispatch:
- inputs:
- number:
- description: PR number
- required: true
- debug_enabled:
- description: "Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)"
- required: false
- default: "false"
-
-jobs:
- latest-changes:
- runs-on: ubuntu-latest
- permissions:
- pull-requests: read
- steps:
- - name: Dump GitHub context
- env:
- GITHUB_CONTEXT: ${{ toJson(github) }}
- run: echo "$GITHUB_CONTEXT"
- - uses: actions/checkout@v4
- with:
- # To allow latest-changes to commit to the main branch
- token: ${{ secrets.LATEST_CHANGES }}
- - uses: tiangolo/latest-changes@0.3.2
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- latest_changes_file: ./release-notes.md
- latest_changes_header: "## Latest Changes"
- end_regex: "^## "
- debug_logs: true
- label_header_prefix: "### "
diff --git a/.github/workflows/lint-backend.yml b/.github/workflows/lint-backend.yml
deleted file mode 100644
index a6e536bff..000000000
--- a/.github/workflows/lint-backend.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: Lint Backend
-
-on:
- push:
- branches:
- - master
- pull_request:
- types:
- - opened
- - synchronize
-
-jobs:
- lint-backend:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - name: Set up Python
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- - name: Install uv
- uses: astral-sh/setup-uv@v5
- with:
- version: "0.4.15"
- enable-cache: true
- - run: uv run bash scripts/lint.sh
- working-directory: backend
diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml
deleted file mode 100644
index 5b13c5868..000000000
--- a/.github/workflows/playwright.yml
+++ /dev/null
@@ -1,131 +0,0 @@
-name: Playwright Tests
-
-on:
- push:
- branches:
- - master
- pull_request:
- types:
- - opened
- - synchronize
- workflow_dispatch:
- inputs:
- debug_enabled:
- description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
- required: false
- default: 'false'
-
-jobs:
- changes:
- runs-on: ubuntu-latest
- # Set job outputs to values from filter step
- outputs:
- changed: ${{ steps.filter.outputs.changed }}
- steps:
- - uses: actions/checkout@v4
- # For pull requests it's not necessary to checkout the code but for the main branch it is
- - uses: dorny/paths-filter@v3
- id: filter
- with:
- filters: |
- changed:
- - backend/**
- - frontend/**
- - .env
- - docker-compose*.yml
- - .github/workflows/playwright.yml
-
- test-playwright:
- needs:
- - changes
- if: ${{ needs.changes.outputs.changed == 'true' }}
- timeout-minutes: 60
- runs-on: ubuntu-latest
- strategy:
- matrix:
- shardIndex: [1, 2, 3, 4]
- shardTotal: [4]
- fail-fast: false
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-node@v4
- with:
- node-version: lts/*
- - uses: actions/setup-python@v5
- with:
- python-version: '3.10'
- - name: Setup tmate session
- uses: mxschmitt/action-tmate@v3
- if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.debug_enabled == 'true' }}
- with:
- limit-access-to-actor: true
- - name: Install uv
- uses: astral-sh/setup-uv@v5
- with:
- version: "0.4.15"
- enable-cache: true
- - run: uv sync
- working-directory: backend
- - run: npm ci
- working-directory: frontend
- - run: uv run bash scripts/generate-client.sh
- env:
- VIRTUAL_ENV: backend/.venv
- - run: docker compose build
- - run: docker compose down -v --remove-orphans
- - name: Run Playwright tests
- run: docker compose run --rm playwright npx playwright test --fail-on-flaky-tests --trace=retain-on-failure --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
- - run: docker compose down -v --remove-orphans
- - name: Upload blob report to GitHub Actions Artifacts
- if: ${{ !cancelled() }}
- uses: actions/upload-artifact@v4
- with:
- name: blob-report-${{ matrix.shardIndex }}
- path: frontend/blob-report
- include-hidden-files: true
- retention-days: 1
-
- merge-playwright-reports:
- needs:
- - test-playwright
- - changes
- # Merge reports after playwright-tests, even if some shards have failed
- if: ${{ !cancelled() && needs.changes.outputs.changed == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-node@v4
- with:
- node-version: 20
- - name: Install dependencies
- run: npm ci
- working-directory: frontend
- - name: Download blob reports from GitHub Actions Artifacts
- uses: actions/download-artifact@v4
- with:
- path: frontend/all-blob-reports
- pattern: blob-report-*
- merge-multiple: true
- - name: Merge into HTML Report
- run: npx playwright merge-reports --reporter html ./all-blob-reports
- working-directory: frontend
- - name: Upload HTML report
- uses: actions/upload-artifact@v4
- with:
- name: html-report--attempt-${{ github.run_attempt }}
- path: frontend/playwright-report
- retention-days: 30
- include-hidden-files: true
-
- # https://github.com/marketplace/actions/alls-green#why
- alls-green-playwright: # This job does nothing and is only used for the branch protection
- if: always()
- needs:
- - test-playwright
- runs-on: ubuntu-latest
- steps:
- - name: Decide whether the needed jobs succeeded or failed
- uses: re-actors/alls-green@release/v1
- with:
- jobs: ${{ toJSON(needs) }}
- allowed-skips: test-playwright
diff --git a/.github/workflows/smokeshow.yml b/.github/workflows/smokeshow.yml
deleted file mode 100644
index 61fde520e..000000000
--- a/.github/workflows/smokeshow.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: Smokeshow
-
-on:
- workflow_run:
- workflows: [Test Backend]
- types: [completed]
-
-jobs:
- smokeshow:
- if: ${{ github.event.workflow_run.conclusion == 'success' }}
- runs-on: ubuntu-latest
- permissions:
- actions: read
- statuses: write
-
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- - run: pip install smokeshow
- - uses: actions/download-artifact@v4
- with:
- name: coverage-html
- path: backend/htmlcov
- github-token: ${{ secrets.GITHUB_TOKEN }}
- run-id: ${{ github.event.workflow_run.id }}
- - run: smokeshow upload backend/htmlcov
- env:
- SMOKESHOW_GITHUB_STATUS_DESCRIPTION: Coverage {coverage-percentage}
- SMOKESHOW_GITHUB_COVERAGE_THRESHOLD: 90
- SMOKESHOW_GITHUB_CONTEXT: coverage
- SMOKESHOW_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- SMOKESHOW_GITHUB_PR_HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
- SMOKESHOW_AUTH_KEY: ${{ secrets.SMOKESHOW_AUTH_KEY }}
diff --git a/.github/workflows/test-backend.yml b/.github/workflows/test-backend.yml
deleted file mode 100644
index cbbb78de4..000000000
--- a/.github/workflows/test-backend.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Test Backend
-
-on:
- push:
- branches:
- - master
- pull_request:
- types:
- - opened
- - synchronize
-
-jobs:
- test-backend:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - name: Set up Python
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- - name: Install uv
- uses: astral-sh/setup-uv@v5
- with:
- version: "0.4.15"
- enable-cache: true
- - run: docker compose down -v --remove-orphans
- - run: docker compose up -d db mailcatcher
- - name: Migrate DB
- run: uv run bash scripts/prestart.sh
- working-directory: backend
- - name: Run tests
- run: uv run bash scripts/tests-start.sh "Coverage for ${{ github.sha }}"
- working-directory: backend
- - run: docker compose down -v --remove-orphans
- - name: Store coverage files
- uses: actions/upload-artifact@v4
- with:
- name: coverage-html
- path: backend/htmlcov
- include-hidden-files: true
diff --git a/.github/workflows/test-docker-compose.yml b/.github/workflows/test-docker-compose.yml
deleted file mode 100644
index 17792ede5..000000000
--- a/.github/workflows/test-docker-compose.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Test Docker Compose
-
-on:
- push:
- branches:
- - master
- pull_request:
- types:
- - opened
- - synchronize
-
-jobs:
-
- test-docker-compose:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - run: docker compose build
- - run: docker compose down -v --remove-orphans
- - run: docker compose up -d --wait backend frontend adminer
- - name: Test backend is up
- run: curl http://localhost:8000/api/v1/utils/health-check
- - name: Test frontend is up
- run: curl http://localhost:5173
- - run: docker compose down -v --remove-orphans
diff --git a/README.md b/README.md
index afe124f3f..7589d578b 100644
--- a/README.md
+++ b/README.md
@@ -1,133 +1,31 @@
-# Full Stack FastAPI Template
+# AI Platform
-
-
+[](https://www.gnu.org/licenses/agpl-3.0)
+
+[](https://codecov.io/gh/ProjectTech4DevAI/ai-platform/branch/staging)
+
+[](https://codebeat.co/projects/github-com-ProjectTech4DevAI/ai-platform-staging)
+[](https://img.shields.io/github/commit-activity/m/ProjectTech4DevAI/ai-platform)
-## Technology Stack and Features
+## Pre-requisites
-- β‘ [**FastAPI**](https://fastapi.tiangolo.com) for the Python backend API.
- - π§° [SQLModel](https://sqlmodel.tiangolo.com) for the Python SQL database interactions (ORM).
- - π [Pydantic](https://docs.pydantic.dev), used by FastAPI, for the data validation and settings management.
- - πΎ [PostgreSQL](https://www.postgresql.org) as the SQL database.
-- π [React](https://react.dev) for the frontend.
- - π Using TypeScript, hooks, Vite, and other parts of a modern frontend stack.
- - π¨ [Chakra UI](https://chakra-ui.com) for the frontend components.
- - π€ An automatically generated frontend client.
- - π§ͺ [Playwright](https://playwright.dev) for End-to-End testing.
- - π¦ Dark mode support.
-- π [Docker Compose](https://www.docker.com) for development and production.
-- π Secure password hashing by default.
-- π JWT (JSON Web Token) authentication.
-- π« Email based password recovery.
-- β
Tests with [Pytest](https://pytest.org).
-- π [Traefik](https://traefik.io) as a reverse proxy / load balancer.
-- π’ Deployment instructions using Docker Compose, including how to set up a frontend Traefik proxy to handle automatic HTTPS certificates.
-- π CI (continuous integration) and CD (continuous deployment) based on GitHub Actions.
+- [Docker](https://docs.docker.com/get-started/get-docker/) for running containers.
+- [uv](https://docs.astral.sh/uv/) for Python package and environment management.
-### Dashboard Login
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-### Dashboard - Admin
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-### Dashboard - Create User
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-### Dashboard - Items
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-### Dashboard - User Settings
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-### Dashboard - Dark Mode
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-### Interactive API Documentation
-
-[](https://github.com/fastapi/full-stack-fastapi-template)
-
-## How To Use It
+## Project Setup
You can **just fork or clone** this repository and use it as is.
β¨ It just works. β¨
-### How to Use a Private Repository
-
-If you want to have a private repository, GitHub won't allow you to simply fork it as it doesn't allow changing the visibility of forks.
-
-But you can do the following:
-
-- Create a new GitHub repo, for example `my-full-stack`.
-- Clone this repository manually, set the name with the name of the project you want to use, for example `my-full-stack`:
-
-```bash
-git clone git@github.com:fastapi/full-stack-fastapi-template.git my-full-stack
-```
-
-- Enter into the new directory:
-
-```bash
-cd my-full-stack
-```
-
-- Set the new origin to your new repository, copy it from the GitHub interface, for example:
-
-```bash
-git remote set-url origin git@github.com:octocat/my-full-stack.git
-```
-
-- Add this repo as another "remote" to allow you to get updates later:
-
-```bash
-git remote add upstream git@github.com:fastapi/full-stack-fastapi-template.git
-```
-
-- Push the code to your new repository:
-
-```bash
-git push -u origin master
-```
-
-### Update From the Original Template
-
-After cloning the repository, and after doing changes, you might want to get the latest changes from this original template.
-
-- Make sure you added the original repository as a remote, you can check it with:
-
-```bash
-git remote -v
-
-origin git@github.com:octocat/my-full-stack.git (fetch)
-origin git@github.com:octocat/my-full-stack.git (push)
-upstream git@github.com:fastapi/full-stack-fastapi-template.git (fetch)
-upstream git@github.com:fastapi/full-stack-fastapi-template.git (push)
-```
-
-- Pull the latest changes without merging:
-
-```bash
-git pull --no-commit upstream master
-```
-
-This will download the latest changes from this template without committing them, that way you can check everything is right before committing.
-
-- If there are conflicts, solve them in your editor.
+### Configure
-- Once you are done, commit the changes:
+Create the env file from the example file:
```bash
-git merge --continue
+cp .env.example .env
```
-### Configure
-
You can then update configs in the `.env` files to customize your configurations.
Before deploying it, make sure you change at least the values for:
@@ -136,9 +34,7 @@ Before deploying it, make sure you change at least the values for:
- `FIRST_SUPERUSER_PASSWORD`
- `POSTGRES_PASSWORD`
-You can (and should) pass these as environment variables from secrets.
-
-Read the [deployment.md](./deployment.md) docs for more details.
+
### Generate Secret Keys
@@ -148,78 +44,32 @@ You have to change them with a secret key, to generate secret keys you can run t
```bash
python -c "import secrets; print(secrets.token_urlsafe(32))"
-```
+````
Copy the content and use that as password / secret key. And run that again to generate another secure key.
-## How To Use It - Alternative With Copier
-
-This repository also supports generating a new project using [Copier](https://copier.readthedocs.io).
-
-It will copy all the files, ask you configuration questions, and update the `.env` files with your answers.
+## Bootstrap & development mode
-### Install Copier
-
-You can install Copier with:
-
-```bash
-pip install copier
-```
-
-Or better, if you have [`pipx`](https://pipx.pypa.io/), you can run it with:
+This is a dockerized setup, so start the project using the command below:
```bash
-pipx install copier
+docker compose watch
```
-**Note**: If you have `pipx`, installing copier is optional, you could run it directly.
-
-### Generate a Project With Copier
-
-Decide a name for your new project's directory, you will use it below. For example, `my-awesome-project`.
-
-Go to the directory that will be the parent of your project, and run the command with your project's name:
-
-```bash
-copier copy https://github.com/fastapi/full-stack-fastapi-template my-awesome-project --trust
-```
+This should start all the necessary services for the project and will also mount the file system as a volume for easy development.
-If you have `pipx` and you didn't install `copier`, you can run it directly:
+You can verify the backend is running by doing a health check:
```bash
-pipx run copier copy https://github.com/fastapi/full-stack-fastapi-template my-awesome-project --trust
+curl http://[your-domain]:8000/api/v1/utils/health-check/
```
-**Note** the `--trust` option is necessary to be able to execute a [post-creation script](https://github.com/fastapi/full-stack-fastapi-template/blob/master/.copier/update_dotenv.py) that updates your `.env` files.
-
-### Input Variables
-
-Copier will ask you for some data, you might want to have at hand before generating the project.
-
-But don't worry, you can just update any of that in the `.env` files afterwards.
-
-The input variables, with their default values (some auto generated) are:
-
-- `project_name`: (default: `"FastAPI Project"`) The name of the project, shown to API users (in .env).
-- `stack_name`: (default: `"fastapi-project"`) The name of the stack used for Docker Compose labels and project name (no spaces, no periods) (in .env).
-- `secret_key`: (default: `"changethis"`) The secret key for the project, used for security, stored in .env, you can generate one with the method above.
-- `first_superuser`: (default: `"admin@example.com"`) The email of the first superuser (in .env).
-- `first_superuser_password`: (default: `"changethis"`) The password of the first superuser (in .env).
-- `smtp_host`: (default: "") The SMTP server host to send emails, you can set it later in .env.
-- `smtp_user`: (default: "") The SMTP server user to send emails, you can set it later in .env.
-- `smtp_password`: (default: "") The SMTP server password to send emails, you can set it later in .env.
-- `emails_from_email`: (default: `"info@example.com"`) The email account to send emails from, you can set it later in .env.
-- `postgres_password`: (default: `"changethis"`) The password for the PostgreSQL database, stored in .env, you can generate one with the method above.
-- `sentry_dsn`: (default: "") The DSN for Sentry, if you are using it, you can set it later in .env.
+or by visiting: http://[your-domain]:8000/api/v1/utils/health-check/ in the browser
## Backend Development
Backend docs: [backend/README.md](./backend/README.md).
-## Frontend Development
-
-Frontend docs: [frontend/README.md](./frontend/README.md).
-
## Deployment
Deployment docs: [deployment.md](./deployment.md).
@@ -234,6 +84,6 @@ This includes using Docker Compose, custom local domains, `.env` configurations,
Check the file [release-notes.md](./release-notes.md).
-## License
+## Credits
-The Full Stack FastAPI Template is licensed under the terms of the MIT license.
+This project was created using [full-stack-fastapi-template](https://github.com/fastapi/full-stack-fastapi-template). A big thank you to the team for creating and maintaining the template!
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 44c53f036..99db91d25 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -1,43 +1,42 @@
-FROM python:3.10
+# Use Python 3.12 base image
+FROM python:3.12
+# Set environment variables
ENV PYTHONUNBUFFERED=1
+# Set working directory
WORKDIR /app/
-# Install uv
-# Ref: https://docs.astral.sh/uv/guides/integration/docker/#installing-uv
+# Install system dependencies
+RUN apt-get update && apt-get install -y curl
+
+# Install uv package manager
COPY --from=ghcr.io/astral-sh/uv:0.5.11 /uv /uvx /bin/
# Place executables in the environment at the front of the path
-# Ref: https://docs.astral.sh/uv/guides/integration/docker/#using-the-environment
ENV PATH="/app/.venv/bin:$PATH"
-# Compile bytecode
-# Ref: https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode
+# Enable bytecode compilation and efficient dependency linking
ENV UV_COMPILE_BYTECODE=1
-
-# uv Cache
-# Ref: https://docs.astral.sh/uv/guides/integration/docker/#caching
ENV UV_LINK_MODE=copy
+# Copy dependency files
+COPY pyproject.toml uv.lock ./
+
# Install dependencies
-# Ref: https://docs.astral.sh/uv/guides/integration/docker/#intermediate-layers
RUN --mount=type=cache,target=/root/.cache/uv \
- --mount=type=bind,source=uv.lock,target=uv.lock \
- --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
uv sync --frozen --no-install-project
+# Set Python path
ENV PYTHONPATH=/app
-COPY ./scripts /app/scripts
+# Copy application files
+COPY scripts /app/scripts
+COPY app /app/app
+COPY alembic.ini /app/alembic.ini
-COPY ./pyproject.toml ./uv.lock ./alembic.ini /app/
+# Expose port 80
+EXPOSE 80
-COPY ./app /app/app
-
-# Sync the project
-# Ref: https://docs.astral.sh/uv/guides/integration/docker/#intermediate-layers
-RUN --mount=type=cache,target=/root/.cache/uv \
- uv sync
-CMD ["fastapi", "run", "--workers", "4", "app/main.py"]
+CMD ["uv", "run", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80", "--workers", "4"]
diff --git a/backend/app/alembic/env.py b/backend/app/alembic/env.py
index 65ad841db..331f40acf 100755
--- a/backend/app/alembic/env.py
+++ b/backend/app/alembic/env.py
@@ -2,7 +2,7 @@
from logging.config import fileConfig
from alembic import context
-from sqlmodel import SQLModel
+from app.models import SQLModel
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
diff --git a/backend/app/alembic/versions/0f205e3779ee_add_api_key_table.py b/backend/app/alembic/versions/0f205e3779ee_add_api_key_table.py
new file mode 100644
index 000000000..76bfb9680
--- /dev/null
+++ b/backend/app/alembic/versions/0f205e3779ee_add_api_key_table.py
@@ -0,0 +1,42 @@
+"""add api key table
+
+Revision ID: 0f205e3779ee
+Revises: 99f4fc325617
+Create Date: 2025-03-21 23:43:34.181704
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision = '0f205e3779ee'
+down_revision = '99f4fc325617'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('apikey',
+ sa.Column('organization_id', sa.Integer(), nullable=False),
+ sa.Column('user_id', sa.Uuid(), nullable=False),
+ sa.Column('key', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('created_at', sa.DateTime(), nullable=False),
+ sa.Column('is_deleted', sa.Boolean(), nullable=False),
+ sa.Column('deleted_at', sa.DateTime(), nullable=True),
+ sa.ForeignKeyConstraint(['organization_id'], ['organization.id'], ondelete='CASCADE'),
+ sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index(op.f('ix_apikey_key'), 'apikey', ['key'], unique=True)
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_index(op.f('ix_apikey_key'), table_name='apikey')
+ op.drop_table('apikey')
+ # ### end Alembic commands ###
diff --git a/backend/app/alembic/versions/99f4fc325617_add_organization_project_setup.py b/backend/app/alembic/versions/99f4fc325617_add_organization_project_setup.py
new file mode 100644
index 000000000..581b881e0
--- /dev/null
+++ b/backend/app/alembic/versions/99f4fc325617_add_organization_project_setup.py
@@ -0,0 +1,64 @@
+"""add organization project setup
+
+Revision ID: 99f4fc325617
+Revises: 1a31ce608336
+Create Date: 2025-03-21 20:51:00.759926
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision = '99f4fc325617'
+down_revision = '1a31ce608336'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('organization',
+ sa.Column('name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
+ sa.Column('is_active', sa.Boolean(), nullable=False),
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index(op.f('ix_organization_name'), 'organization', ['name'], unique=True)
+ op.create_table('project',
+ sa.Column('name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
+ sa.Column('description', sqlmodel.sql.sqltypes.AutoString(length=500), nullable=True),
+ sa.Column('is_active', sa.Boolean(), nullable=False),
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('organization_id', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['organization_id'], ['organization.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index(op.f('ix_project_name'), 'project', ['name'], unique=False)
+ op.create_index(op.f('ix_project_organization_id'), 'project', ['organization_id'], unique=False)
+ op.create_table('projectuser',
+ sa.Column('project_id', sa.Integer(), nullable=False),
+ sa.Column('user_id', sa.Uuid(), nullable=False),
+ sa.Column('is_admin', sa.Boolean(), nullable=False),
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('created_at', sa.DateTime(), nullable=False),
+ sa.Column('updated_at', sa.DateTime(), nullable=False),
+ sa.Column('is_deleted', sa.Boolean(), nullable=False),
+ sa.Column('deleted_at', sa.DateTime(), nullable=True),
+ sa.ForeignKeyConstraint(['project_id'], ['project.id'], ondelete='CASCADE'),
+ sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
+ sa.PrimaryKeyConstraint('id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('projectuser')
+ op.drop_index(op.f('ix_project_organization_id'), table_name='project')
+ op.drop_index(op.f('ix_project_name'), table_name='project')
+ op.drop_table('project')
+ op.drop_index(op.f('ix_organization_name'), table_name='organization')
+ op.drop_table('organization')
+ # ### end Alembic commands ###
diff --git a/backend/app/api/deps.py b/backend/app/api/deps.py
index c2b83c841..08c94f58e 100644
--- a/backend/app/api/deps.py
+++ b/backend/app/api/deps.py
@@ -1,20 +1,25 @@
from collections.abc import Generator
-from typing import Annotated
+from typing import Annotated, Optional
import jwt
-from fastapi import Depends, HTTPException, status
-from fastapi.security import OAuth2PasswordBearer
+from fastapi import Depends, HTTPException, status, Request, Header, Security
+from fastapi.responses import JSONResponse
+from fastapi.security import OAuth2PasswordBearer, APIKeyHeader
from jwt.exceptions import InvalidTokenError
from pydantic import ValidationError
-from sqlmodel import Session
+from sqlmodel import Session, select
from app.core import security
from app.core.config import settings
from app.core.db import engine
-from app.models import TokenPayload, User
+from app.utils import APIResponse
+from app.crud.organization import validate_organization
+from app.crud.api_key import get_api_key_by_value
+from app.models import TokenPayload, User, UserProjectOrg, UserOrganization, ProjectUser, Project, Organization
reusable_oauth2 = OAuth2PasswordBearer(
- tokenUrl=f"{settings.API_V1_STR}/login/access-token"
+ tokenUrl=f"{settings.API_V1_STR}/login/access-token",
+ auto_error= False
)
@@ -22,32 +27,70 @@ def get_db() -> Generator[Session, None, None]:
with Session(engine) as session:
yield session
-
+api_key_header = APIKeyHeader(name="X-API-KEY", auto_error=False)
SessionDep = Annotated[Session, Depends(get_db)]
TokenDep = Annotated[str, Depends(reusable_oauth2)]
+def get_current_user(
+ session: SessionDep,
+ token: TokenDep,
+ api_key: Annotated[str, Depends(api_key_header)],
+) -> User:
+    """Authenticate the user via API key first, falling back to JWT token. Returns only a User."""
-def get_current_user(session: SessionDep, token: TokenDep) -> User:
- try:
- payload = jwt.decode(
- token, settings.SECRET_KEY, algorithms=[security.ALGORITHM]
- )
- token_data = TokenPayload(**payload)
- except (InvalidTokenError, ValidationError):
- raise HTTPException(
- status_code=status.HTTP_403_FORBIDDEN,
- detail="Could not validate credentials",
- )
- user = session.get(User, token_data.sub)
- if not user:
- raise HTTPException(status_code=404, detail="User not found")
- if not user.is_active:
- raise HTTPException(status_code=400, detail="Inactive user")
- return user
+ if api_key:
+ api_key_record = get_api_key_by_value(session, api_key)
+ if not api_key_record:
+ raise HTTPException(status_code=401, detail="Invalid API Key")
+
+ user = session.get(User, api_key_record.user_id)
+ if not user:
+ raise HTTPException(status_code=404, detail="User linked to API Key not found")
+
+ return user # Return only User object
+
+ if token:
+ try:
+ payload = jwt.decode(
+ token, settings.SECRET_KEY, algorithms=[security.ALGORITHM]
+ )
+ token_data = TokenPayload(**payload)
+ except (InvalidTokenError, ValidationError):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Could not validate credentials",
+ )
+ user = session.get(User, token_data.sub)
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+ if not user.is_active:
+ raise HTTPException(status_code=400, detail="Inactive user")
+
+ return user # Return only User object
+
+ raise HTTPException(status_code=401, detail="Invalid Authorization format")
CurrentUser = Annotated[User, Depends(get_current_user)]
+def get_current_user_org(
+ current_user: CurrentUser,
+ session: SessionDep,
+ request: Request
+) -> UserOrganization:
+ """Extend `User` with organization_id if available, otherwise return UserOrganization without it."""
+
+ organization_id = None
+ api_key = request.headers.get("X-API-KEY")
+ if api_key:
+ api_key_record = get_api_key_by_value(session, api_key)
+ if api_key_record:
+ validate_organization(session, api_key_record.organization_id)
+ organization_id = api_key_record.organization_id
+
+ return UserOrganization(**current_user.model_dump(), organization_id=organization_id)
+
+CurrentUserOrg = Annotated[UserOrganization, Depends(get_current_user_org)]
def get_current_active_superuser(current_user: CurrentUser) -> User:
if not current_user.is_superuser:
@@ -55,3 +98,77 @@ def get_current_active_superuser(current_user: CurrentUser) -> User:
status_code=403, detail="The user doesn't have enough privileges"
)
return current_user
+
+def get_current_active_superuser_org(current_user: CurrentUserOrg) -> User:
+ if not current_user.is_superuser:
+ raise HTTPException(
+ status_code=403, detail="The user doesn't have enough privileges"
+ )
+ return current_user
+
+async def http_exception_handler(request: Request, exc: HTTPException):
+ """
+ Global handler for HTTPException to return standardized response format.
+ """
+ return JSONResponse(
+ status_code=exc.status_code,
+ content=APIResponse.failure_response(exc.detail).model_dump() | {"detail": exc.detail}, # TEMPORARY: Keep "detail" for backward compatibility
+ )
+
+def verify_user_project_organization(
+ db: SessionDep,
+ current_user: CurrentUserOrg,
+ project_id: int,
+ organization_id: int,
+) -> UserProjectOrg:
+ """
+ Verify that the authenticated user is part of the project
+ and that the project belongs to the organization.
+ """
+ if current_user.organization_id and current_user.organization_id != organization_id:
+ raise HTTPException(status_code=403, detail="User is not part of organization")
+
+ project_organization = db.exec(
+ select(Project, Organization)
+ .join(Organization, Project.organization_id == Organization.id)
+ .where(Project.id == project_id, Project.is_active==True, Organization.id == organization_id, Organization.is_active==True)
+ ).first()
+
+
+ if not project_organization:
+ # Determine the exact error based on missing data
+ organization = db.exec(select(Organization).where(Organization.id == organization_id)).first()
+ if not organization:
+ raise HTTPException(status_code=404, detail="Organization not found")
+
+ if not organization.is_active:
+ raise HTTPException(status_code=400, detail="Organization is not active") # Use 400 for inactive resources
+
+ project = db.exec(select(Project).where(Project.id == project_id)).first()
+ if not project:
+ raise HTTPException(status_code=404, detail="Project not found")
+
+ if not project.is_active:
+ raise HTTPException(status_code=400, detail="Project is not active") # Use 400 for inactive resources
+
+ raise HTTPException(status_code=403, detail="Project does not belong to the organization")
+
+    # Superusers bypass all membership checks; API-key requests are granted access to every project in the organization
+ if current_user.is_superuser or current_user.organization_id:
+ current_user.organization_id = organization_id
+ return UserProjectOrg(**current_user.model_dump(), project_id=project_id)
+
+ # Check if the user is part of the project
+ user_in_project = db.exec(
+ select(ProjectUser).where(
+ ProjectUser.user_id == current_user.id,
+ ProjectUser.project_id == project_id,
+ ProjectUser.is_deleted == False
+ )
+ ).first()
+
+ if not user_in_project:
+ raise HTTPException(status_code=403, detail="User is not part of the project")
+
+ current_user.organization_id = organization_id
+ return UserProjectOrg(**current_user.model_dump(), project_id=project_id)
diff --git a/backend/app/api/main.py b/backend/app/api/main.py
index eac18c8e8..e18ac930c 100644
--- a/backend/app/api/main.py
+++ b/backend/app/api/main.py
@@ -1,6 +1,6 @@
from fastapi import APIRouter
-from app.api.routes import items, login, private, users, utils
+from app.api.routes import items, login, private, users, utils, project, organization, project_user, api_keys, threads
from app.core.config import settings
api_router = APIRouter()
@@ -8,6 +8,11 @@
api_router.include_router(users.router)
api_router.include_router(utils.router)
api_router.include_router(items.router)
+api_router.include_router(threads.router)
+api_router.include_router(organization.router)
+api_router.include_router(project.router)
+api_router.include_router(project_user.router)
+api_router.include_router(api_keys.router)
if settings.ENVIRONMENT == "local":
diff --git a/backend/app/api/routes/api_keys.py b/backend/app/api/routes/api_keys.py
new file mode 100644
index 000000000..aa246c46f
--- /dev/null
+++ b/backend/app/api/routes/api_keys.py
@@ -0,0 +1,93 @@
+import uuid
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import Session
+from app.api.deps import get_db, get_current_active_superuser
+from app.crud.api_key import create_api_key, get_api_key, get_api_keys_by_organization, delete_api_key, get_api_key_by_user_org
+from app.crud.organization import validate_organization
+from app.models import APIKeyPublic, User
+from app.utils import APIResponse
+
+router = APIRouter(prefix="/apikeys", tags=["API Keys"])
+
+
+# Create API Key
+@router.post("/", response_model=APIResponse[APIKeyPublic])
+def create_key(
+ organization_id: int,
+ user_id: uuid.UUID,
+ session: Session = Depends(get_db),
+ current_user: User = Depends(get_current_active_superuser)
+):
+ """
+ Generate a new API key for the user's organization.
+ """
+ try:
+ # Validate organization
+ validate_organization(session, organization_id)
+
+ existing_api_key = get_api_key_by_user_org(session, organization_id, user_id)
+ if existing_api_key:
+ raise HTTPException(status_code=400, detail="API Key already exists for this user and organization")
+
+ # Create and return API key
+ api_key = create_api_key(session, organization_id=organization_id, user_id=user_id)
+ return APIResponse.success_response(api_key)
+
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+
+
+# List API Keys
+@router.get("/", response_model=APIResponse[list[APIKeyPublic]])
+def list_keys(
+ organization_id: int,
+ session: Session = Depends(get_db),
+ current_user: User = Depends(get_current_active_superuser),
+):
+ """
+ Retrieve all API keys for the user's organization.
+ """
+ try:
+ # Validate organization
+ validate_organization(session, organization_id)
+
+ # Retrieve API keys
+ api_keys = get_api_keys_by_organization(session, organization_id)
+ return APIResponse.success_response(api_keys)
+
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+
+
+# Get API Key by ID
+@router.get("/{api_key_id}", response_model=APIResponse[APIKeyPublic])
+def get_key(
+ api_key_id: int,
+ session: Session = Depends(get_db),
+ current_user: User = Depends(get_current_active_superuser)
+):
+ """
+ Retrieve an API key by ID.
+ """
+ api_key = get_api_key(session, api_key_id)
+ if not api_key:
+ raise HTTPException(status_code=404, detail="API Key does not exist")
+
+ return APIResponse.success_response(api_key)
+
+
+# Revoke API Key (Soft Delete)
+@router.delete("/{api_key_id}", response_model=APIResponse[dict])
+def revoke_key(
+ api_key_id: int,
+ session: Session = Depends(get_db),
+ current_user: User = Depends(get_current_active_superuser)
+):
+ """
+ Soft delete an API key (revoke access).
+ """
+ try:
+ delete_api_key(session, api_key_id)
+ return APIResponse.success_response({"message": "API key revoked successfully"})
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
diff --git a/backend/app/api/routes/login.py b/backend/app/api/routes/login.py
index f55175a5a..ba9ec45eb 100644
--- a/backend/app/api/routes/login.py
+++ b/backend/app/api/routes/login.py
@@ -1,7 +1,7 @@
from datetime import timedelta
-from typing import Annotated, Any
+from typing import Annotated, Any, Optional
-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends, HTTPException, Form
from fastapi.responses import HTMLResponse
from fastapi.security import OAuth2PasswordRequestForm
@@ -23,10 +23,13 @@
@router.post("/login/access-token")
def login_access_token(
- session: SessionDep, form_data: Annotated[OAuth2PasswordRequestForm, Depends()]
+ session: SessionDep,
+ form_data: Annotated[OAuth2PasswordRequestForm, Depends()],
+ token_expiry_minutes: Optional[int] = Form(default=settings.ACCESS_TOKEN_EXPIRE_MINUTES, ge=1, le=60 * 24 * 360),
) -> Token:
"""
- OAuth2 compatible token login, get an access token for future requests
+ OAuth2 compatible token login with customizable expiration time.
+ Specify an expiration time (in minutes), with a default of 30 days and a max of 360 days.
"""
user = authenticate(
session=session, email=form_data.username, password=form_data.password
@@ -35,7 +38,9 @@ def login_access_token(
raise HTTPException(status_code=400, detail="Incorrect email or password")
elif not user.is_active:
raise HTTPException(status_code=400, detail="Inactive user")
- access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+
+ access_token_expires = timedelta(minutes=token_expiry_minutes)
+
return Token(
access_token=security.create_access_token(
user.id, expires_delta=access_token_expires
diff --git a/backend/app/api/routes/organization.py b/backend/app/api/routes/organization.py
new file mode 100644
index 000000000..a3b198b4a
--- /dev/null
+++ b/backend/app/api/routes/organization.py
@@ -0,0 +1,76 @@
+from typing import Any, List
+
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy import func
+from sqlmodel import Session, select
+
+from app.models import Organization, OrganizationCreate, OrganizationUpdate, OrganizationPublic
+from app.api.deps import (
+ CurrentUser,
+ SessionDep,
+ get_current_active_superuser,
+)
+from app.crud.organization import create_organization, get_organization_by_id
+from app.utils import APIResponse
+
+router = APIRouter(prefix="/organizations", tags=["organizations"])
+
+
+# Retrieve organizations
+@router.get("/", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[List[OrganizationPublic]])
+def read_organizations(session: SessionDep, skip: int = 0, limit: int = 100):
+ count_statement = select(func.count()).select_from(Organization)
+ count = session.exec(count_statement).one()
+
+ statement = select(Organization).offset(skip).limit(limit)
+ organizations = session.exec(statement).all()
+
+ return APIResponse.success_response(organizations)
+
+# Create a new organization
+@router.post("/", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[OrganizationPublic])
+def create_new_organization(*, session: SessionDep, org_in: OrganizationCreate):
+ new_org = create_organization(session=session, org_create=org_in)
+ return APIResponse.success_response(new_org)
+
+
+@router.get("/{org_id}", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[OrganizationPublic])
+def read_organization(*, session: SessionDep, org_id: int):
+ """
+ Retrieve an organization by ID.
+ """
+ org = get_organization_by_id(session=session, org_id=org_id)
+ if org is None:
+ raise HTTPException(status_code=404, detail="Organization not found")
+ return APIResponse.success_response(org)
+
+
+# Update an organization
+@router.patch("/{org_id}", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[OrganizationPublic])
+def update_organization(*, session: SessionDep, org_id: int, org_in: OrganizationUpdate):
+ org = get_organization_by_id(session=session, org_id=org_id)
+ if org is None:
+ raise HTTPException(status_code=404, detail="Organization not found")
+
+ org_data = org_in.model_dump(exclude_unset=True)
+ org = org.model_copy(update=org_data)
+
+
+ session.add(org)
+ session.commit()
+ session.flush()
+
+ return APIResponse.success_response(org)
+
+
+# Delete an organization
+@router.delete("/{org_id}", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[None])
+def delete_organization(session: SessionDep, org_id: int):
+ org = get_organization_by_id(session=session, org_id=org_id)
+ if org is None:
+ raise HTTPException(status_code=404, detail="Organization not found")
+
+ session.delete(org)
+ session.commit()
+
+ return APIResponse.success_response(None)
diff --git a/backend/app/api/routes/project.py b/backend/app/api/routes/project.py
new file mode 100644
index 000000000..f2bbbd81e
--- /dev/null
+++ b/backend/app/api/routes/project.py
@@ -0,0 +1,74 @@
+from typing import Any, List
+
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy import func
+from sqlmodel import Session, select
+
+from app.models import Project, ProjectCreate, ProjectUpdate, ProjectPublic
+from app.api.deps import (
+ CurrentUser,
+ SessionDep,
+ get_current_active_superuser,
+)
+from app.crud.project import create_project, get_project_by_id, get_projects_by_organization
+from app.utils import APIResponse
+
+router = APIRouter(prefix="/projects", tags=["projects"])
+
+
+# Retrieve projects
+@router.get("/",dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[List[ProjectPublic]])
+def read_projects(session: SessionDep, skip: int = Query(0, ge=0), limit: int = Query(100, ge=1, le=100)):
+ count_statement = select(func.count()).select_from(Project)
+ count = session.exec(count_statement).one()
+
+ statement = select(Project).offset(skip).limit(limit)
+ projects = session.exec(statement).all()
+
+ return APIResponse.success_response(projects)
+
+
+# Create a new project
+@router.post("/", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[ProjectPublic])
+def create_new_project(*, session: SessionDep, project_in: ProjectCreate):
+ project = create_project(session=session, project_create=project_in)
+ return APIResponse.success_response(project)
+
+@router.get("/{project_id}", dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[ProjectPublic])
+def read_project(*, session: SessionDep, project_id: int) :
+ """
+ Retrieve a project by ID.
+ """
+ project = get_project_by_id(session=session, project_id=project_id)
+ if project is None:
+ raise HTTPException(status_code=404, detail="Project not found")
+ return APIResponse.success_response(project)
+
+
+# Update a project
+@router.patch("/{project_id}",dependencies=[Depends(get_current_active_superuser)], response_model=APIResponse[ProjectPublic])
+def update_project(*, session: SessionDep, project_id: int, project_in: ProjectUpdate):
+ project = get_project_by_id(session=session, project_id=project_id)
+ if project is None:
+ raise HTTPException(status_code=404, detail="Project not found")
+
+ project_data = project_in.model_dump(exclude_unset=True)
+ project = project.model_copy(update=project_data)
+
+ session.add(project)
+ session.commit()
+ session.flush()
+ return APIResponse.success_response(project)
+
+
+# Delete a project
+@router.delete("/{project_id}",dependencies=[Depends(get_current_active_superuser)])
+def delete_project(session: SessionDep, project_id: int):
+ project = get_project_by_id(session=session, project_id=project_id)
+ if project is None:
+ raise HTTPException(status_code=404, detail="Project not found")
+
+ session.delete(project)
+ session.commit()
+
+ return APIResponse.success_response(None)
diff --git a/backend/app/api/routes/project_user.py b/backend/app/api/routes/project_user.py
new file mode 100644
index 000000000..e1be25000
--- /dev/null
+++ b/backend/app/api/routes/project_user.py
@@ -0,0 +1,105 @@
+import uuid
+from fastapi import APIRouter, Depends, HTTPException, Query, Request
+from sqlmodel import Session
+from typing import Annotated
+from app.api.deps import get_db, verify_user_project_organization
+from app.crud.project_user import add_user_to_project, remove_user_from_project, get_users_by_project, is_project_admin
+from app.models import User, ProjectUserPublic, UserProjectOrg, Message
+from app.utils import APIResponse
+
+
+router = APIRouter(prefix="/project/users", tags=["project_users"])
+
+
+# Add a user to a project
+@router.post("/{user_id}", response_model=APIResponse[ProjectUserPublic])
+def add_user(
+ request: Request,
+ user_id: uuid.UUID,
+ is_admin: bool = False,
+ session: Session = Depends(get_db),
+ current_user: UserProjectOrg = Depends(verify_user_project_organization)
+):
+ """
+ Add a user to a project.
+ """
+ project_id = current_user.project_id
+
+ user = session.get(User, user_id)
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Only allow superusers, project admins, or API key-authenticated requests to add users
+ if (
+ not current_user.is_superuser
+ and not request.headers.get("X-API-KEY")
+ and not is_project_admin(session, current_user.id, project_id)
+ ):
+ raise HTTPException(
+ status_code=403,
+ detail="Only project admins or superusers can add users."
+ )
+
+ try:
+ added_user = add_user_to_project(session, project_id, user_id, is_admin)
+ return APIResponse.success_response(added_user)
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+
+
+# Get all users in a project
+@router.get("/", response_model=APIResponse[list[ProjectUserPublic]])
+def list_project_users(
+ session: Session = Depends(get_db),
+ current_user: UserProjectOrg = Depends(verify_user_project_organization),
+ skip: int = Query(0, ge=0),
+ limit: int = Query(100, ge=1, le=100)
+):
+ """
+ Get all users in a project.
+ """
+ users, total_count = get_users_by_project(session, current_user.project_id, skip, limit)
+
+ metadata = {
+ "total_count": total_count,
+ "limit": limit,
+ "skip": skip
+ }
+
+ return APIResponse.success_response(data=users, metadata=metadata)
+
+# Remove a user from a project
+@router.delete("/{user_id}", response_model=APIResponse[Message])
+def remove_user(
+ request: Request,
+ user_id: uuid.UUID,
+ session: Session = Depends(get_db),
+ current_user: UserProjectOrg = Depends(verify_user_project_organization)
+):
+ """
+ Remove a user from a project.
+ """
+    # Resolve the target project from the authenticated user's context
+ project_id = current_user.project_id
+
+ user = session.get(User, user_id)
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Only allow superusers, project admins, or API key-authenticated requests to remove users
+ if (
+ not current_user.is_superuser
+ and not request.headers.get("X-API-KEY")
+ and not is_project_admin(session, current_user.id, project_id)
+ ):
+ raise HTTPException(
+ status_code=403,
+ detail="Only project admins or superusers can remove users."
+ )
+
+ try:
+ remove_user_from_project(session, project_id, user_id)
+ return APIResponse.success_response({"message": "User removed from project successfully."})
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+
diff --git a/backend/app/api/routes/threads.py b/backend/app/api/routes/threads.py
new file mode 100644
index 000000000..9da6b3373
--- /dev/null
+++ b/backend/app/api/routes/threads.py
@@ -0,0 +1,140 @@
+import re
+import requests
+
+import openai
+from openai import OpenAI
+from fastapi import APIRouter, BackgroundTasks
+
+from app.utils import APIResponse
+from app.core import settings, logging
+
+logger = logging.getLogger(__name__)
+router = APIRouter(tags=["threads"])
+
+
+def send_callback(callback_url: str, data: dict):
+ """Send results to the callback URL (synchronously)."""
+ try:
+ session = requests.Session()
+ # uncomment this to run locally without SSL
+ # session.verify = False
+ response = session.post(callback_url, json=data)
+ response.raise_for_status()
+ return True
+ except requests.RequestException as e:
+ logger.error(f"Callback failed: {str(e)}")
+ return False
+
+
+def process_run(request: dict, client: OpenAI):
+    """
+    Background task to run create_and_poll, then send the callback with the result.
+    This function is run in the background after we have already returned an initial response.
+    """
+    try:
+        # Start the run
+        run = client.beta.threads.runs.create_and_poll(
+            thread_id=request["thread_id"],
+            assistant_id=request["assistant_id"],
+        )
+
+        if run.status == "completed":
+            messages = client.beta.threads.messages.list(
+                thread_id=request["thread_id"])
+            latest_message = messages.data[0]
+            message_content = latest_message.content[0].text.value
+
+            remove_citation = request.get("remove_citation", False)
+
+            if remove_citation:
+                message = re.sub(r"【\d+(?::\d+)?†[^】]*】", "", message_content)
+            else:
+                message = message_content
+
+            # Update the data dictionary with additional fields from the request, excluding specific keys
+            additional_data = {k: v for k, v in request.items(
+            ) if k not in {"question", "assistant_id", "callback_url", "thread_id"}}
+            callback_response = APIResponse.success_response(data={
+                "status": "success",
+                "message": message,
+                "thread_id": request["thread_id"],
+                "endpoint": request.get("endpoint", "some-default-endpoint"),
+                **additional_data
+            })
+        else:
+            callback_response = APIResponse.failure_response(
+                error=f"Run failed with status: {run.status}")
+
+        # Send callback with results
+        send_callback(request["callback_url"], callback_response.model_dump())
+
+    except openai.OpenAIError as e:
+        # Handle any other OpenAI API errors
+        if isinstance(e.body, dict) and "message" in e.body:
+            error_message = e.body["message"]
+        else:
+            error_message = str(e)
+
+        callback_response = APIResponse.failure_response(error=error_message)
+
+        send_callback(request["callback_url"], callback_response.model_dump())
+
+
+
+@router.post("/threads")
+async def threads(request: dict, background_tasks: BackgroundTasks):
+ """
+ Accepts a question, assistant_id, callback_url, and optional thread_id from the request body.
+ Returns an immediate "processing" response, then continues to run create_and_poll in background.
+ Once completed, calls send_callback with the final result.
+ """
+ client = OpenAI(api_key=settings.OPENAI_API_KEY)
+
+ # Use get method to safely access thread_id
+ thread_id = request.get("thread_id")
+
+ # 1. Validate or check if there's an existing thread with an in-progress run
+ if thread_id:
+ try:
+ runs = client.beta.threads.runs.list(thread_id=thread_id)
+ # Get the most recent run (first in the list) if any
+ if runs.data and len(runs.data) > 0:
+ latest_run = runs.data[0]
+ if latest_run.status in ["queued", "in_progress", "requires_action"]:
+ return APIResponse.failure_response(error=f"There is an active run on this thread (status: {latest_run.status}). Please wait for it to complete.")
+ except openai.OpenAIError:
+ # Handle invalid thread ID
+ return APIResponse.failure_response(error=f"Invalid thread ID provided {thread_id}")
+
+ # Use existing thread
+ client.beta.threads.messages.create(
+ thread_id=thread_id, role="user", content=request["question"]
+ )
+ else:
+ try:
+ # Create new thread
+ thread = client.beta.threads.create()
+ client.beta.threads.messages.create(
+ thread_id=thread.id, role="user", content=request["question"]
+ )
+ request["thread_id"] = thread.id
+ except openai.OpenAIError as e:
+ # Handle any other OpenAI API errors
+ if isinstance(e.body, dict) and "message" in e.body:
+ error_message = e.body["message"]
+ else:
+ error_message = str(e)
+ return APIResponse.failure_response(error=error_message)
+
+ # 2. Send immediate response to complete the API call
+ initial_response = APIResponse.success_response(data={
+ "status": "processing",
+ "message": "Run started",
+ "thread_id": request.get("thread_id"),
+ "success": True,
+ })
+
+ # 3. Schedule the background task to run create_and_poll and send callback
+ background_tasks.add_task(process_run, request, client)
+
+ # 4. Return immediately so the client knows we've accepted the request
+ return initial_response
diff --git a/backend/app/api/routes/users.py b/backend/app/api/routes/users.py
index 8ea7d4da7..1d6a4f709 100644
--- a/backend/app/api/routes/users.py
+++ b/backend/app/api/routes/users.py
@@ -51,7 +51,7 @@ def read_users(session: SessionDep, skip: int = 0, limit: int = 100) -> Any:
@router.post(
"/", dependencies=[Depends(get_current_active_superuser)], response_model=UserPublic
)
-def create_user(*, session: SessionDep, user_in: UserCreate) -> Any:
+def create_user_endpoint(*, session: SessionDep, user_in: UserCreate) -> Any:
"""
Create new user.
"""
@@ -178,7 +178,7 @@ def read_user_by_id(
dependencies=[Depends(get_current_active_superuser)],
response_model=UserPublic,
)
-def update_user(
+def update_user_endpoint(
*,
session: SessionDep,
user_id: uuid.UUID,
diff --git a/backend/app/api/routes/utils.py b/backend/app/api/routes/utils.py
index fc093419b..eb45bbc20 100644
--- a/backend/app/api/routes/utils.py
+++ b/backend/app/api/routes/utils.py
@@ -26,6 +26,6 @@ def test_email(email_to: EmailStr) -> Message:
return Message(message="Test email sent")
-@router.get("/health-check/")
+@router.get("/health/")
async def health_check() -> bool:
return True
diff --git a/backend/app/core/__init__.py b/backend/app/core/__init__.py
index e69de29bb..8a64c3009 100644
--- a/backend/app/core/__init__.py
+++ b/backend/app/core/__init__.py
@@ -0,0 +1,4 @@
+from .config import settings
+from .logger import logging
+
+__all__ = ['settings', 'logging']
diff --git a/backend/app/core/config.py b/backend/app/core/config.py
index d58e03c87..5e20683bb 100644
--- a/backend/app/core/config.py
+++ b/backend/app/core/config.py
@@ -1,5 +1,6 @@
import secrets
import warnings
+import os
from typing import Annotated, Any, Literal
from pydantic import (
@@ -31,10 +32,11 @@ class Settings(BaseSettings):
env_ignore_empty=True,
extra="ignore",
)
+ OPENAI_API_KEY: str
API_V1_STR: str = "/api/v1"
SECRET_KEY: str = secrets.token_urlsafe(32)
- # 60 minutes * 24 hours * 8 days = 8 days
- ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8
+    # 60 minutes * 24 hours * 1 day = 1 day
+ ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 1
FRONTEND_HOST: str = "http://localhost:5173"
ENVIRONMENT: Literal["local", "staging", "production"] = "local"
@@ -95,6 +97,9 @@ def emails_enabled(self) -> bool:
FIRST_SUPERUSER: EmailStr
FIRST_SUPERUSER_PASSWORD: str
+ LOG_DIR: str = os.path.join(os.path.dirname(
+ os.path.dirname(__file__)), "logs")
+
def _check_default_secret(self, var_name: str, value: str | None) -> None:
if value == "changethis":
message = (
diff --git a/backend/app/core/logger.py b/backend/app/core/logger.py
new file mode 100644
index 000000000..70605b5a1
--- /dev/null
+++ b/backend/app/core/logger.py
@@ -0,0 +1,22 @@
+import logging
+import os
+from logging.handlers import RotatingFileHandler
+from app.core.config import settings
+
+LOG_DIR = settings.LOG_DIR
+if not os.path.exists(LOG_DIR):
+ os.makedirs(LOG_DIR)
+
+LOG_FILE_PATH = os.path.join(LOG_DIR, "app.log")
+
+LOGGING_LEVEL = logging.INFO
+LOGGING_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+
+logging.basicConfig(level=LOGGING_LEVEL, format=LOGGING_FORMAT)
+
+file_handler = RotatingFileHandler(
+ LOG_FILE_PATH, maxBytes=10485760, backupCount=5)
+file_handler.setLevel(LOGGING_LEVEL)
+file_handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
+
+logging.getLogger("").addHandler(file_handler)
diff --git a/backend/app/crud/api_key.py b/backend/app/crud/api_key.py
new file mode 100644
index 000000000..efbfbb1ba
--- /dev/null
+++ b/backend/app/crud/api_key.py
@@ -0,0 +1,82 @@
+import uuid
+import secrets
+from datetime import datetime
+from sqlmodel import Session, select
+
+from app.models import APIKey, APIKeyPublic
+
+
+# Create API Key
+def create_api_key(session: Session, organization_id: uuid.UUID, user_id: uuid.UUID) -> APIKeyPublic:
+ """
+ Generates a new API key for an organization and associates it with a user.
+ """
+ api_key = APIKey(
+ key='ApiKey '+secrets.token_urlsafe(32),
+ organization_id=organization_id,
+ user_id=user_id
+ )
+
+ session.add(api_key)
+ session.commit()
+ session.refresh(api_key)
+
+ return APIKeyPublic.model_validate(api_key)
+
+
+# Get API Key by ID
+def get_api_key(session: Session, api_key_id: int) -> APIKeyPublic | None:
+ """
+ Retrieves an API key by its ID if it exists and is not deleted.
+ """
+ api_key = session.exec(
+ select(APIKey).where(APIKey.id == api_key_id, APIKey.is_deleted == False)
+ ).first()
+
+ return APIKeyPublic.model_validate(api_key) if api_key else None
+
+
+# Get API Keys for an Organization
+def get_api_keys_by_organization(session: Session, organization_id: uuid.UUID) -> list[APIKeyPublic]:
+ """
+ Retrieves all active API keys associated with an organization.
+ """
+ api_keys = session.exec(
+ select(APIKey).where(APIKey.organization_id == organization_id, APIKey.is_deleted == False)
+ ).all()
+
+ return [APIKeyPublic.model_validate(api_key) for api_key in api_keys]
+
+
+# Soft Delete (Revoke) API Key
+def delete_api_key(session: Session, api_key_id: int) -> None:
+ """
+ Soft deletes (revokes) an API key by marking it as deleted.
+ """
+ api_key = session.get(APIKey, api_key_id)
+
+ if not api_key or api_key.is_deleted:
+ raise ValueError("API key not found or already deleted")
+
+ api_key.is_deleted = True
+ api_key.deleted_at = datetime.utcnow()
+
+ session.add(api_key)
+ session.commit()
+
+def get_api_key_by_value(session: Session, api_key_value: str) -> APIKey | None:
+ """
+ Retrieve an API Key record by its value.
+ """
+ return session.exec(select(APIKey).where(APIKey.key == api_key_value, APIKey.is_deleted == False)).first()
+
+def get_api_key_by_user_org(session: Session, organization_id: int, user_id: str) -> APIKey | None:
+ """
+ Retrieve an API key for a specific user and organization.
+ """
+ statement = select(APIKey).where(
+ APIKey.organization_id == organization_id,
+ APIKey.user_id == user_id,
+ APIKey.is_deleted == False
+ )
+ return session.exec(statement).first()
\ No newline at end of file
diff --git a/backend/app/crud/organization.py b/backend/app/crud/organization.py
new file mode 100644
index 000000000..a6a550047
--- /dev/null
+++ b/backend/app/crud/organization.py
@@ -0,0 +1,36 @@
+from typing import Any, Optional
+
+from sqlmodel import Session, select
+
+from app.models import Organization, OrganizationCreate
+
+def create_organization(*, session: Session, org_create: OrganizationCreate) -> Organization:
+ db_org = Organization.model_validate(org_create)
+ session.add(db_org)
+ session.commit()
+ session.refresh(db_org)
+ return db_org
+
+
+# Get organization by ID
+def get_organization_by_id(session: Session, org_id: int) -> Optional[Organization]:
+ statement = select(Organization).where(Organization.id == org_id)
+ return session.exec(statement).first()
+
+def get_organization_by_name(*, session: Session, name: str) -> Optional[Organization]:
+ statement = select(Organization).where(Organization.name == name)
+ return session.exec(statement).first()
+
+# Validate if organization exists and is active
+def validate_organization(session: Session, org_id: int) -> Organization:
+ """
+ Ensures that an organization exists and is active.
+ """
+ organization = get_organization_by_id(session, org_id)
+ if not organization:
+ raise ValueError("Organization not found")
+
+ if not organization.is_active:
+ raise ValueError("Organization is not active")
+
+ return organization
diff --git a/backend/app/crud/project.py b/backend/app/crud/project.py
new file mode 100644
index 000000000..116c6ec83
--- /dev/null
+++ b/backend/app/crud/project.py
@@ -0,0 +1,21 @@
+from typing import List, Optional
+
+from sqlmodel import Session, select
+
+from app.models import Project, ProjectCreate
+
+
+def create_project(*, session: Session, project_create: ProjectCreate) -> Project:
+ db_project = Project.model_validate(project_create)
+ session.add(db_project)
+ session.commit()
+ session.refresh(db_project)
+ return db_project
+
+def get_project_by_id(*, session: Session, project_id: int) -> Optional[Project]:
+ statement = select(Project).where(Project.id == project_id)
+ return session.exec(statement).first()
+
+def get_projects_by_organization(*, session: Session, org_id: int) -> List[Project]:
+ statement = select(Project).where(Project.organization_id == org_id)
+ return session.exec(statement).all()
\ No newline at end of file
diff --git a/backend/app/crud/project_user.py b/backend/app/crud/project_user.py
new file mode 100644
index 000000000..99fc80b5f
--- /dev/null
+++ b/backend/app/crud/project_user.py
@@ -0,0 +1,100 @@
+import uuid
+from sqlmodel import Session, select, delete, func
+from app.models import ProjectUser, ProjectUserPublic, User, Project
+from datetime import datetime
+
+
+def is_project_admin(session: Session, user_id: str, project_id: int) -> bool:
+ """
+ Checks if a user is an admin of the given project.
+ """
+ project_user = session.exec(
+ select(ProjectUser).where(
+ ProjectUser.project_id == project_id,
+ ProjectUser.user_id == user_id,
+ ProjectUser.is_deleted == False
+ )
+ ).first()
+
+ return bool(project_user and project_user.is_admin)
+
+# Add a user to a project
+def add_user_to_project(
+ session: Session, project_id: uuid.UUID, user_id: uuid.UUID, is_admin: bool = False
+) -> ProjectUserPublic:
+ """
+ Adds a user to a project.
+ """
+ existing = session.exec(
+ select(ProjectUser).where(ProjectUser.project_id == project_id, ProjectUser.user_id == user_id)
+ ).first()
+
+ if existing:
+ raise ValueError("User is already a member of this project.")
+
+ project_user = ProjectUser(project_id=project_id, user_id=user_id, is_admin=is_admin)
+ session.add(project_user)
+ session.commit()
+ session.refresh(project_user)
+
+ return ProjectUserPublic.model_validate(project_user)
+
+
+def remove_user_from_project(session: Session, project_id: uuid.UUID, user_id: uuid.UUID) -> None:
+ """
+ Removes a user from a project.
+ """
+ project_user = session.exec(
+ select(ProjectUser).where(
+ ProjectUser.project_id == project_id,
+ ProjectUser.user_id == user_id,
+ ProjectUser.is_deleted == False # Ignore already deleted users
+ )
+ ).first()
+ if not project_user:
+ raise ValueError("User is not a member of this project or already removed.")
+
+ project_user.is_deleted = True
+ project_user.deleted_at = datetime.utcnow()
+ session.add(project_user) # Required to mark as dirty for commit
+ session.commit()
+
+
+def get_users_by_project(
+ session: Session, project_id: uuid.UUID, skip: int = 0, limit: int = 100
+) -> tuple[list[ProjectUserPublic], int]:
+ """
+ Returns paginated users in a given project along with the total count.
+ """
+ count_statement = select(func.count()).select_from(ProjectUser).where(
+ ProjectUser.project_id == project_id, ProjectUser.is_deleted == False
+ )
+ total_count = session.exec(count_statement).one()
+
+ statement = (
+ select(ProjectUser)
+ .where(ProjectUser.project_id == project_id, ProjectUser.is_deleted == False)
+ .offset(skip)
+ .limit(limit)
+ )
+ users = session.exec(statement).all()
+
+ return [ProjectUserPublic.model_validate(user) for user in users], total_count
+
+
+# Check if a user belongs to at least one project in the organization
+def is_user_part_of_organization(session: Session, user_id: uuid.UUID, org_id: int) -> bool:
+ """
+ Checks if a user is part of at least one project within the organization.
+ """
+ user_in_org = session.exec(
+ select(ProjectUser)
+ .join(Project, ProjectUser.project_id == Project.id)
+ .where(
+ Project.organization_id == org_id,
+ ProjectUser.user_id == user_id,
+ ProjectUser.is_deleted == False
+ )
+ ).first()
+
+ return bool(user_in_org)
diff --git a/backend/app/main.py b/backend/app/main.py
index 9a95801e7..4f87bc80e 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -1,12 +1,13 @@
import sentry_sdk
-from fastapi import FastAPI
+
+from fastapi import FastAPI, HTTPException
from fastapi.routing import APIRoute
from starlette.middleware.cors import CORSMiddleware
from app.api.main import api_router
+from app.api.deps import http_exception_handler
from app.core.config import settings
-
def custom_generate_unique_id(route: APIRoute) -> str:
return f"{route.tags[0]}-{route.name}"
@@ -31,3 +32,5 @@ def custom_generate_unique_id(route: APIRoute) -> str:
)
app.include_router(api_router, prefix=settings.API_V1_STR)
+
+app.add_exception_handler(HTTPException, http_exception_handler)
diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py
index 5a03fa56c..8f3234282 100644
--- a/backend/app/models/__init__.py
+++ b/backend/app/models/__init__.py
@@ -1,6 +1,37 @@
+from sqlmodel import SQLModel
+
from .auth import Token, TokenPayload
from .item import Item, ItemCreate, ItemPublic, ItemsPublic, ItemUpdate
from .message import Message
+
+from .project_user import (
+ ProjectUser,
+ ProjectUserPublic,
+ ProjectUsersPublic,
+)
+
+from .project import (
+ Project,
+ ProjectCreate,
+ ProjectPublic,
+ ProjectsPublic,
+ ProjectUpdate,
+)
+
+from .api_key import (
+ APIKey,
+ APIKeyBase,
+ APIKeyPublic
+)
+
+from .organization import (
+ Organization,
+ OrganizationCreate,
+ OrganizationPublic,
+ OrganizationsPublic,
+ OrganizationUpdate,
+)
+
from .user import (
User,
UserCreate,
@@ -11,4 +42,6 @@
UserUpdateMe,
NewPassword,
UpdatePassword,
-)
\ No newline at end of file
+ UserProjectOrg,
+ UserOrganization
+)
diff --git a/backend/app/models/api_key.py b/backend/app/models/api_key.py
new file mode 100644
index 000000000..a1082bd3a
--- /dev/null
+++ b/backend/app/models/api_key.py
@@ -0,0 +1,27 @@
+import uuid
+import secrets
+from datetime import datetime
+from typing import Optional, List
+from sqlmodel import SQLModel, Field, Relationship
+
+
+class APIKeyBase(SQLModel):
+ organization_id: int = Field(foreign_key="organization.id", nullable=False, ondelete="CASCADE")
+ user_id: uuid.UUID = Field(foreign_key="user.id", nullable=False, ondelete="CASCADE")
+ key: str = Field(default_factory=lambda: secrets.token_urlsafe(32), unique=True, index=True)
+
+
+class APIKeyPublic(APIKeyBase):
+ id: int
+ created_at: datetime
+
+
+class APIKey(APIKeyBase, table=True):
+ id: int = Field(default=None, primary_key=True)
+ created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
+ is_deleted: bool = Field(default=False, nullable=False)
+ deleted_at: Optional[datetime] = Field(default=None, nullable=True)
+
+ # Relationships
+ organization: "Organization" = Relationship(back_populates="api_keys")
+ user: "User" = Relationship(back_populates="api_keys")
diff --git a/backend/app/models/organization.py b/backend/app/models/organization.py
new file mode 100644
index 000000000..3c073352c
--- /dev/null
+++ b/backend/app/models/organization.py
@@ -0,0 +1,35 @@
+from sqlmodel import Field, Relationship, SQLModel
+
+
+# Shared properties for an Organization
+class OrganizationBase(SQLModel):
+ name: str = Field(unique=True, index=True, max_length=255)
+ is_active: bool = True
+
+
+# Properties to receive via API on creation
+class OrganizationCreate(OrganizationBase):
+ pass
+
+
+# Properties to receive via API on update, all are optional
+class OrganizationUpdate(SQLModel):
+ name: str | None = Field(default=None, max_length=255)
+ is_active: bool | None = Field(default=None)
+
+
+# Database model for Organization
+class Organization(OrganizationBase, table=True):
+ id: int = Field(default=None, primary_key=True)
+
+ api_keys: list["APIKey"] = Relationship(back_populates="organization")
+
+
+# Properties to return via API
+class OrganizationPublic(OrganizationBase):
+ id: int
+
+
+class OrganizationsPublic(SQLModel):
+ data: list[OrganizationPublic]
+ count: int
\ No newline at end of file
diff --git a/backend/app/models/project.py b/backend/app/models/project.py
new file mode 100644
index 000000000..8568cc290
--- /dev/null
+++ b/backend/app/models/project.py
@@ -0,0 +1,39 @@
+from sqlmodel import Field, Relationship, SQLModel
+
+
+# Shared properties for a Project
+class ProjectBase(SQLModel):
+ name: str = Field(index=True, max_length=255)
+ description: str | None = Field(default=None, max_length=500)
+ is_active: bool = True
+
+
+# Properties to receive via API on creation
+class ProjectCreate(ProjectBase):
+ organization_id: int
+
+
+# Properties to receive via API on update, all are optional
+class ProjectUpdate(SQLModel):
+ name: str | None = Field(default=None, max_length=255)
+ description: str | None = Field(default=None, max_length=500)
+ is_active: bool | None = Field(default=None)
+
+
+# Database model for Project
+class Project(ProjectBase, table=True):
+ id: int = Field(default=None, primary_key=True)
+ organization_id: int = Field(foreign_key="organization.id", index=True)
+
+ users: list["ProjectUser"] = Relationship(back_populates="project", cascade_delete=True)
+
+
+# Properties to return via API
+class ProjectPublic(ProjectBase):
+ id: int
+ organization_id: int
+
+
+class ProjectsPublic(SQLModel):
+ data: list[ProjectPublic]
+ count: int
\ No newline at end of file
diff --git a/backend/app/models/project_user.py b/backend/app/models/project_user.py
new file mode 100644
index 000000000..4b06967f2
--- /dev/null
+++ b/backend/app/models/project_user.py
@@ -0,0 +1,36 @@
+import uuid
+from datetime import datetime
+from typing import Optional, List
+from sqlmodel import SQLModel, Field, Relationship
+
+
+# Shared properties
+class ProjectUserBase(SQLModel):
+ project_id: int = Field(foreign_key="project.id", nullable=False, ondelete="CASCADE")
+ user_id: uuid.UUID = Field(foreign_key="user.id", nullable=False, ondelete="CASCADE")
+ is_admin: bool = Field(default=False, nullable=False) # Determines if user is an admin of the project
+
+
+class ProjectUserPublic(ProjectUserBase):
+ id: int
+ created_at: datetime
+ updated_at: datetime
+
+
+# Database model, database table inferred from class name
+class ProjectUser(ProjectUserBase, table=True):
+ id: int = Field(default=None, primary_key=True)
+ created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
+ updated_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
+ is_deleted: bool = Field(default=False, nullable=False)
+ deleted_at: Optional[datetime] = Field(default=None, nullable=True)
+
+ # Relationships
+ project: "Project" = Relationship(back_populates="users")
+ user: "User" = Relationship(back_populates="projects")
+
+
+# Properties to return as a list
+class ProjectUsersPublic(SQLModel):
+ data: List[ProjectUserPublic]
+ count: int
diff --git a/backend/app/models/user.py b/backend/app/models/user.py
index a3abd0174..714d96160 100644
--- a/backend/app/models/user.py
+++ b/backend/app/models/user.py
@@ -49,8 +49,19 @@ class User(UserBase, table=True):
id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
hashed_password: str
items: list["Item"] = Relationship(back_populates="owner", cascade_delete=True)
+ projects: list["ProjectUser"] = Relationship(back_populates="user", cascade_delete=True)
+ api_keys: list["APIKey"] = Relationship(back_populates="user")
+class UserOrganization(UserBase):
+ id : uuid.UUID
+ organization_id: int | None
+
+
+class UserProjectOrg(UserOrganization):
+ project_id: int
+
+
# Properties to return via API, id is always required
class UserPublic(UserBase):
id: uuid.UUID
diff --git a/backend/app/tests/api/routes/test_api_key.py b/backend/app/tests/api/routes/test_api_key.py
new file mode 100644
index 000000000..18f9cb8eb
--- /dev/null
+++ b/backend/app/tests/api/routes/test_api_key.py
@@ -0,0 +1,149 @@
+import uuid
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session
+from app.main import app
+from app.models import APIKey, User, Organization
+from app.core.config import settings
+from app.crud import api_key as api_key_crud
+from app.tests.utils.utils import random_email
+from app.core.security import get_password_hash
+
+client = TestClient(app)
+
+def create_test_user(db: Session) -> User:
+ user = User(
+ email=random_email(),
+ hashed_password=get_password_hash("password123"),
+ is_superuser=True
+ )
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+ return user
+
+def create_test_organization(db: Session) -> Organization:
+ org = Organization(
+ name=f"Test Organization {uuid.uuid4()}",
+ description="Test Organization"
+ )
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+ return org
+
+
+def test_create_api_key(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ response = client.post(
+ f"{settings.API_V1_STR}/apikeys",
+ params={"organization_id": org.id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ data = response.json()
+ assert data["success"] is True
+ assert "id" in data["data"]
+ assert "key" in data["data"]
+ assert data["data"]["organization_id"] == org.id
+ assert data["data"]["user_id"] == str(user.id)
+
+
+def test_create_duplicate_api_key(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ client.post(
+ f"{settings.API_V1_STR}/apikeys",
+ params={"organization_id": org.id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ response = client.post(
+ f"{settings.API_V1_STR}/apikeys",
+ params={"organization_id": org.id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 400
+ assert "API Key already exists" in response.json()["detail"]
+
+
+def test_list_api_keys(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+ api_key = api_key_crud.create_api_key(db, organization_id=org.id, user_id=user.id)
+
+ response = client.get(
+ f"{settings.API_V1_STR}/apikeys",
+ params={"organization_id": org.id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ data = response.json()
+ assert data["success"] is True
+ assert isinstance(data["data"], list)
+ assert len(data["data"]) > 0
+ assert data["data"][0]["organization_id"] == org.id
+ assert data["data"][0]["user_id"] == str(user.id)
+
+
+def test_get_api_key(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+ api_key = api_key_crud.create_api_key(db, organization_id=org.id, user_id=user.id)
+
+ response = client.get(
+ f"{settings.API_V1_STR}/apikeys/{api_key.id}",
+ params={"organization_id": api_key.organization_id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ data = response.json()
+ assert data["success"] is True
+ assert data["data"]["id"] == api_key.id
+ assert data["data"]["organization_id"] == api_key.organization_id
+ assert data["data"]["user_id"] == str(user.id)
+
+
+def test_get_nonexistent_api_key(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ response = client.get(
+ f"{settings.API_V1_STR}/apikeys/999999",
+ params={"organization_id": org.id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 404
+ assert "API Key does not exist" in response.json()["detail"]
+
+
+def test_revoke_api_key(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+ api_key = api_key_crud.create_api_key(db, organization_id=org.id, user_id=user.id)
+
+ response = client.delete(
+ f"{settings.API_V1_STR}/apikeys/{api_key.id}",
+ params={"organization_id": api_key.organization_id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ data = response.json()
+ assert data["success"] is True
+ assert "API key revoked successfully" in data["data"]["message"]
+
+
+def test_revoke_nonexistent_api_key(db: Session, superuser_token_headers: dict[str, str]):
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ response = client.delete(
+ f"{settings.API_V1_STR}/apikeys/999999",
+ params={"organization_id": org.id, "user_id": user.id},
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 400
+ assert "API key not found or already deleted" in response.json()["detail"]
+
\ No newline at end of file
diff --git a/backend/app/tests/api/routes/test_org.py b/backend/app/tests/api/routes/test_org.py
new file mode 100644
index 000000000..bfb9021d2
--- /dev/null
+++ b/backend/app/tests/api/routes/test_org.py
@@ -0,0 +1,77 @@
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session, select
+
+from app import crud
+from app.core.config import settings
+from app.core.security import verify_password
+from app.models import User, UserCreate
+from app.tests.utils.utils import random_email, random_lower_string
+from app.models import Organization, OrganizationCreate, OrganizationUpdate
+from app.api.deps import get_db
+from app.main import app
+from app.crud.organization import create_organization, get_organization_by_id
+
+client = TestClient(app)
+
+@pytest.fixture
+def test_organization(db: Session, superuser_token_headers: dict[str, str]):
+ unique_name = f"TestOrg-{random_lower_string()}"
+ org_data = OrganizationCreate(name=unique_name, is_active=True)
+ organization = create_organization(session=db, org_create=org_data)
+ db.commit()
+ return organization
+
+# Test retrieving organizations
+def test_read_organizations(db: Session, superuser_token_headers: dict[str, str]):
+ response = client.get(f"{settings.API_V1_STR}/organizations/", headers=superuser_token_headers)
+ assert response.status_code == 200
+ response_data = response.json()
+ assert "data" in response_data
+ assert isinstance(response_data["data"], list)
+
+# Test creating an organization
+def test_create_organization(db: Session, superuser_token_headers: dict[str, str]):
+ unique_name = f"Org-{random_lower_string()}"
+ org_data = {"name": unique_name, "is_active": True}
+ response = client.post(
+ f"{settings.API_V1_STR}/organizations/", json=org_data, headers=superuser_token_headers
+ )
+
+ assert 200 <= response.status_code < 300
+ created_org = response.json()
+ assert "data" in created_org # Make sure there's a 'data' field
+ created_org_data = created_org["data"]
+ org = get_organization_by_id(session=db, org_id=created_org_data["id"])
+ assert org is not None # The organization should be found in the DB
+ assert org.name == created_org_data["name"]
+ assert org.is_active == created_org_data["is_active"]
+
+
+def test_update_organization(db: Session, test_organization: Organization, superuser_token_headers: dict[str, str]):
+ unique_name = f"UpdatedOrg-{random_lower_string()}" # Ensure a unique name
+ update_data = {"name": unique_name, "is_active": False}
+
+ response = client.patch(
+ f"{settings.API_V1_STR}/organizations/{test_organization.id}",
+ json=update_data,
+ headers=superuser_token_headers,
+ )
+
+ assert response.status_code == 200
+ updated_org = response.json()["data"]
+ assert "name" in updated_org
+ assert updated_org["name"] == update_data["name"]
+ assert "is_active" in updated_org
+ assert updated_org["is_active"] == update_data["is_active"]
+
+
+# Test deleting an organization
+def test_delete_organization(db: Session, test_organization: Organization, superuser_token_headers: dict[str, str]):
+ response = client.delete(
+ f"{settings.API_V1_STR}/organizations/{test_organization.id}", headers=superuser_token_headers
+ )
+ assert response.status_code == 200
+ response = client.get(f"{settings.API_V1_STR}/organizations/{test_organization.id}", headers=superuser_token_headers)
+ assert response.status_code == 404
+
\ No newline at end of file
diff --git a/backend/app/tests/api/routes/test_project.py b/backend/app/tests/api/routes/test_project.py
new file mode 100644
index 000000000..157dde3d1
--- /dev/null
+++ b/backend/app/tests/api/routes/test_project.py
@@ -0,0 +1,89 @@
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session
+
+from app.main import app
+from app.core.config import settings
+from app.models import Project, ProjectCreate, ProjectUpdate
+from app.models import Organization, OrganizationCreate
+from app.api.deps import get_db
+from app.tests.utils.utils import random_lower_string
+from app.crud.project import create_project
+from app.crud.organization import create_organization
+
+client = TestClient(app)
+
+@pytest.fixture
+def test_project(db: Session, superuser_token_headers: dict[str, str]):
+ unique_org_name = f"TestOrg-{random_lower_string()}"
+ org_data = OrganizationCreate(name=unique_org_name, is_active=True)
+ organization = create_organization(session=db, org_create=org_data)
+ db.commit()
+
+ unique_project_name = f"TestProject-{random_lower_string()}"
+ project_description = "This is a test project description."
+ project_data = ProjectCreate(name=unique_project_name, description=project_description, is_active=True, organization_id=organization.id)
+ project = create_project(session=db, project_create=project_data)
+ db.commit()
+
+ return project
+
+# Test retrieving projects
+def test_read_projects(db: Session, superuser_token_headers: dict[str, str]):
+ response = client.get(f"{settings.API_V1_STR}/projects/", headers=superuser_token_headers)
+ assert response.status_code == 200
+ response_data = response.json()
+ assert "data" in response_data
+ assert isinstance(response_data["data"], list)
+
+# Test creating a project
+def test_create_new_project(db: Session, superuser_token_headers: dict[str, str]):
+ unique_org_name = f"TestOrg-{random_lower_string()}"
+ org_data = OrganizationCreate(name=unique_org_name, is_active=True)
+ organization = create_organization(session=db, org_create=org_data)
+ db.commit()
+
+ unique_project_name = f"TestProject-{random_lower_string()}"
+ project_description = "This is a test project description."
+ project_data = ProjectCreate(name=unique_project_name, description=project_description, is_active=True, organization_id=organization.id)
+
+ response = client.post(
+        f"{settings.API_V1_STR}/projects/", json=project_data.model_dump(), headers=superuser_token_headers
+ )
+
+ assert response.status_code == 200
+ created_project = response.json()
+
+ # Adjusted for a nested structure, if needed
+ assert "data" in created_project # Check if response contains a 'data' field
+ assert created_project["data"]["name"] == unique_project_name # Now checking 'name' inside 'data'
+ assert created_project["data"]["description"] == project_description
+ assert created_project["data"]["organization_id"] == organization.id
+
+
+# Test updating a project
+def test_update_project(db: Session, test_project: Project, superuser_token_headers: dict[str, str]):
+ update_data = {"name": "Updated Project Name", "is_active": False}
+
+ response = client.patch(
+ f"{settings.API_V1_STR}/projects/{test_project.id}",
+ json=update_data,
+ headers=superuser_token_headers,
+ )
+
+ assert response.status_code == 200
+ updated_project = response.json()["data"]
+ assert "name" in updated_project
+ assert updated_project["name"] == update_data["name"]
+ assert "is_active" in updated_project
+ assert updated_project["is_active"] == update_data["is_active"]
+
+
+# Test deleting a project
+def test_delete_project(db: Session, test_project: Project, superuser_token_headers: dict[str, str]):
+ response = client.delete(
+ f"{settings.API_V1_STR}/projects/{test_project.id}", headers=superuser_token_headers
+ )
+ assert response.status_code == 200
+ response = client.get(f"{settings.API_V1_STR}/projects/{test_project.id}", headers=superuser_token_headers)
+ assert response.status_code == 404
\ No newline at end of file
diff --git a/backend/app/tests/api/routes/test_project_user.py b/backend/app/tests/api/routes/test_project_user.py
new file mode 100644
index 000000000..bd271db5d
--- /dev/null
+++ b/backend/app/tests/api/routes/test_project_user.py
@@ -0,0 +1,189 @@
+import uuid
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session, select
+from app.core.config import settings
+from app.models import User, Project, ProjectUser, Organization
+from app.crud.project_user import add_user_to_project
+from app.tests.utils.utils import random_email
+from app.tests.utils.user import authentication_token_from_email
+from app.core.security import get_password_hash
+from app.main import app
+
+client = TestClient(app)
+
+
+def create_user(db: Session) -> User:
+ """Helper function to create a user."""
+ user = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+ return user
+
+
+def create_organization_and_project(db: Session) -> tuple[Organization, Project]:
+ """Helper function to create an organization and a project."""
+
+ organization = Organization(name=f"Test Organization {uuid.uuid4()}", is_active=True)
+ db.add(organization)
+ db.commit()
+ db.refresh(organization)
+
+ # Ensure project with unique name
+ project_name = f"Test Project {uuid.uuid4()}" # Ensuring unique project name
+ project = Project(name=project_name, description="A test project", organization_id=organization.id, is_active=True)
+ db.add(project)
+ db.commit()
+ db.refresh(project)
+
+ return organization, project
+
+
+def test_add_user_to_project(client: TestClient, db: Session, superuser_token_headers: dict[str, str]) -> None:
+ """
+ Test adding a user to a project successfully.
+ """
+ user = create_user(db)
+ organization, project = create_organization_and_project(db)
+
+ response = client.post(
+ f"{settings.API_V1_STR}/project/users/{user.id}?is_admin=true&project_id={project.id}&organization_id={organization.id}",
+ headers=superuser_token_headers,
+ )
+
+ assert response.status_code == 200, response.text
+ added_user = response.json()['data']
+ assert added_user["user_id"] == str(user.id)
+ assert added_user["project_id"] == project.id
+ assert added_user["is_admin"] is True
+
+
+def test_add_user_not_found(client: TestClient, db: Session, superuser_token_headers: dict[str, str]) -> None:
+ """
+ Test adding a non-existing user to a project (should return 404).
+ """
+ organization, project = create_organization_and_project(db)
+
+ response = client.post(
+ f"{settings.API_V1_STR}/project/users/{uuid.uuid4()}?is_admin=false&project_id={project.id}&organization_id={organization.id}",
+ headers=superuser_token_headers,
+ )
+
+ assert response.status_code == 404
+ assert response.json()["error"] == "User not found"
+
+
+def test_add_existing_user_to_project(client: TestClient, db: Session, superuser_token_headers: dict[str, str]) -> None:
+ """
+ Test adding a user who is already in the project (should return 400).
+ """
+ user = create_user(db)
+ organization, project = create_organization_and_project(db)
+
+ # Add user to project
+ project_user = ProjectUser(project_id=project.id, user_id=user.id, is_admin=False)
+ db.add(project_user)
+ db.commit()
+
+ # Try to add the same user again
+ response = client.post(
+ f"{settings.API_V1_STR}/project/users/{user.id}?is_admin=false&project_id={project.id}&organization_id={organization.id}",
+ headers=superuser_token_headers,
+ )
+
+ assert response.status_code == 400
+ assert "User is already a member of this project" in response.json()["error"]
+
+
+def test_remove_user_from_project(
+ client: TestClient, db: Session, superuser_token_headers: dict[str, str]
+) -> None:
+ """
+ Test removing a user from a project successfully.
+ """
+ # Create organization and project
+ organization, project = create_organization_and_project(db)
+
+ # Create a user
+ user = create_user(db)
+
+ # Add user to project
+ add_user_to_project(db, project.id, user.id, is_admin=False)
+
+ # Remove user via API
+ response = client.delete(
+ f"{settings.API_V1_STR}/project/users/{user.id}?project_id={project.id}&organization_id={organization.id}",
+ headers=superuser_token_headers,
+ )
+
+ # Assertions
+ assert response.status_code == 200, response.text
+ assert response.json()['data'] == {"message": "User removed from project successfully."}
+
+ # Ensure user is marked as deleted in the database (Fixed)
+ project_user = db.exec(
+ select(ProjectUser).where(
+ ProjectUser.project_id == project.id,
+ ProjectUser.user_id == user.id,
+ )
+ ).first()
+
+ assert project_user is not None
+ assert project_user.is_deleted is True
+ assert project_user.deleted_at is not None
+
+
+def test_normal_user_cannot_add_user(
+ client: TestClient, db: Session, superuser_token_headers: dict[str, str]
+) -> None:
+ """
+ Test that a normal user (not admin) cannot add a user to a project.
+ """
+
+ organization, project = create_organization_and_project(db)
+
+ normal_user_email = random_email()
+ normal_user_token_headers = authentication_token_from_email(client=client, email=normal_user_email, db=db)
+
+ normal_user = db.exec(select(User).where(User.email == normal_user_email)).first()
+ add_user_to_project(db, project.id, normal_user.id, is_admin=False)
+
+ target_user = create_user(db)
+
+ # Normal user attempts to add target user to the project
+ response = client.post(
+ f"{settings.API_V1_STR}/project/users/{target_user.id}?is_admin=false&project_id={project.id}&organization_id={organization.id}",
+ headers=normal_user_token_headers,
+ )
+
+ assert response.status_code == 403
+ assert response.json()["error"] == "Only project admins or superusers can add users."
+
+
+def test_normal_user_cannot_remove_user(
+ client: TestClient, db: Session, superuser_token_headers: dict[str, str]
+) -> None:
+ """
+ Test that a normal user (not admin) cannot remove a user from a project.
+ """
+ organization, project = create_organization_and_project(db)
+
+ normal_user_email = random_email()
+ normal_user_token_headers = authentication_token_from_email(client=client, email=normal_user_email, db=db)
+
+ normal_user = db.exec(select(User).where(User.email == normal_user_email)).first()
+ add_user_to_project(db, project.id, normal_user.id, is_admin=False)
+
+ target_user = create_user(db)
+ add_user_to_project(db, project.id, target_user.id, is_admin=False)
+
+ # Normal user attempts to remove the target user
+ response = client.delete(
+ f"{settings.API_V1_STR}/project/users/{target_user.id}?project_id={project.id}&organization_id={organization.id}",
+ headers=normal_user_token_headers,
+ )
+
+ # Assertions
+ assert response.status_code == 403
+ assert response.json()["error"] == "Only project admins or superusers can remove users."
diff --git a/backend/app/tests/api/routes/test_threads.py b/backend/app/tests/api/routes/test_threads.py
new file mode 100644
index 000000000..78e406ab7
--- /dev/null
+++ b/backend/app/tests/api/routes/test_threads.py
@@ -0,0 +1,111 @@
+import pytest
+import openai
+
+from unittest.mock import MagicMock, patch
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+
+from app.api.routes.threads import router, process_run
+from app.utils import APIResponse
+
+# Wrap the router in a FastAPI app instance.
+app = FastAPI()
+app.include_router(router)
+client = TestClient(app)
+
+
+@patch("app.api.routes.threads.OpenAI")
+def test_threads_endpoint(mock_openai):
+ """
+ Test the /threads endpoint when creating a new thread.
+ The patched OpenAI client simulates:
+ - A successful assistant ID validation.
+ - New thread creation with a dummy thread id.
+ - No existing runs.
+ The expected response should have status "processing" and include a thread_id.
+ """
+ # Create a dummy client to simulate OpenAI API behavior.
+ dummy_client = MagicMock()
+ # Simulate a valid assistant ID by ensuring retrieve doesn't raise an error.
+ dummy_client.beta.assistants.retrieve.return_value = None
+ # Simulate thread creation.
+ dummy_thread = MagicMock()
+ dummy_thread.id = "dummy_thread_id"
+ dummy_client.beta.threads.create.return_value = dummy_thread
+ # Simulate message creation.
+ dummy_client.beta.threads.messages.create.return_value = None
+ # Simulate that no active run exists.
+ dummy_client.beta.threads.runs.list.return_value = MagicMock(data=[])
+
+ mock_openai.return_value = dummy_client
+
+ request_data = {
+ "question": "What is Glific?",
+ "assistant_id": "assistant_123",
+ "callback_url": "http://example.com/callback",
+ }
+ response = client.post("/threads", json=request_data)
+ assert response.status_code == 200
+ response_json = response.json()
+ assert response_json["success"] is True
+ assert response_json["data"]["status"] == "processing"
+ assert response_json["data"]["message"] == "Run started"
+ assert response_json["data"]["thread_id"] == "dummy_thread_id"
+
+
+@patch("app.api.routes.threads.OpenAI")
+@pytest.mark.parametrize(
+ "remove_citation, expected_message",
+ [
+ (
+ True,
+ "Glific is an open-source, two-way messaging platform designed for nonprofits to scale their outreach via WhatsApp",
+ ),
+ (
+ False,
+ "Glific is an open-source, two-way messaging platform designed for nonprofits to scale their outreach via WhatsAppγ1:2β citationγ",
+ ),
+ ],
+)
+def test_process_run_variants(mock_openai, remove_citation, expected_message):
+ """
+ Test process_run for both remove_citation variants:
+ - Mocks the OpenAI client to simulate a completed run.
+ - Verifies that send_callback is called with the expected message based on the remove_citation flag.
+ """
+ # Setup the mock client.
+ mock_client = MagicMock()
+ mock_openai.return_value = mock_client
+
+ # Create the request with the variable remove_citation flag.
+ request = {
+ "question": "What is Glific?",
+ "assistant_id": "assistant_123",
+ "callback_url": "http://example.com/callback",
+ "thread_id": "thread_123",
+ "remove_citation": remove_citation,
+ }
+
+ # Simulate a completed run.
+ mock_run = MagicMock()
+ mock_run.status = "completed"
+ mock_client.beta.threads.runs.create_and_poll.return_value = mock_run
+
+ # Set up the dummy message based on the remove_citation flag.
+ base_message = "Glific is an open-source, two-way messaging platform designed for nonprofits to scale their outreach via WhatsApp"
+ citation_message = base_message if remove_citation else f"{base_message}γ1:2β citationγ"
+ dummy_message = MagicMock()
+ dummy_message.content = [MagicMock(text=MagicMock(value=citation_message))]
+ mock_client.beta.threads.messages.list.return_value.data = [dummy_message]
+
+ # Patch send_callback and invoke process_run.
+ with patch("app.api.routes.threads.send_callback") as mock_send_callback:
+ process_run(request, mock_client)
+ mock_send_callback.assert_called_once()
+ callback_url, payload = mock_send_callback.call_args[0]
+ print(payload)
+ assert callback_url == request["callback_url"]
+ assert payload["data"]["message"] == expected_message
+ assert payload["data"]["status"] == "success"
+ assert payload["data"]["thread_id"] == "thread_123"
+ assert payload["success"] is True
\ No newline at end of file
diff --git a/backend/app/tests/api/routes/test_users.py b/backend/app/tests/api/routes/test_users.py
index ba9be6542..68cb3adab 100644
--- a/backend/app/tests/api/routes/test_users.py
+++ b/backend/app/tests/api/routes/test_users.py
@@ -110,7 +110,7 @@ def test_get_existing_user_permissions_error(
headers=normal_user_token_headers,
)
assert r.status_code == 403
- assert r.json() == {"detail": "The user doesn't have enough privileges"}
+ assert r.json()["detail"] == "The user doesn't have enough privileges"
def test_create_user_existing_username(
diff --git a/backend/app/tests/api/test_deps.py b/backend/app/tests/api/test_deps.py
new file mode 100644
index 000000000..140906867
--- /dev/null
+++ b/backend/app/tests/api/test_deps.py
@@ -0,0 +1,143 @@
+import pytest
+import uuid
+from sqlmodel import Session, select
+from fastapi import HTTPException
+from app.api.deps import verify_user_project_organization
+from app.models import User, Organization, Project, ProjectUser, UserProjectOrg, UserOrganization
+from app.tests.utils.utils import random_email
+from app.core.security import get_password_hash
+
+
+def create_org_project(db: Session, org_active=True, proj_active=True) -> tuple[Organization, Project]:
+ """Helper function to create an organization and a project with customizable active states."""
+ org = Organization(name=f"Test Org {uuid.uuid4()}", is_active=org_active)
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+
+ proj = Project(
+ name=f"Test Proj {uuid.uuid4()}",
+ description="A test project",
+ organization_id=org.id,
+ is_active=proj_active
+ )
+ db.add(proj)
+ db.commit()
+ db.refresh(proj)
+
+ return org, proj
+
+
+def create_user(db: Session, is_superuser=False) -> UserOrganization:
+    """Helper function to create a user, returned wrapped as a UserOrganization (organization_id unset)."""
+    user = User(email=random_email(), hashed_password=get_password_hash("password123"), is_superuser=is_superuser)
+    db.add(user)
+    db.commit()
+    db.refresh(user)
+    user_org = UserOrganization(**user.model_dump(), organization_id=None)
+    return user_org
+
+
+def test_verify_success(db: Session):
+ """Valid user in a project passes verification."""
+ user = create_user(db)
+ org, proj = create_org_project(db)
+
+ db.add(ProjectUser(project_id=proj.id, user_id=user.id, is_admin=False))
+ db.commit()
+
+ result = verify_user_project_organization(db, user, proj.id, org.id)
+
+ assert isinstance(result, UserProjectOrg)
+ assert result.project_id == proj.id
+ assert result.organization_id == org.id
+
+
+def test_verify_superuser_bypass(db: Session):
+ """Superuser bypasses project membership check."""
+ superuser = create_user(db, is_superuser=True)
+ org, proj = create_org_project(db)
+
+ result = verify_user_project_organization(db, superuser, proj.id, org.id)
+
+ assert isinstance(result, UserProjectOrg)
+ assert result.project_id == proj.id
+ assert result.organization_id == org.id
+
+
+def test_verify_no_org(db: Session):
+ """Missing organization results in a 404 error."""
+ user = create_user(db)
+ invalid_org_id = 9999
+
+ assert db.exec(select(Organization).where(Organization.id == invalid_org_id)).first() is None
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_user_project_organization(db, user, project_id=1, organization_id=invalid_org_id)
+
+ assert exc_info.value.status_code == 404
+ assert exc_info.value.detail == "Organization not found"
+
+
+def test_verify_no_project(db: Session):
+ """Missing project results in a 404 error."""
+ user = create_user(db)
+ org = Organization(name=f"Test Org {uuid.uuid4()}", is_active=True)
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_user_project_organization(db, user, 9999, org.id)
+
+ assert exc_info.value.status_code == 404
+ assert exc_info.value.detail == "Project not found"
+
+
+def test_verify_project_not_in_org(db: Session):
+ """Project not belonging to organization results in a 403 error."""
+ user = create_user(db)
+ org1, proj1 = create_org_project(db)
+ org2, proj2 = create_org_project(db)
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_user_project_organization(db, user, proj2.id, org1.id)
+
+ assert exc_info.value.status_code == 403
+ assert exc_info.value.detail == "Project does not belong to the organization"
+
+
+def test_verify_user_not_in_project(db: Session):
+ """User not in project results in a 403 error."""
+ user = create_user(db)
+ org, proj = create_org_project(db)
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_user_project_organization(db, user, proj.id, org.id)
+
+ assert exc_info.value.status_code == 403
+ assert exc_info.value.detail == "User is not part of the project"
+
+
+def test_verify_inactive_organization(db: Session):
+ """Inactive organization results in a 400 error."""
+ user = create_user(db)
+ org, proj = create_org_project(db, org_active=False)
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_user_project_organization(db, user, proj.id, org.id)
+
+ assert exc_info.value.status_code == 400
+ assert exc_info.value.detail == "Organization is not active"
+
+
+def test_verify_inactive_project(db: Session):
+ """Inactive project results in a 400 error."""
+ user = create_user(db)
+ org, proj = create_org_project(db, proj_active=False)
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_user_project_organization(db, user, proj.id, org.id)
+
+ assert exc_info.value.status_code == 400
+ assert exc_info.value.detail == "Project is not active"
diff --git a/backend/app/tests/conftest.py b/backend/app/tests/conftest.py
index 90ab39a35..1293ca85b 100644
--- a/backend/app/tests/conftest.py
+++ b/backend/app/tests/conftest.py
@@ -7,7 +7,14 @@
from app.core.config import settings
from app.core.db import engine, init_db
from app.main import app
-from app.models import Item, User
+from app.models import (
+ APIKey,
+ Item,
+ Organization,
+ Project,
+ ProjectUser,
+ User,
+)
from app.tests.utils.user import authentication_token_from_email
from app.tests.utils.utils import get_superuser_token_headers
@@ -17,13 +24,17 @@ def db() -> Generator[Session, None, None]:
with Session(engine) as session:
init_db(session)
yield session
- statement = delete(Item)
- session.execute(statement)
- statement = delete(User)
- session.execute(statement)
+ # Delete data in reverse dependency order
+ session.execute(delete(ProjectUser)) # Many-to-many relationship
+ session.execute(delete(Project))
+ session.execute(delete(Organization))
+ session.execute(delete(Item))
+ session.execute(delete(APIKey))
+ session.execute(delete(User))
session.commit()
+
@pytest.fixture(scope="module")
def client() -> Generator[TestClient, None, None]:
with TestClient(app) as c:
diff --git a/backend/app/tests/crud/test_api_key.py b/backend/app/tests/crud/test_api_key.py
new file mode 100644
index 000000000..48cc571e6
--- /dev/null
+++ b/backend/app/tests/crud/test_api_key.py
@@ -0,0 +1,118 @@
+import uuid
+import pytest
+from sqlmodel import Session, select
+from app.crud import api_key as api_key_crud
+from app.models import APIKey, User, Organization
+from app.tests.utils.utils import random_email
+from app.core.security import get_password_hash
+
+# Helper function to create a user
+def create_test_user(db: Session) -> User:
+ user = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+ return user
+
+
+# Helper function to create an organization with a random name
+def create_test_organization(db: Session) -> Organization:
+ org = Organization(name=f"Test Organization {uuid.uuid4()}", description="Test Organization")
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+ return org
+
+def test_create_api_key(db: Session) -> None:
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ api_key = api_key_crud.create_api_key(db, org.id, user.id)
+
+ assert api_key.key.startswith("ApiKey ")
+ assert len(api_key.key) > 32
+ assert api_key.organization_id == org.id
+ assert api_key.user_id == user.id
+
+def test_get_api_key(db: Session) -> None:
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ created_key = api_key_crud.create_api_key(db, org.id, user.id)
+ retrieved_key = api_key_crud.get_api_key(db, created_key.id)
+
+ assert retrieved_key is not None
+ assert retrieved_key.id == created_key.id
+ assert retrieved_key.key == created_key.key
+
+def test_get_api_key_not_found(db: Session) -> None:
+ result = api_key_crud.get_api_key(db, 9999) # Non-existent ID
+ assert result is None
+
+def test_get_api_keys_by_organization(db: Session) -> None:
+ user1 = create_test_user(db)
+ user2 = create_test_user(db)
+ org = create_test_organization(db)
+
+ api_key1 = api_key_crud.create_api_key(db, org.id, user1.id)
+ api_key2 = api_key_crud.create_api_key(db, org.id, user2.id)
+
+ api_keys = api_key_crud.get_api_keys_by_organization(db, org.id)
+
+ assert len(api_keys) == 2
+ assert any(key.id == api_key1.id for key in api_keys)
+ assert any(key.id == api_key2.id for key in api_keys)
+
+def test_delete_api_key(db: Session) -> None:
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ api_key = api_key_crud.create_api_key(db, org.id, user.id)
+ api_key_crud.delete_api_key(db, api_key.id)
+
+ deleted_key = db.exec(
+ select(APIKey).where(APIKey.id == api_key.id)
+ ).first()
+
+ assert deleted_key is not None
+ assert deleted_key.is_deleted is True
+ assert deleted_key.deleted_at is not None
+
+def test_delete_api_key_already_deleted(db: Session) -> None:
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ api_key = api_key_crud.create_api_key(db, org.id, user.id)
+ api_key_crud.delete_api_key(db, api_key.id)
+
+ with pytest.raises(ValueError, match="API key not found or already deleted"):
+ api_key_crud.delete_api_key(db, api_key.id)
+
+def test_get_api_key_by_value(db: Session) -> None:
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ api_key = api_key_crud.create_api_key(db, org.id, user.id)
+ retrieved_key = api_key_crud.get_api_key_by_value(db, api_key.key)
+
+ assert retrieved_key is not None
+ assert retrieved_key.id == api_key.id
+ assert retrieved_key.key == api_key.key
+
+def test_get_api_key_by_user_org(db: Session) -> None:
+ user = create_test_user(db)
+ org = create_test_organization(db)
+
+ api_key = api_key_crud.create_api_key(db, org.id, user.id)
+ retrieved_key = api_key_crud.get_api_key_by_user_org(db, org.id, user.id)
+
+ assert retrieved_key is not None
+ assert retrieved_key.id == api_key.id
+ assert retrieved_key.organization_id == org.id
+ assert retrieved_key.user_id == user.id
+
+def test_get_api_key_by_user_org_not_found(db: Session) -> None:
+ org = create_test_organization(db)
+ user_id = uuid.uuid4()
+ result = api_key_crud.get_api_key_by_user_org(db, org.id, user_id)
+ assert result is None
diff --git a/backend/app/tests/crud/test_org.py b/backend/app/tests/crud/test_org.py
new file mode 100644
index 000000000..0b7eaded1
--- /dev/null
+++ b/backend/app/tests/crud/test_org.py
@@ -0,0 +1,34 @@
+from sqlmodel import Session
+
+from app.crud.organization import create_organization, get_organization_by_id
+from app.models import Organization, OrganizationCreate
+from app.tests.utils.utils import random_lower_string
+
+
+def test_create_organization(db: Session) -> None:
+ """Test creating an organization."""
+ name = random_lower_string()
+ org_in = OrganizationCreate(name=name)
+ org = create_organization(session=db, org_create=org_in)
+
+ assert org.name == name
+ assert org.id is not None
+ assert org.is_active is True # Default should be active
+
+
+def test_get_organization_by_id(db: Session) -> None:
+ """Test retrieving an organization by ID."""
+ name = random_lower_string()
+ org_in = OrganizationCreate(name=name)
+ org = create_organization(session=db, org_create=org_in)
+
+ fetched_org = get_organization_by_id(session=db, org_id=org.id)
+ assert fetched_org
+ assert fetched_org.id == org.id
+ assert fetched_org.name == org.name
+
+
+def test_get_non_existent_organization(db: Session) -> None:
+ """Test retrieving a non-existent organization should return None."""
+ fetched_org = get_organization_by_id(session=db, org_id=999) # Assuming ID 999 does not exist
+ assert fetched_org is None
\ No newline at end of file
diff --git a/backend/app/tests/crud/test_project.py b/backend/app/tests/crud/test_project.py
new file mode 100644
index 000000000..9f3a543d5
--- /dev/null
+++ b/backend/app/tests/crud/test_project.py
@@ -0,0 +1,65 @@
+import pytest
+from sqlmodel import Session
+
+from app.models import Project, ProjectCreate, Organization
+from app.crud.project import create_project, get_project_by_id, get_projects_by_organization
+from app.tests.utils.utils import random_lower_string
+
+def test_create_project(db: Session) -> None:
+ """Test creating a project linked to an organization."""
+ org = Organization(name=random_lower_string())
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+
+ project_name = random_lower_string()
+ project_data = ProjectCreate(name=project_name, description="Test description", is_active=True, organization_id=org.id)
+
+ project = create_project(session=db, project_create=project_data)
+
+ assert project.id is not None
+ assert project.name == project_name
+ assert project.description == "Test description"
+ assert project.organization_id == org.id
+
+
+def test_get_project_by_id(db: Session) -> None:
+ """Test retrieving a project by ID."""
+ org = Organization(name=random_lower_string())
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+
+ project_name = random_lower_string()
+ project_data = ProjectCreate(name=project_name, description="Test", organization_id=org.id)
+
+ project = create_project(session=db, project_create=project_data)
+
+ fetched_project = get_project_by_id(session=db, project_id=project.id)
+ assert fetched_project is not None
+ assert fetched_project.id == project.id
+ assert fetched_project.name == project.name
+
+
+
+def test_get_projects_by_organization(db: Session) -> None:
+ """Test retrieving all projects for an organization."""
+ org = Organization(name=random_lower_string())
+ db.add(org)
+ db.commit()
+ db.refresh(org)
+
+ project_1 = create_project(session=db, project_create=ProjectCreate(name=random_lower_string(), organization_id=org.id))
+ project_2 = create_project(session=db, project_create=ProjectCreate(name=random_lower_string(), organization_id=org.id))
+
+ projects = get_projects_by_organization(session=db, org_id=org.id)
+
+ assert len(projects) == 2
+ assert project_1 in projects
+ assert project_2 in projects
+
+
+def test_get_non_existent_project(db: Session) -> None:
+ """Test retrieving a non-existent project should return None."""
+ fetched_project = get_project_by_id(session=db, project_id=999)
+ assert fetched_project is None
diff --git a/backend/app/tests/crud/test_project_user.py b/backend/app/tests/crud/test_project_user.py
new file mode 100644
index 000000000..a59e3a15e
--- /dev/null
+++ b/backend/app/tests/crud/test_project_user.py
@@ -0,0 +1,128 @@
+import uuid
+from sqlmodel import Session, select
+from datetime import datetime
+import pytest
+
+from app.crud import project_user as project_user_crud
+from app.models import ProjectUser, ProjectUserPublic, User, Project, Organization
+from app.tests.utils.utils import random_email
+from app.core.security import get_password_hash
+
+
+def create_organization_and_project(db: Session) -> tuple[Organization, Project]:
+ """Helper function to create an organization and a project."""
+
+ organization = Organization(name=f"Test Organization {uuid.uuid4()}", is_active=True)
+ db.add(organization)
+ db.commit()
+ db.refresh(organization)
+
+ # Ensure project with unique name
+ project_name = f"Test Project {uuid.uuid4()}" # Ensuring unique project name
+ project = Project(name=project_name, description="A test project", organization_id=organization.id, is_active=True)
+ db.add(project)
+ db.commit()
+ db.refresh(project)
+
+ return organization, project
+
+def test_is_project_admin(db: Session) -> None:
+ organization, project = create_organization_and_project(db)
+
+ user = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ project_user = ProjectUser(project_id=project.id, user_id=user.id, is_admin=True)
+ db.add(project_user)
+ db.commit()
+ db.refresh(project_user)
+
+ assert project_user_crud.is_project_admin(db, user.id, project.id) is True
+
+
+def test_add_user_to_project(db: Session) -> None:
+ organization, project = create_organization_and_project(db)
+
+ user = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ project_user = project_user_crud.add_user_to_project(db, project.id, user.id, is_admin=True)
+
+ assert project_user.user_id == user.id
+ assert project_user.project_id == project.id
+ assert project_user.is_admin is True
+
+
+def test_add_user_to_project_duplicate(db: Session) -> None:
+ organization, project = create_organization_and_project(db)
+
+ user = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ project_user_crud.add_user_to_project(db, project.id, user.id)
+
+ with pytest.raises(ValueError, match="User is already a member of this project"):
+ project_user_crud.add_user_to_project(db, project.id, user.id)
+
+
+def test_remove_user_from_project(db: Session) -> None:
+ organization, project = create_organization_and_project(db)
+
+ user = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ # Add user to project
+ project_user_crud.add_user_to_project(db, project.id, user.id)
+
+ # Remove user from project
+ project_user_crud.remove_user_from_project(db, project.id, user.id)
+
+ # Retrieve project user with both project_id and user_id
+ project_user = db.exec(
+ select(ProjectUser).where(
+ ProjectUser.project_id == project.id,
+ ProjectUser.user_id == user.id
+ )
+ ).first()
+
+ assert project_user is not None # Ensure the record still exists (soft delete)
+ assert project_user.is_deleted is True
+ assert project_user.deleted_at is not None
+
+
+def test_remove_user_from_project_not_member(db: Session) -> None:
+ organization, project = create_organization_and_project(db)
+
+ project_id = project.id
+ user_id = uuid.uuid4()
+
+ with pytest.raises(ValueError, match="User is not a member of this project or already removed"):
+ project_user_crud.remove_user_from_project(db, project_id, user_id)
+
+
+def test_get_users_by_project(db: Session) -> None:
+ organization, project = create_organization_and_project(db)
+
+ user1 = User(email=random_email(), hashed_password=get_password_hash("password123"))
+ user2 = User(email=random_email(), hashed_password=get_password_hash("password123"))
+
+ db.add_all([user1, user2])
+ db.commit()
+ db.refresh(user1)
+ db.refresh(user2)
+
+ project_user_crud.add_user_to_project(db, project.id, user1.id)
+ project_user_crud.add_user_to_project(db, project.id, user2.id)
+
+ users, total_count = project_user_crud.get_users_by_project(db, project.id, skip=0, limit=10)
+
+ assert total_count == 2
+    assert len(users) == 2
diff --git a/backend/app/utils.py b/backend/app/utils.py
index ac029f634..6af3f10cb 100644
--- a/backend/app/utils.py
+++ b/backend/app/utils.py
@@ -2,7 +2,7 @@
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from pathlib import Path
-from typing import Any
+from typing import Any, Dict, Generic, Optional, TypeVar
import emails # type: ignore
import jwt
@@ -12,9 +12,28 @@
from app.core import security
from app.core.config import settings
+from typing import Generic, Optional, TypeVar
+from pydantic import BaseModel
+
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
+T = TypeVar("T")
+
+class APIResponse(BaseModel, Generic[T]):
+ success: bool
+ data: Optional[T] = None
+ error: Optional[str] = None
+ metadata: Optional[Dict[str, Any]] = None
+
+ @classmethod
+ def success_response(cls, data: T, metadata: Optional[Dict[str, Any]] = None) -> "APIResponse[T]":
+ return cls(success=True, data=data, error=None, metadata=metadata)
+
+ @classmethod
+ def failure_response(cls, error: str) -> "APIResponse[None]":
+ return cls(success=False, data=None, error=error)
+
@dataclass
class EmailData:
@@ -120,4 +139,4 @@ def verify_password_reset_token(token: str) -> str | None:
)
return str(decoded_token["sub"])
except InvalidTokenError:
- return None
+        return None
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
index 1c77b83de..bfe699646 100644
--- a/backend/pyproject.toml
+++ b/backend/pyproject.toml
@@ -21,6 +21,8 @@ dependencies = [
"pydantic-settings<3.0.0,>=2.2.1",
"sentry-sdk[fastapi]<2.0.0,>=1.40.6",
"pyjwt<3.0.0,>=2.8.0",
+ "openai>=1.67.0",
+ "pytest>=7.4.4",
]
[tool.uv]
diff --git a/backend/scripts/test.sh b/backend/scripts/test.sh
index df23f702e..8355f8e12 100755
--- a/backend/scripts/test.sh
+++ b/backend/scripts/test.sh
@@ -1,8 +1,15 @@
-#!/usr/bin/env bash
-
+#!/usr/bin/env bash
set -e
set -x
+# Run tests with coverage tracking
coverage run --source=app -m pytest
+
+# Generate a human-readable coverage report in the terminal
coverage report --show-missing
+
+# Generate an HTML report for local viewing
coverage html --title "${@-coverage}"
+
+# Generate the XML report for Codecov
+coverage xml
diff --git a/backend/uv.lock b/backend/uv.lock
index cfc200d3c..48108f893 100644
--- a/backend/uv.lock
+++ b/backend/uv.lock
@@ -1,4 +1,5 @@
version = 1
+revision = 1
requires-python = ">=3.10, <4.0"
resolution-markers = [
"python_full_version < '3.13'",
@@ -55,11 +56,13 @@ dependencies = [
{ name = "fastapi", extra = ["standard"] },
{ name = "httpx" },
{ name = "jinja2" },
+ { name = "openai" },
{ name = "passlib", extra = ["bcrypt"] },
{ name = "psycopg", extra = ["binary"] },
{ name = "pydantic" },
{ name = "pydantic-settings" },
{ name = "pyjwt" },
+ { name = "pytest" },
{ name = "python-multipart" },
{ name = "sentry-sdk", extra = ["fastapi"] },
{ name = "sqlmodel" },
@@ -85,11 +88,13 @@ requires-dist = [
{ name = "fastapi", extras = ["standard"], specifier = ">=0.114.2,<1.0.0" },
{ name = "httpx", specifier = ">=0.25.1,<1.0.0" },
{ name = "jinja2", specifier = ">=3.1.4,<4.0.0" },
+ { name = "openai", specifier = ">=1.67.0" },
{ name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4,<2.0.0" },
{ name = "psycopg", extras = ["binary"], specifier = ">=3.1.13,<4.0.0" },
{ name = "pydantic", specifier = ">2.0" },
{ name = "pydantic-settings", specifier = ">=2.2.1,<3.0.0" },
{ name = "pyjwt", specifier = ">=2.8.0,<3.0.0" },
+ { name = "pytest", specifier = ">=7.4.4" },
{ name = "python-multipart", specifier = ">=0.0.7,<1.0.0" },
{ name = "sentry-sdk", extras = ["fastapi"], specifier = ">=1.40.6,<2.0.0" },
{ name = "sqlmodel", specifier = ">=0.0.21,<1.0.0" },
@@ -220,7 +225,7 @@ name = "click"
version = "8.1.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "colorama", marker = "platform_system == 'Windows'" },
+ { name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 }
wheels = [
@@ -325,6 +330,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8e/41/9307e4f5f9976bc8b7fea0b66367734e8faf3ec84bc0d412d8cfabbb66cd/distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784", size = 468850 },
]
+[[package]]
+name = "distro"
+version = "1.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 },
+]
+
[[package]]
name = "dnspython"
version = "2.6.1"
@@ -581,6 +595,65 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 },
]
+[[package]]
+name = "jiter"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1e/c2/e4562507f52f0af7036da125bb699602ead37a2332af0788f8e0a3417f36/jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893", size = 162604 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b0/82/39f7c9e67b3b0121f02a0b90d433626caa95a565c3d2449fea6bcfa3f5f5/jiter-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:816ec9b60fdfd1fec87da1d7ed46c66c44ffec37ab2ef7de5b147b2fce3fd5ad", size = 314540 },
+ { url = "https://files.pythonhosted.org/packages/01/07/7bf6022c5a152fca767cf5c086bb41f7c28f70cf33ad259d023b53c0b858/jiter-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b1d3086f8a3ee0194ecf2008cf81286a5c3e540d977fa038ff23576c023c0ea", size = 321065 },
+ { url = "https://files.pythonhosted.org/packages/6c/b2/de3f3446ecba7c48f317568e111cc112613da36c7b29a6de45a1df365556/jiter-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1339f839b91ae30b37c409bf16ccd3dc453e8b8c3ed4bd1d6a567193651a4a51", size = 341664 },
+ { url = "https://files.pythonhosted.org/packages/13/cf/6485a4012af5d407689c91296105fcdb080a3538e0658d2abf679619c72f/jiter-0.9.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffba79584b3b670fefae66ceb3a28822365d25b7bf811e030609a3d5b876f538", size = 364635 },
+ { url = "https://files.pythonhosted.org/packages/0d/f7/4a491c568f005553240b486f8e05c82547340572d5018ef79414b4449327/jiter-0.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cfc7d0a8e899089d11f065e289cb5b2daf3d82fbe028f49b20d7b809193958d", size = 406288 },
+ { url = "https://files.pythonhosted.org/packages/d3/ca/f4263ecbce7f5e6bded8f52a9f1a66540b270c300b5c9f5353d163f9ac61/jiter-0.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e00a1a2bbfaaf237e13c3d1592356eab3e9015d7efd59359ac8b51eb56390a12", size = 397499 },
+ { url = "https://files.pythonhosted.org/packages/ac/a2/522039e522a10bac2f2194f50e183a49a360d5f63ebf46f6d890ef8aa3f9/jiter-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1d9870561eb26b11448854dce0ff27a9a27cb616b632468cafc938de25e9e51", size = 352926 },
+ { url = "https://files.pythonhosted.org/packages/b1/67/306a5c5abc82f2e32bd47333a1c9799499c1c3a415f8dde19dbf876f00cb/jiter-0.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9872aeff3f21e437651df378cb75aeb7043e5297261222b6441a620218b58708", size = 384506 },
+ { url = "https://files.pythonhosted.org/packages/0f/89/c12fe7b65a4fb74f6c0d7b5119576f1f16c79fc2953641f31b288fad8a04/jiter-0.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1fd19112d1049bdd47f17bfbb44a2c0001061312dcf0e72765bfa8abd4aa30e5", size = 520621 },
+ { url = "https://files.pythonhosted.org/packages/c4/2b/d57900c5c06e6273fbaa76a19efa74dbc6e70c7427ab421bf0095dfe5d4a/jiter-0.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ef5da104664e526836070e4a23b5f68dec1cc673b60bf1edb1bfbe8a55d0678", size = 512613 },
+ { url = "https://files.pythonhosted.org/packages/89/05/d8b90bfb21e58097d5a4e0224f2940568366f68488a079ae77d4b2653500/jiter-0.9.0-cp310-cp310-win32.whl", hash = "sha256:cb12e6d65ebbefe5518de819f3eda53b73187b7089040b2d17f5b39001ff31c4", size = 206613 },
+ { url = "https://files.pythonhosted.org/packages/2c/1d/5767f23f88e4f885090d74bbd2755518050a63040c0f59aa059947035711/jiter-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:c43ca669493626d8672be3b645dbb406ef25af3f4b6384cfd306da7eb2e70322", size = 208371 },
+ { url = "https://files.pythonhosted.org/packages/23/44/e241a043f114299254e44d7e777ead311da400517f179665e59611ab0ee4/jiter-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6c4d99c71508912a7e556d631768dcdef43648a93660670986916b297f1c54af", size = 314654 },
+ { url = "https://files.pythonhosted.org/packages/fb/1b/a7e5e42db9fa262baaa9489d8d14ca93f8663e7f164ed5e9acc9f467fc00/jiter-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f60fb8ce7df529812bf6c625635a19d27f30806885139e367af93f6e734ef58", size = 320909 },
+ { url = "https://files.pythonhosted.org/packages/60/bf/8ebdfce77bc04b81abf2ea316e9c03b4a866a7d739cf355eae4d6fd9f6fe/jiter-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51c4e1a4f8ea84d98b7b98912aa4290ac3d1eabfde8e3c34541fae30e9d1f08b", size = 341733 },
+ { url = "https://files.pythonhosted.org/packages/a8/4e/754ebce77cff9ab34d1d0fa0fe98f5d42590fd33622509a3ba6ec37ff466/jiter-0.9.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f4c677c424dc76684fea3e7285a7a2a7493424bea89ac441045e6a1fb1d7b3b", size = 365097 },
+ { url = "https://files.pythonhosted.org/packages/32/2c/6019587e6f5844c612ae18ca892f4cd7b3d8bbf49461ed29e384a0f13d98/jiter-0.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2221176dfec87f3470b21e6abca056e6b04ce9bff72315cb0b243ca9e835a4b5", size = 406603 },
+ { url = "https://files.pythonhosted.org/packages/da/e9/c9e6546c817ab75a1a7dab6dcc698e62e375e1017113e8e983fccbd56115/jiter-0.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c7adb66f899ffa25e3c92bfcb593391ee1947dbdd6a9a970e0d7e713237d572", size = 396625 },
+ { url = "https://files.pythonhosted.org/packages/be/bd/976b458add04271ebb5a255e992bd008546ea04bb4dcadc042a16279b4b4/jiter-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98d27330fdfb77913c1097a7aab07f38ff2259048949f499c9901700789ac15", size = 351832 },
+ { url = "https://files.pythonhosted.org/packages/07/51/fe59e307aaebec9265dbad44d9d4381d030947e47b0f23531579b9a7c2df/jiter-0.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eda3f8cc74df66892b1d06b5d41a71670c22d95a1ca2cbab73654745ce9d0419", size = 384590 },
+ { url = "https://files.pythonhosted.org/packages/db/55/5dcd2693794d8e6f4889389ff66ef3be557a77f8aeeca8973a97a7c00557/jiter-0.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dd5ab5ddc11418dce28343123644a100f487eaccf1de27a459ab36d6cca31043", size = 520690 },
+ { url = "https://files.pythonhosted.org/packages/54/d5/9f51dc90985e9eb251fbbb747ab2b13b26601f16c595a7b8baba964043bd/jiter-0.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42f8a68a69f047b310319ef8e2f52fdb2e7976fb3313ef27df495cf77bcad965", size = 512649 },
+ { url = "https://files.pythonhosted.org/packages/a6/e5/4e385945179bcf128fa10ad8dca9053d717cbe09e258110e39045c881fe5/jiter-0.9.0-cp311-cp311-win32.whl", hash = "sha256:a25519efb78a42254d59326ee417d6f5161b06f5da827d94cf521fed961b1ff2", size = 206920 },
+ { url = "https://files.pythonhosted.org/packages/4c/47/5e0b94c603d8e54dd1faab439b40b832c277d3b90743e7835879ab663757/jiter-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:923b54afdd697dfd00d368b7ccad008cccfeb1efb4e621f32860c75e9f25edbd", size = 210119 },
+ { url = "https://files.pythonhosted.org/packages/af/d7/c55086103d6f29b694ec79156242304adf521577530d9031317ce5338c59/jiter-0.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7b46249cfd6c48da28f89eb0be3f52d6fdb40ab88e2c66804f546674e539ec11", size = 309203 },
+ { url = "https://files.pythonhosted.org/packages/b0/01/f775dfee50beb420adfd6baf58d1c4d437de41c9b666ddf127c065e5a488/jiter-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:609cf3c78852f1189894383cf0b0b977665f54cb38788e3e6b941fa6d982c00e", size = 319678 },
+ { url = "https://files.pythonhosted.org/packages/ab/b8/09b73a793714726893e5d46d5c534a63709261af3d24444ad07885ce87cb/jiter-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d726a3890a54561e55a9c5faea1f7655eda7f105bd165067575ace6e65f80bb2", size = 341816 },
+ { url = "https://files.pythonhosted.org/packages/35/6f/b8f89ec5398b2b0d344257138182cc090302854ed63ed9c9051e9c673441/jiter-0.9.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e89dc075c1fef8fa9be219e249f14040270dbc507df4215c324a1839522ea75", size = 364152 },
+ { url = "https://files.pythonhosted.org/packages/9b/ca/978cc3183113b8e4484cc7e210a9ad3c6614396e7abd5407ea8aa1458eef/jiter-0.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e8ffa3c353b1bc4134f96f167a2082494351e42888dfcf06e944f2729cbe1d", size = 406991 },
+ { url = "https://files.pythonhosted.org/packages/13/3a/72861883e11a36d6aa314b4922125f6ae90bdccc225cd96d24cc78a66385/jiter-0.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:203f28a72a05ae0e129b3ed1f75f56bc419d5f91dfacd057519a8bd137b00c42", size = 395824 },
+ { url = "https://files.pythonhosted.org/packages/87/67/22728a86ef53589c3720225778f7c5fdb617080e3deaed58b04789418212/jiter-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fca1a02ad60ec30bb230f65bc01f611c8608b02d269f998bc29cca8619a919dc", size = 351318 },
+ { url = "https://files.pythonhosted.org/packages/69/b9/f39728e2e2007276806d7a6609cda7fac44ffa28ca0d02c49a4f397cc0d9/jiter-0.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:237e5cee4d5d2659aaf91bbf8ec45052cc217d9446070699441a91b386ae27dc", size = 384591 },
+ { url = "https://files.pythonhosted.org/packages/eb/8f/8a708bc7fd87b8a5d861f1c118a995eccbe6d672fe10c9753e67362d0dd0/jiter-0.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:528b6b71745e7326eed73c53d4aa57e2a522242320b6f7d65b9c5af83cf49b6e", size = 520746 },
+ { url = "https://files.pythonhosted.org/packages/95/1e/65680c7488bd2365dbd2980adaf63c562d3d41d3faac192ebc7ef5b4ae25/jiter-0.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9f48e86b57bc711eb5acdfd12b6cb580a59cc9a993f6e7dcb6d8b50522dcd50d", size = 512754 },
+ { url = "https://files.pythonhosted.org/packages/78/f3/fdc43547a9ee6e93c837685da704fb6da7dba311fc022e2766d5277dfde5/jiter-0.9.0-cp312-cp312-win32.whl", hash = "sha256:699edfde481e191d81f9cf6d2211debbfe4bd92f06410e7637dffb8dd5dfde06", size = 207075 },
+ { url = "https://files.pythonhosted.org/packages/cd/9d/742b289016d155f49028fe1bfbeb935c9bf0ffeefdf77daf4a63a42bb72b/jiter-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:099500d07b43f61d8bd780466d429c45a7b25411b334c60ca875fa775f68ccb0", size = 207999 },
+ { url = "https://files.pythonhosted.org/packages/e7/1b/4cd165c362e8f2f520fdb43245e2b414f42a255921248b4f8b9c8d871ff1/jiter-0.9.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2764891d3f3e8b18dce2cff24949153ee30c9239da7c00f032511091ba688ff7", size = 308197 },
+ { url = "https://files.pythonhosted.org/packages/13/aa/7a890dfe29c84c9a82064a9fe36079c7c0309c91b70c380dc138f9bea44a/jiter-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:387b22fbfd7a62418d5212b4638026d01723761c75c1c8232a8b8c37c2f1003b", size = 318160 },
+ { url = "https://files.pythonhosted.org/packages/6a/38/5888b43fc01102f733f085673c4f0be5a298f69808ec63de55051754e390/jiter-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d8da8629ccae3606c61d9184970423655fb4e33d03330bcdfe52d234d32f69", size = 341259 },
+ { url = "https://files.pythonhosted.org/packages/3d/5e/bbdbb63305bcc01006de683b6228cd061458b9b7bb9b8d9bc348a58e5dc2/jiter-0.9.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1be73d8982bdc278b7b9377426a4b44ceb5c7952073dd7488e4ae96b88e1103", size = 363730 },
+ { url = "https://files.pythonhosted.org/packages/75/85/53a3edc616992fe4af6814c25f91ee3b1e22f7678e979b6ea82d3bc0667e/jiter-0.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2228eaaaa111ec54b9e89f7481bffb3972e9059301a878d085b2b449fbbde635", size = 405126 },
+ { url = "https://files.pythonhosted.org/packages/ae/b3/1ee26b12b2693bd3f0b71d3188e4e5d817b12e3c630a09e099e0a89e28fa/jiter-0.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11509bfecbc319459647d4ac3fd391d26fdf530dad00c13c4dadabf5b81f01a4", size = 393668 },
+ { url = "https://files.pythonhosted.org/packages/11/87/e084ce261950c1861773ab534d49127d1517b629478304d328493f980791/jiter-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f22238da568be8bbd8e0650e12feeb2cfea15eda4f9fc271d3b362a4fa0604d", size = 352350 },
+ { url = "https://files.pythonhosted.org/packages/f0/06/7dca84b04987e9df563610aa0bc154ea176e50358af532ab40ffb87434df/jiter-0.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17f5d55eb856597607562257c8e36c42bc87f16bef52ef7129b7da11afc779f3", size = 384204 },
+ { url = "https://files.pythonhosted.org/packages/16/2f/82e1c6020db72f397dd070eec0c85ebc4df7c88967bc86d3ce9864148f28/jiter-0.9.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6a99bed9fbb02f5bed416d137944419a69aa4c423e44189bc49718859ea83bc5", size = 520322 },
+ { url = "https://files.pythonhosted.org/packages/36/fd/4f0cd3abe83ce208991ca61e7e5df915aa35b67f1c0633eb7cf2f2e88ec7/jiter-0.9.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e057adb0cd1bd39606100be0eafe742de2de88c79df632955b9ab53a086b3c8d", size = 512184 },
+ { url = "https://files.pythonhosted.org/packages/a0/3c/8a56f6d547731a0b4410a2d9d16bf39c861046f91f57c98f7cab3d2aa9ce/jiter-0.9.0-cp313-cp313-win32.whl", hash = "sha256:f7e6850991f3940f62d387ccfa54d1a92bd4bb9f89690b53aea36b4364bcab53", size = 206504 },
+ { url = "https://files.pythonhosted.org/packages/f4/1c/0c996fd90639acda75ed7fa698ee5fd7d80243057185dc2f63d4c1c9f6b9/jiter-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:c8ae3bf27cd1ac5e6e8b7a27487bf3ab5f82318211ec2e1346a5b058756361f7", size = 204943 },
+ { url = "https://files.pythonhosted.org/packages/78/0f/77a63ca7aa5fed9a1b9135af57e190d905bcd3702b36aca46a01090d39ad/jiter-0.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0b2827fb88dda2cbecbbc3e596ef08d69bda06c6f57930aec8e79505dc17001", size = 317281 },
+ { url = "https://files.pythonhosted.org/packages/f9/39/a3a1571712c2bf6ec4c657f0d66da114a63a2e32b7e4eb8e0b83295ee034/jiter-0.9.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062b756ceb1d40b0b28f326cba26cfd575a4918415b036464a52f08632731e5a", size = 350273 },
+ { url = "https://files.pythonhosted.org/packages/ee/47/3729f00f35a696e68da15d64eb9283c330e776f3b5789bac7f2c0c4df209/jiter-0.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6f7838bc467ab7e8ef9f387bd6de195c43bad82a569c1699cb822f6609dd4cdf", size = 206867 },
+]
+
[[package]]
name = "lxml"
version = "5.3.0"
@@ -790,6 +863,25 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 },
]
+[[package]]
+name = "openai"
+version = "1.67.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "httpx" },
+ { name = "jiter" },
+ { name = "pydantic" },
+ { name = "sniffio" },
+ { name = "tqdm" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a7/63/6fd027fa4cb7c3b6bee4c3150f44803b3a7e4335f0b6e49e83a0c51c321b/openai-1.67.0.tar.gz", hash = "sha256:3b386a866396daa4bf80e05a891c50a7746ecd7863b8a27423b62136e3b8f6bc", size = 403596 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/42/de/b42ddabe211411645105ae99ad93f4f3984f53be7ced2ad441378c27f62e/openai-1.67.0-py3-none-any.whl", hash = "sha256:dbbb144f38739fc0e1d951bc67864647fca0b9ffa05aef6b70eeea9f71d79663", size = 580168 },
+]
+
[[package]]
name = "packaging"
version = "24.1"
@@ -1313,6 +1405,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 },
]
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 },
+]
+
[[package]]
name = "typer"
version = "0.12.5"
diff --git a/deployment.md b/deployment.md
index eadf76dda..b75a42137 100644
--- a/deployment.md
+++ b/deployment.md
@@ -1,4 +1,4 @@
-# FastAPI Project - Deployment
+# AI Platform - Deployment
You can deploy the project using Docker Compose to a remote server.
diff --git a/development.md b/development.md
index d7d41d73f..412c31927 100644
--- a/development.md
+++ b/development.md
@@ -1,4 +1,4 @@
-# FastAPI Project - Development
+# AI Platform - Development
## Docker Compose
diff --git a/docker-compose.yml b/docker-compose.yml
index c92d5d445..64cf88678 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,7 @@
services:
db:
- image: postgres:12
+ image: postgres:16
restart: always
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
@@ -168,4 +168,4 @@ volumes:
networks:
traefik-public:
# Allow setting it to false for testing
- external: true
+    external: true