# Reconstructed from a mangled unified diff. Covers:
#   apps/api/plane/api/serializers/__init__.py  (re-export addition)
#   apps/api/plane/api/serializers/page.py      (new file)
#   apps/api/plane/api/urls/__init__.py         (page_patterns addition)
#
# --- apps/api/plane/api/serializers/__init__.py (appended) ---
# from .page import (
#     PageSerializer,
#     PageCreateSerializer,
#     PageUpdateSerializer,
# )
#
# --- apps/api/plane/api/urls/__init__.py (appended) ---
# from .page import urlpatterns as page_patterns
# ...and `*page_patterns,` spliced into the combined `urlpatterns` list.
#
# --- apps/api/plane/api/serializers/page.py (new file) ---

# Copyright (c) 2023-present Plane Software, Inc. and contributors
# SPDX-License-Identifier: AGPL-3.0-only
# See the LICENSE file for details.

# Third party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from plane.db.models import Page


class PageCreateSerializer(BaseSerializer):
    """Serializer for creating pages via the v1 API.

    Accepts only client-settable fields; ownership, workspace, and audit
    columns are server-assigned and therefore read-only.
    """

    class Meta:
        model = Page
        fields = [
            "name",
            "description_html",
            "color",
            "access",
            "parent",
            "external_source",
            "external_id",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "owned_by",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]


class PageUpdateSerializer(PageCreateSerializer):
    """Serializer for updating pages via the v1 API.

    Extends PageCreateSerializer for partial update support; the allowed
    field set is identical, so Meta is inherited unchanged.
    """

    class Meta(PageCreateSerializer.Meta):
        pass


class PageSerializer(BaseSerializer):
    """Full read serializer for pages in the v1 API.

    Returns all page fields including description_html, lock status,
    archive state, and associated label/project IDs.

    NOTE(review): `label_ids` and `project_ids` are declared read-only and
    are expected to be supplied as queryset annotations (ArrayAgg) by the
    views — they are not model columns.
    """

    label_ids = serializers.ListField(child=serializers.UUIDField(), read_only=True)
    project_ids = serializers.ListField(child=serializers.UUIDField(), read_only=True)

    class Meta:
        model = Page
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "owned_by",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]
# Reconstructed from a mangled unified diff. Covers:
#   apps/api/plane/api/urls/page.py     (new file)
#   apps/api/plane/api/views/__init__.py (re-export addition)
#
# DEFECT FIXED: the source diff lost all angle-bracket-delimited text, so the
# route strings read "workspaces//projects//pages/". The Django path
# converters are restored below, inferred from the view method signatures
# (slug, project_id, pk, page_id) and the project's existing URL conventions
# — TODO confirm converter types against sibling url modules.
#
# --- apps/api/plane/api/views/__init__.py (appended) ---
# from .page import (
#     PageListCreateAPIEndpoint,
#     PageDetailAPIEndpoint,
#     PageArchiveUnarchiveAPIEndpoint,
# )
#
# --- apps/api/plane/api/urls/page.py (new file) ---

# Copyright (c) 2023-present Plane Software, Inc. and contributors
# SPDX-License-Identifier: AGPL-3.0-only
# See the LICENSE file for details.

from django.urls import path

from plane.api.views.page import (
    PageListCreateAPIEndpoint,
    PageDetailAPIEndpoint,
    PageArchiveUnarchiveAPIEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
        PageListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="pages",
    ),
    # NOTE(review): this route reuses name="pages" — reverse() by name will be
    # ambiguous between list and detail; consider "page-detail" if no caller
    # depends on the current name.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
        PageDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/archive/",
        PageArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]),
        name="page-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-pages/",
        PageArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]),
        name="page-archive-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-pages/<uuid:page_id>/unarchive/",
        PageArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]),
        name="page-unarchive",
    ),
]
# Reconstructed from a mangled unified diff.
# Covers apps/api/plane/api/views/page.py (new file): module imports,
# PageListCreateAPIEndpoint, PageDetailAPIEndpoint.
#
# DEFECT FIXED: the source diff lost angle-bracket-delimited text, which
# destroyed the `"<p></p>"` default string literals for description_html
# (two sites, in post() and patch()). Restored below — TODO confirm against
# the app-layer page views, which use the same empty-paragraph default.

# Copyright (c) 2023-present Plane Software, Inc. and contributors
# SPDX-License-Identifier: AGPL-3.0-only
# See the LICENSE file for details.

# Django imports
from django.db.models import (
    Q,
    Value,
    UUIDField,
)
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models.functions import Coalesce
from django.db import transaction
from django.utils import timezone

# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.api.serializers import (
    PageSerializer,
    PageCreateSerializer,
    PageUpdateSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import (
    Page,
    ProjectPage,
    ProjectMember,
    UserFavorite,
    UserRecentVisit,
    Project,
)
from plane.db.models.project import ROLE
from .base import BaseAPIView
from plane.app.views.page.base import unarchive_archive_page_and_descendants
from plane.bgtasks.page_transaction_task import page_transaction
from plane.utils.openapi import (
    page_docs,
    CONFLICT_RESPONSE,
)


class PageListCreateAPIEndpoint(BaseAPIView):
    """Page List and Create Endpoint."""

    serializer_class = PageSerializer
    model = Page
    permission_classes = [ProjectEntityPermission]
    # NOTE(review): use_read_replica is enabled although post() writes;
    # presumably the base view routes writes to the primary — confirm.
    use_read_replica = True

    def get_queryset(self):
        """Non-archived pages in the project visible to the requester.

        Visibility: the user must be an active project member, and the page
        must be either owned by the user or public (access=0).
        """
        return (
            Page.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(
                projects__id=self.kwargs.get("project_id"),
                project_pages__deleted_at__isnull=True,
            )
            .filter(
                projects__project_projectmember__member=self.request.user,
                projects__project_projectmember__is_active=True,
            )
            .filter(Q(owned_by=self.request.user) | Q(access=0))
            .filter(archived_at__isnull=True)
            .select_related("workspace", "owned_by")
            .prefetch_related("projects", "labels")
            .annotate(
                # Aggregate label/project ids so PageSerializer's read-only
                # list fields are populated without extra queries.
                label_ids=Coalesce(
                    ArrayAgg(
                        "page_labels__label_id",
                        distinct=True,
                        filter=~Q(page_labels__label_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                project_ids=Coalesce(
                    ArrayAgg(
                        "projects__id",
                        distinct=True,
                        filter=~Q(projects__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .order_by("-created_at")
            .distinct()
        )

    @page_docs(
        operation_id="list_pages",
        summary="List pages",
        description="Retrieve all non-archived pages in a project that the user has access to.",
    )
    def get(self, request, slug, project_id):
        """List pages

        Retrieve all non-archived pages in a project that the user has access to.
        """
        return self.paginate(
            request=request,
            queryset=self.get_queryset(),
            on_results=lambda pages: (
                PageSerializer(
                    pages,
                    many=True,
                    fields=self.fields,
                    expand=self.expand,
                ).data
            ),
        )

    @page_docs(
        operation_id="create_page",
        summary="Create page",
        description="Create a new page within a project.",
        responses={201: PageSerializer, 409: CONFLICT_RESPONSE},
    )
    def post(self, request, slug, project_id):
        """Create page

        Create a new page within a project. Creates the Page record and
        associates it with the project via ProjectPage.
        """
        serializer = PageCreateSerializer(data=request.data)
        if serializer.is_valid():
            # Validate parent belongs to the same project (and is not soft-deleted).
            parent = serializer.validated_data.get("parent")
            if parent and not Page.objects.filter(
                pk=parent.id,
                workspace__slug=slug,
                projects__id=project_id,
                project_pages__deleted_at__isnull=True,
            ).exists():
                return Response(
                    {"error": "Parent page does not belong to this project"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            with transaction.atomic():
                # Lock the project row to serialize concurrent page creation.
                project = Project.objects.select_for_update().get(
                    pk=project_id, workspace__slug=slug,
                )

                # Check for duplicate external_id + external_source.
                if request.data.get("external_id") and request.data.get("external_source"):
                    existing = Page.objects.filter(
                        workspace__slug=slug,
                        projects__id=project_id,
                        external_source=request.data.get("external_source"),
                        external_id=request.data.get("external_id"),
                    ).first()
                    if existing:
                        return Response(
                            {
                                "error": "Page with the same external id and external source already exists",
                                "id": str(existing.id),
                            },
                            status=status.HTTP_409_CONFLICT,
                        )

                page = serializer.save(
                    workspace_id=project.workspace_id,
                    owned_by=request.user,
                )

                # Create the project-page association.
                ProjectPage.objects.create(
                    workspace_id=project.workspace_id,
                    project_id=project_id,
                    page_id=page.id,
                )

                # Record the initial description once the transaction commits;
                # bind values into locals so the lambda does not capture
                # mutable request state.
                description_html = request.data.get("description_html", "<p></p>")
                page_id = page.id
                transaction.on_commit(
                    lambda: page_transaction.delay(
                        new_description_html=description_html,
                        old_description_html=None,
                        page_id=page_id,
                    )
                )

            # Re-fetch with annotations for the response.
            page = self.get_queryset().get(pk=page.id)
            return Response(
                PageSerializer(page).data,
                status=status.HTTP_201_CREATED,
            )
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class PageDetailAPIEndpoint(BaseAPIView):
    """Page Detail Endpoint — retrieve, update, delete."""

    serializer_class = PageSerializer
    model = Page
    permission_classes = [ProjectEntityPermission]
    use_read_replica = True

    def get_queryset(self):
        """Same visibility rules as the list endpoint, but archived pages
        are retrievable here (no archived_at filter)."""
        return (
            Page.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(
                projects__id=self.kwargs.get("project_id"),
                project_pages__deleted_at__isnull=True,
            )
            .filter(
                projects__project_projectmember__member=self.request.user,
                projects__project_projectmember__is_active=True,
            )
            .filter(Q(owned_by=self.request.user) | Q(access=0))
            .select_related("workspace", "owned_by")
            .prefetch_related("projects", "labels")
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "page_labels__label_id",
                        distinct=True,
                        filter=~Q(page_labels__label_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                project_ids=Coalesce(
                    ArrayAgg(
                        "projects__id",
                        distinct=True,
                        filter=~Q(projects__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .distinct()
        )

    @page_docs(
        operation_id="get_page",
        summary="Get page",
        description="Retrieve a single page with full details.",
    )
    def get(self, request, slug, project_id, pk):
        """Retrieve page

        Get a single page with full details including description_html.
        """
        page = self.get_queryset().get(pk=pk)
        return Response(
            PageSerializer(page, fields=self.fields, expand=self.expand).data,
            status=status.HTTP_200_OK,
        )

    @page_docs(
        operation_id="update_page",
        summary="Update page",
        description="Update page properties. Locked pages cannot be updated.",
        responses={409: CONFLICT_RESPONSE},
    )
    def patch(self, request, slug, project_id, pk):
        """Update page

        Update page properties. Locked pages cannot be updated.
        Only the page owner can change the access level.
        """
        # 404s (via DoesNotExist) when the page is private and not owned by
        # the requester, matching the queryset visibility rules.
        page = Page.objects.get(
            Q(owned_by=request.user) | Q(access=0),
            pk=pk,
            workspace__slug=slug,
            projects__id=project_id,
            project_pages__deleted_at__isnull=True,
        )

        if page.is_locked:
            return Response(
                {"error": "Page is locked"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if page.archived_at is not None:
            return Response(
                {"error": "Cannot update an archived page"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Only the owner can change access.
        if page.access != request.data.get("access", page.access) and page.owned_by_id != request.user.id:
            return Response(
                {"error": "Access cannot be updated since this page is owned by someone else"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Snapshot for the page_transaction diff fired after save.
        page_description = page.description_html

        serializer = PageUpdateSerializer(page, data=request.data, partial=True)
        if serializer.is_valid():
            # Validate parent exists in the same project and is not a cycle.
            parent = serializer.validated_data.get("parent")
            if parent is not None:
                if not Page.objects.filter(
                    pk=parent.id,
                    workspace__slug=slug,
                    projects__id=project_id,
                    project_pages__deleted_at__isnull=True,
                ).exists():
                    return Response(
                        {"error": "Parent page does not belong to this project"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                # Block hierarchy cycles: self-parenting and ancestor loops.
                if parent.id == pk:
                    return Response(
                        {"error": "A page cannot be its own parent"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                ancestor = parent
                while ancestor.parent_id:
                    if ancestor.parent_id == pk:
                        return Response(
                            {"error": "Setting this parent would create a cycle"},
                            status=status.HTTP_400_BAD_REQUEST,
                        )
                    ancestor = ancestor.parent

            with transaction.atomic():
                # Check external_id/external_source conflicts when either changes.
                new_external_id = request.data.get("external_id", page.external_id)
                new_external_source = request.data.get("external_source", page.external_source)
                if (
                    new_external_id
                    and new_external_source
                    and (
                        new_external_id != page.external_id
                        or new_external_source != page.external_source
                    )
                ):
                    existing = Page.objects.filter(
                        workspace__slug=slug,
                        projects__id=project_id,
                        external_source=new_external_source,
                        external_id=new_external_id,
                    ).exclude(pk=pk).first()
                    if existing:
                        return Response(
                            {
                                "error": "Page with the same external id and external source already exists",
                                "id": str(existing.id),
                            },
                            status=status.HTTP_409_CONFLICT,
                        )

                serializer.save()

                # Fire page transaction on description change.
                if "description_html" in request.data:
                    desc_html = request.data.get("description_html", "<p></p>")
                    old_desc = page_description
                    page_pk = pk
                    transaction.on_commit(
                        lambda: page_transaction.delay(
                            new_description_html=desc_html,
                            old_description_html=old_desc,
                            page_id=page_pk,
                        )
                    )

            # Re-fetch with annotations.
            page = self.get_queryset().get(pk=pk)
            return Response(
                PageSerializer(page).data,
                status=status.HTTP_200_OK,
            )
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    @page_docs(
        operation_id="delete_page",
        summary="Delete page",
        description="Permanently delete a page. The page must be archived first.",
    )
    def delete(self, request, slug, project_id, pk):
        """Delete page

        Permanently delete a page. The page must be archived first.
        Only the owner or a project admin can delete.
        """
        page = Page.objects.get(
            pk=pk,
            workspace__slug=slug,
            projects__id=project_id,
            project_pages__deleted_at__isnull=True,
        )

        if page.archived_at is None:
            return Response(
                {"error": "The page should be archived before deleting"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if page.owned_by_id != request.user.id and (
            not ProjectMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role=ROLE.ADMIN.value,
                project_id=project_id,
                is_active=True,
            ).exists()
        ):
            return Response(
                {"error": "Only admin or owner can delete the page"},
                status=status.HTTP_403_FORBIDDEN,
            )

        # Detach children so they are not orphaned by the delete.
        Page.objects.filter(
            parent_id=pk,
            projects__id=project_id,
            workspace__slug=slug,
            project_pages__deleted_at__isnull=True,
        ).update(parent=None)

        page.delete()

        # Clean up favorites.
        UserFavorite.objects.filter(
            project=project_id,
            workspace__slug=slug,
            entity_identifier=pk,
            entity_type="page",
        ).delete()

        # Clean up recent visits (hard delete).
        UserRecentVisit.objects.filter(
            project_id=project_id,
            workspace__slug=slug,
            entity_identifier=pk,
            entity_name="page",
        ).delete(soft=False)

        return Response(status=status.HTTP_204_NO_CONTENT)
# Reconstructed from a mangled unified diff.
# Covers the rest of apps/api/plane/api/views/page.py plus the
# apps/api/plane/settings/openapi.py tag addition carried in this span.

class PageArchiveUnarchiveAPIEndpoint(BaseAPIView):
    """Page Archive and Unarchive Endpoint."""

    permission_classes = [ProjectEntityPermission]
    use_read_replica = True

    def get_queryset(self):
        """Archived pages in the project visible to the requester
        (owned by the user or public)."""
        return (
            Page.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(
                projects__id=self.kwargs.get("project_id"),
                project_pages__deleted_at__isnull=True,
            )
            .filter(
                projects__project_projectmember__member=self.request.user,
                projects__project_projectmember__is_active=True,
            )
            .filter(Q(owned_by=self.request.user) | Q(access=0))
            .filter(archived_at__isnull=False)
            .select_related("workspace", "owned_by")
            .prefetch_related("projects", "labels")
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "page_labels__label_id",
                        distinct=True,
                        filter=~Q(page_labels__label_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                project_ids=Coalesce(
                    ArrayAgg(
                        "projects__id",
                        distinct=True,
                        filter=~Q(projects__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .order_by("-created_at")
            .distinct()
        )

    @page_docs(
        operation_id="list_archived_pages",
        summary="List archived pages",
        description="Retrieve all pages that have been archived in the project.",
    )
    def get(self, request, slug, project_id):
        """List archived pages

        Retrieve all pages that have been archived in the project.
        """
        return self.paginate(
            request=request,
            queryset=self.get_queryset(),
            on_results=lambda pages: (
                PageSerializer(
                    pages,
                    many=True,
                    fields=self.fields,
                    expand=self.expand,
                ).data
            ),
        )

    @page_docs(
        operation_id="archive_page",
        summary="Archive page",
        description="Move a page and its descendants to archived status.",
    )
    def post(self, request, slug, project_id, page_id):
        """Archive page

        Move a page and its descendants to archived status.
        Only the page owner or a project admin can archive.
        """
        page = Page.objects.get(
            pk=page_id,
            workspace__slug=slug,
            projects__id=project_id,
            project_pages__deleted_at__isnull=True,
        )

        # Only the owner or admin can archive: a membership row with
        # role <= MEMBER means the requester is not an admin, so they must
        # own the page.
        if (
            ProjectMember.objects.filter(
                project_id=project_id,
                member=request.user,
                is_active=True,
                role__lte=ROLE.MEMBER.value,
            ).exists()
            and request.user.id != page.owned_by_id
        ):
            return Response(
                {"error": "Only the owner or admin can archive the page"},
                status=status.HTTP_403_FORBIDDEN,
            )

        # Archived pages should no longer appear in anyone's favorites.
        UserFavorite.objects.filter(
            entity_type="page",
            entity_identifier=page_id,
            project_id=project_id,
            workspace__slug=slug,
        ).delete()

        today = timezone.now().date()
        unarchive_archive_page_and_descendants(page_id, today)

        return Response(
            {"archived_at": str(today)},
            status=status.HTTP_200_OK,
        )

    @page_docs(
        operation_id="unarchive_page",
        summary="Unarchive page",
        description="Restore an archived page and its descendants to active status.",
    )
    def delete(self, request, slug, project_id, page_id):
        """Unarchive page

        Restore an archived page and its descendants to active status.
        Only the page owner or a project admin can unarchive.
        """
        page = Page.objects.get(
            pk=page_id,
            workspace__slug=slug,
            projects__id=project_id,
            project_pages__deleted_at__isnull=True,
        )

        if page.archived_at is None:
            return Response(
                {"error": "Page is not archived"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Only the owner or admin can unarchive (same rule as archive).
        if (
            ProjectMember.objects.filter(
                project_id=project_id,
                member=request.user,
                is_active=True,
                role__lte=ROLE.MEMBER.value,
            ).exists()
            and request.user.id != page.owned_by_id
        ):
            return Response(
                {"error": "Only the owner or admin can unarchive the page"},
                status=status.HTTP_403_FORBIDDEN,
            )

        # If the parent was deleted or is still archived, break the hierarchy
        # so the restored page does not dangle under an invisible parent.
        if page.parent_id:
            parent = Page.objects.filter(pk=page.parent_id).first()
            if not parent or parent.archived_at:
                page.parent = None
                page.save(update_fields=["parent"])

        unarchive_archive_page_and_descendants(page_id, None)

        return Response(status=status.HTTP_204_NO_CONTENT)


# --- apps/api/plane/settings/openapi.py (tag added to the tags list) ---
# # Knowledge Management
# {
#     "name": "Pages",
#     "description": (
#         "**Pages & Documentation**\n\n"
#         "Create and manage pages for project documentation, notes, and knowledge sharing. "
#         "Organize pages in hierarchies with parent-child relationships.\n\n"
#         "*Key Features:*\n"
#         "- Create, update, and delete pages\n"
#         "- Archive and unarchive pages with descendants\n"
#         "- Control page access (public or private)\n"
#         "- Organize pages in parent-child hierarchies\n\n"
#         "*Use Cases:* Project documentation, meeting notes, knowledge bases, team wikis."
#     ),
# },
# Reconstructed from a mangled unified diff.
# Covers apps/api/plane/tests/contract/api/test_pages.py (new file).
#
# DEFECT FIXED: the source diff lost angle-bracket-delimited text, which
# destroyed the `<p>…</p>` HTML tags inside description_html string
# literals in the fixtures and assertions. Restored below — TODO confirm
# the exact markup against the project's other page tests.

# Copyright (c) 2023-present Plane Software, Inc. and contributors
# SPDX-License-Identifier: AGPL-3.0-only
# See the LICENSE file for details.

import pytest
from unittest.mock import MagicMock, patch
from rest_framework import status
from uuid import uuid4
from django.utils import timezone

from plane.db.models import Page, ProjectPage, Project, ProjectMember


@pytest.fixture
def project(db, workspace, create_user):
    """Create a test project with the user as an admin member."""
    project = Project.objects.create(
        name="Test Project",
        identifier="TP",
        workspace=workspace,
        created_by=create_user,
    )
    ProjectMember.objects.create(
        project=project,
        member=create_user,
        role=20,  # Admin role
        is_active=True,
    )
    return project


@pytest.fixture
def page_data():
    """Sample page payload for create tests."""
    return {
        "name": "Test Page",
        "description_html": "<p>Test page content</p>",
    }


@pytest.fixture(autouse=True)
def _mock_celery():
    """Prevent all celery tasks from hitting a broker."""
    with patch("celery.app.task.Task.delay", return_value=MagicMock(id="mock-task-id")):
        yield


@pytest.fixture
def create_page(db, project, create_user):
    """Create a test page with its project association."""
    page = Page.objects.create(
        name="Existing Page",
        description_html="<p>Existing content</p>",
        workspace=project.workspace,
        owned_by=create_user,
    )
    ProjectPage.objects.create(
        workspace=project.workspace,
        project=project,
        page=page,
    )
    return page


@pytest.mark.contract
class TestPageListCreateAPIEndpoint:
    """Test Page List and Create API Endpoint."""

    def get_page_url(self, workspace_slug, project_id):
        return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/pages/"

    @pytest.mark.django_db
    def test_create_page_success(self, api_key_client, workspace, project, page_data):
        """Test successful page creation."""
        url = self.get_page_url(workspace.slug, project.id)

        response = api_key_client.post(url, page_data, format="json")

        assert response.status_code == status.HTTP_201_CREATED
        assert Page.objects.count() == 1

        created_page = Page.objects.first()
        assert created_page.name == page_data["name"]
        assert created_page.description_html == page_data["description_html"]
        assert created_page.owned_by is not None

        # Verify ProjectPage association was created.
        assert ProjectPage.objects.filter(page=created_page, project=project).exists()

    @pytest.mark.django_db
    def test_create_page_with_external_id(self, api_key_client, workspace, project):
        """Test creating page with external ID."""
        url = self.get_page_url(workspace.slug, project.id)

        data = {
            "name": "External Page",
            "external_id": "ext-123",
            "external_source": "confluence",
        }

        response = api_key_client.post(url, data, format="json")

        assert response.status_code == status.HTTP_201_CREATED
        created_page = Page.objects.first()
        assert created_page.external_id == "ext-123"
        assert created_page.external_source == "confluence"

    @pytest.mark.django_db
    def test_create_page_duplicate_external_id(self, api_key_client, workspace, project, create_user):
        """Test creating page with duplicate external ID returns 409."""
        url = self.get_page_url(workspace.slug, project.id)

        # Create first page with external ID.
        page = Page.objects.create(
            name="First Page",
            workspace=workspace,
            owned_by=create_user,
            external_id="ext-123",
            external_source="confluence",
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=page,
        )

        # Try to create second page with same external ID.
        data = {
            "name": "Second Page",
            "external_id": "ext-123",
            "external_source": "confluence",
        }

        response = api_key_client.post(url, data, format="json")

        assert response.status_code == status.HTTP_409_CONFLICT
        assert "same external id" in response.data["error"]

    @pytest.mark.django_db
    def test_create_page_parent_outside_project_rejected(self, api_key_client, workspace, project, create_user):
        """Test creating page with parent from different project is rejected."""
        # Create a second project.
        other_project = Project.objects.create(
            name="Other Project",
            identifier="OP",
            workspace=workspace,
            created_by=create_user,
        )
        ProjectMember.objects.create(
            project=other_project,
            member=create_user,
            role=20,
            is_active=True,
        )

        # Create a page in the other project.
        other_page = Page.objects.create(
            name="Other Project Page",
            workspace=workspace,
            owned_by=create_user,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=other_project,
            page=other_page,
        )

        url = self.get_page_url(workspace.slug, project.id)
        data = {
            "name": "Child Page",
            "parent": str(other_page.id),
        }

        response = api_key_client.post(url, data, format="json")

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "parent" in response.data["error"].lower()

    @pytest.mark.django_db
    def test_list_pages_success(self, api_key_client, workspace, project, create_page, create_user):
        """Test successful page listing."""
        url = self.get_page_url(workspace.slug, project.id)

        # Create additional pages.
        for i in range(2):
            page = Page.objects.create(
                name=f"Page {i + 2}",
                workspace=workspace,
                owned_by=create_user,
            )
            ProjectPage.objects.create(
                workspace=workspace,
                project=project,
                page=page,
            )

        response = api_key_client.get(url)

        assert response.status_code == status.HTTP_200_OK
        assert "results" in response.data
        assert len(response.data["results"]) == 3  # Including create_page fixture

    @pytest.mark.django_db
    def test_list_pages_excludes_archived(self, api_key_client, workspace, project, create_user):
        """Test that archived pages are excluded from listing."""
        url = self.get_page_url(workspace.slug, project.id)

        # Create a non-archived page.
        active_page = Page.objects.create(
            name="Active Page",
            workspace=workspace,
            owned_by=create_user,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=active_page,
        )

        # Create an archived page.
        archived_page = Page.objects.create(
            name="Archived Page",
            workspace=workspace,
            owned_by=create_user,
            archived_at=timezone.now().date(),
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=archived_page,
        )

        response = api_key_client.get(url)

        assert response.status_code == status.HTTP_200_OK
        page_ids = [str(p["id"]) for p in response.data["results"]]
        assert str(active_page.id) in page_ids
        assert str(archived_page.id) not in page_ids

    @pytest.mark.django_db
    def test_list_pages_excludes_private_pages_of_other_users(self, api_key_client, workspace, project, create_user):
        """Test that private pages owned by other users are excluded."""
        url = self.get_page_url(workspace.slug, project.id)

        from plane.db.models import User

        other_user = User.objects.create(
            email="other@plane.so",
            username=f"other_{uuid4().hex[:8]}",
            first_name="Other",
            last_name="User",
        )

        # Public page by other user -- should be visible.
        public_page = Page.objects.create(
            name="Public Page",
            workspace=workspace,
            owned_by=other_user,
            access=0,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=public_page,
        )

        # Private page by other user -- should be hidden.
        private_page = Page.objects.create(
            name="Private Page",
            workspace=workspace,
            owned_by=other_user,
            access=1,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=private_page,
        )

        response = api_key_client.get(url)

        assert response.status_code == status.HTTP_200_OK
        page_ids = [str(p["id"]) for p in response.data["results"]]
        assert str(public_page.id) in page_ids
        assert str(private_page.id) not in page_ids


@pytest.mark.contract
class TestPageDetailAPIEndpoint:
    """Test Page Detail API Endpoint."""

    def get_page_detail_url(self, workspace_slug, project_id, page_id):
        return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/pages/{page_id}/"

    @pytest.mark.django_db
    def test_get_page_success(self, api_key_client, workspace, project, create_page):
        """Test successful page retrieval."""
        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.get(url)

        assert response.status_code == status.HTTP_200_OK
        assert str(response.data["id"]) == str(create_page.id)
        assert response.data["name"] == create_page.name

    @pytest.mark.django_db
    def test_get_page_not_found(self, api_key_client, workspace, project):
        """Test getting non-existent page."""
        fake_id = uuid4()
        url = self.get_page_detail_url(workspace.slug, project.id, fake_id)

        response = api_key_client.get(url)
        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.django_db
    def test_update_page_success(self, api_key_client, workspace, project, create_page):
        """Test successful page update."""
        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        update_data = {
            "name": f"Updated Page {uuid4()}",
        }

        response = api_key_client.patch(url, update_data, format="json")

        assert response.status_code == status.HTTP_200_OK

        create_page.refresh_from_db()
        assert create_page.name == update_data["name"]

    @pytest.mark.django_db
    def test_update_page_description(self, api_key_client, workspace, project, create_page):
        """Test updating page description fires page_transaction."""
        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        update_data = {
            "description_html": "<p>Updated content</p>",
        }

        response = api_key_client.patch(url, update_data, format="json")

        assert response.status_code == status.HTTP_200_OK

        create_page.refresh_from_db()
        assert create_page.description_html == "<p>Updated content</p>"

    @pytest.mark.django_db
    def test_update_locked_page(self, api_key_client, workspace, project, create_page):
        """Test that locked pages cannot be updated."""
        create_page.is_locked = True
        create_page.save()

        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.patch(url, {"name": "New Name"}, format="json")

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "locked" in response.data["error"].lower()

    @pytest.mark.django_db
    def test_update_archived_page(self, api_key_client, workspace, project, create_page):
        """Test that archived pages cannot be updated."""
        create_page.archived_at = timezone.now().date()
        create_page.save()

        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.patch(url, {"name": "New Name"}, format="json")

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "archived" in response.data["error"].lower()

    @pytest.mark.django_db
    def test_update_page_access_by_non_owner(self, api_key_client, workspace, project, create_user):
        """Test that non-owners cannot change page access level."""
        from plane.db.models import User

        other_user = User.objects.create(
            email="owner@plane.so",
            username=f"owner_{uuid4().hex[:8]}",
            first_name="Owner",
            last_name="User",
        )

        page = Page.objects.create(
            name="Other's Page",
            workspace=workspace,
            owned_by=other_user,
            access=0,  # Public
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=page,
        )

        url = self.get_page_detail_url(workspace.slug, project.id, page.id)

        response = api_key_client.patch(url, {"access": 1}, format="json")

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "owned by someone else" in response.data["error"]

    @pytest.mark.django_db
    def test_update_page_external_id_conflict(self, api_key_client, workspace, project, create_page, create_user):
        """Test updating page with conflicting external ID."""
        # Create another page with an external ID.
        other_page = Page.objects.create(
            name="Other Page",
            workspace=workspace,
            owned_by=create_user,
            external_id="ext-456",
            external_source="confluence",
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=other_page,
        )

        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.patch(
            url,
            {"external_id": "ext-456", "external_source": "confluence"},
            format="json",
        )

        assert response.status_code == status.HTTP_409_CONFLICT
        assert "same external id" in response.data["error"]

    @pytest.mark.django_db
    def test_update_external_source_only_conflict(self, api_key_client, workspace, project, create_user):
        """Test updating only external_source to create a conflict is detected."""
        # Create page A with ext-100 / source-a.
        page_a = Page.objects.create(
            name="Page A",
            workspace=workspace,
            owned_by=create_user,
            external_id="ext-100",
            external_source="source-a",
        )
        ProjectPage.objects.create(workspace=workspace, project=project, page=page_a)

        # Create page B with ext-100 / source-b.
        page_b = Page.objects.create(
            name="Page B",
            workspace=workspace,
            owned_by=create_user,
            external_id="ext-100",
            external_source="source-b",
        )
        ProjectPage.objects.create(workspace=workspace, project=project, page=page_b)

        url = self.get_page_detail_url(workspace.slug, project.id, page_b.id)

        # Change only external_source to collide with page A.
        response = api_key_client.patch(
            url,
            {"external_source": "source-a"},
            format="json",
        )

        assert response.status_code == status.HTTP_409_CONFLICT
        assert response.data["id"] == str(page_a.id)

    @pytest.mark.django_db
    def test_patch_private_page_by_non_owner(self, api_key_client, workspace, project, create_user):
        """Test that non-owners cannot patch another user's private page."""
        from plane.db.models import User

        other_user = User.objects.create(
            email="private_owner@plane.so",
            username=f"priv_{uuid4().hex[:8]}",
            first_name="Private",
            last_name="Owner",
        )

        private_page = Page.objects.create(
            name="Private Page",
            workspace=workspace,
            owned_by=other_user,
            access=1,  # Private
        )
        ProjectPage.objects.create(workspace=workspace, project=project, page=private_page)

        url = self.get_page_detail_url(workspace.slug, project.id, private_page.id)
        response = api_key_client.patch(url, {"name": "Hacked"}, format="json")

        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.django_db
    def test_delete_archived_page_success(self, api_key_client, workspace, project, create_page):
        """Test successful deletion of an archived page."""
        create_page.archived_at = timezone.now().date()
        create_page.save()

        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_204_NO_CONTENT
        assert not Page.objects.filter(id=create_page.id).exists()

    @pytest.mark.django_db
    def test_delete_non_archived_page(self, api_key_client, workspace, project, create_page):
        """Test that non-archived pages cannot be deleted."""
        url = self.get_page_detail_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "archived before deleting" in response.data["error"]

    # NOTE(review): the source diff is truncated here, mid-way through
    # test_delete_page_non_owner_non_admin (it ends inside a
    # Page.objects.create(...) call). That test and anything after it are
    # not visible in this chunk and have NOT been reconstructed — recover
    # them from the original patch.
archived_at=timezone.now().date(), + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # Downgrade the API user to member (role=15) so they are not admin + ProjectMember.objects.filter(project=project, member=create_user).update(role=15) + + url = self.get_page_detail_url(workspace.slug, project.id, page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + assert "Only admin or owner" in response.data["error"] + + @pytest.mark.django_db + def test_delete_page_clears_children_parent(self, api_key_client, workspace, project, create_page, create_user): + """Test that deleting a page sets parent=None on its children""" + child_page = Page.objects.create( + name="Child Page", + workspace=workspace, + owned_by=create_user, + parent=create_page, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=child_page, + ) + + # Archive the parent so it can be deleted + create_page.archived_at = timezone.now().date() + create_page.save() + + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + + child_page.refresh_from_db() + assert child_page.parent is None + + +@pytest.mark.contract +class TestPageArchiveUnarchiveAPIEndpoint: + """Test Page Archive and Unarchive API Endpoint""" + + def get_archive_url(self, workspace_slug, project_id, page_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/pages/{page_id}/archive/" + + def get_unarchive_url(self, workspace_slug, project_id, page_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/archived-pages/{page_id}/unarchive/" + + def get_archived_list_url(self, workspace_slug, project_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/archived-pages/" + + @pytest.mark.django_db + def test_archive_page_success(self, 
api_key_client, workspace, project, create_page): + """Test successful page archiving""" + url = self.get_archive_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.post(url) + + assert response.status_code == status.HTTP_200_OK + assert "archived_at" in response.data + + # Verify the response returns a date string (YYYY-MM-DD), not a datetime + import re + + assert re.fullmatch(r"\d{4}-\d{2}-\d{2}", response.data["archived_at"]) + + create_page.refresh_from_db() + assert create_page.archived_at is not None + # Verify the response matches what's stored in the DB + assert response.data["archived_at"] == str(create_page.archived_at) + + @pytest.mark.django_db + def test_archive_page_archives_descendants(self, api_key_client, workspace, project, create_page, create_user): + """Test that archiving a page also archives its descendants""" + child_page = Page.objects.create( + name="Child Page", + workspace=workspace, + owned_by=create_user, + parent=create_page, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=child_page, + ) + + url = self.get_archive_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.post(url) + + assert response.status_code == status.HTTP_200_OK + + child_page.refresh_from_db() + assert child_page.archived_at is not None + + @pytest.mark.django_db + def test_archive_page_non_owner_non_admin(self, api_key_client, workspace, project, create_user): + """Test that non-owner non-admin cannot archive a page""" + from plane.db.models import User + + other_user = User.objects.create( + email="archowner@plane.so", + username=f"archowner_{uuid4().hex[:8]}", + first_name="Arch", + last_name="Owner", + ) + + page = Page.objects.create( + name="Other's Page", + workspace=workspace, + owned_by=other_user, + access=0, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # Downgrade to member role + 
ProjectMember.objects.filter(project=project, member=create_user).update(role=15) + + url = self.get_archive_url(workspace.slug, project.id, page.id) + + response = api_key_client.post(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + assert "owner or admin" in response.data["error"].lower() + + @pytest.mark.django_db + def test_archive_page_non_owner_admin_success(self, api_key_client, workspace, project, create_user): + """Test that a project admin can archive a page they don't own""" + from plane.db.models import User + + other_user = User.objects.create( + email="pageowner@plane.so", + username=f"pageowner_{uuid4().hex[:8]}", + first_name="Page", + last_name="Owner", + ) + + page = Page.objects.create( + name="Other's Page", + workspace=workspace, + owned_by=other_user, + access=0, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # create_user is already admin (role=20) from the project fixture + url = self.get_archive_url(workspace.slug, project.id, page.id) + + response = api_key_client.post(url) + + assert response.status_code == status.HTTP_200_OK + page.refresh_from_db() + assert page.archived_at is not None + + @pytest.mark.django_db + def test_unarchive_page_non_owner_non_admin(self, api_key_client, workspace, project, create_user): + """Test that non-owner non-admin cannot unarchive a page""" + from plane.db.models import User + + other_user = User.objects.create( + email="unarchowner@plane.so", + username=f"unarchowner_{uuid4().hex[:8]}", + first_name="Unarch", + last_name="Owner", + ) + + page = Page.objects.create( + name="Other's Page", + workspace=workspace, + owned_by=other_user, + access=0, + archived_at=timezone.now().date(), + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # Downgrade to member role + ProjectMember.objects.filter(project=project, member=create_user).update(role=15) + + url = self.get_unarchive_url(workspace.slug, 
project.id, page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + assert "owner or admin" in response.data["error"].lower() + + @pytest.mark.django_db + def test_unarchive_page_non_owner_admin_success(self, api_key_client, workspace, project, create_user): + """Test that a project admin can unarchive a page they don't own""" + from plane.db.models import User + + other_user = User.objects.create( + email="unarchowner2@plane.so", + username=f"unarchowner2_{uuid4().hex[:8]}", + first_name="Unarch2", + last_name="Owner", + ) + + page = Page.objects.create( + name="Other's Page", + workspace=workspace, + owned_by=other_user, + access=0, + archived_at=timezone.now().date(), + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # create_user is already admin (role=20) from the project fixture + url = self.get_unarchive_url(workspace.slug, project.id, page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + page.refresh_from_db() + assert page.archived_at is None + + @pytest.mark.django_db + def test_unarchive_non_archived_page(self, api_key_client, workspace, project, create_page): + """Test that unarchiving a non-archived page returns 400""" + url = self.get_unarchive_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "not archived" in response.data["error"].lower() + + @pytest.mark.django_db + def test_unarchive_page_success(self, api_key_client, workspace, project, create_page): + """Test successful page unarchiving""" + create_page.archived_at = timezone.now().date() + create_page.save() + + url = self.get_unarchive_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + + create_page.refresh_from_db() + 
assert create_page.archived_at is None + + @pytest.mark.django_db + def test_unarchive_page_unarchives_descendants(self, api_key_client, workspace, project, create_page, create_user): + """Test that unarchiving a page also unarchives its descendants""" + now = timezone.now().date() + create_page.archived_at = now + create_page.save() + + child_page = Page.objects.create( + name="Child Page", + workspace=workspace, + owned_by=create_user, + parent=create_page, + archived_at=now, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=child_page, + ) + + url = self.get_unarchive_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + + child_page.refresh_from_db() + assert child_page.archived_at is None + + @pytest.mark.django_db + def test_unarchive_page_breaks_archived_parent_hierarchy( + self, api_key_client, workspace, project, create_page, create_user + ): + """Test that unarchiving a child with an archived parent sets parent=None""" + now = timezone.now().date() + create_page.archived_at = now + create_page.save() + + child_page = Page.objects.create( + name="Child Page", + workspace=workspace, + owned_by=create_user, + parent=create_page, + archived_at=now, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=child_page, + ) + + # Unarchive only the child, not the parent + url = self.get_unarchive_url(workspace.slug, project.id, child_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + + child_page.refresh_from_db() + assert child_page.archived_at is None + assert child_page.parent is None + + @pytest.mark.django_db + def test_list_archived_pages(self, api_key_client, workspace, project, create_user): + """Test listing archived pages""" + # Create an active page + active_page = Page.objects.create( + name="Active Page", + workspace=workspace, + 
owned_by=create_user, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=active_page, + ) + + # Create an archived page + archived_page = Page.objects.create( + name="Archived Page", + workspace=workspace, + owned_by=create_user, + archived_at=timezone.now().date(), + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=archived_page, + ) + + url = self.get_archived_list_url(workspace.slug, project.id) + + response = api_key_client.get(url) + + assert response.status_code == status.HTTP_200_OK + page_ids = [str(p["id"]) for p in response.data["results"]] + assert str(archived_page.id) in page_ids + assert str(active_page.id) not in page_ids diff --git a/apps/api/plane/utils/openapi/__init__.py b/apps/api/plane/utils/openapi/__init__.py index d54caf584eb..ecef3d86348 100644 --- a/apps/api/plane/utils/openapi/__init__.py +++ b/apps/api/plane/utils/openapi/__init__.py @@ -164,6 +164,7 @@ issue_attachment_docs, module_docs, module_issue_docs, + page_docs, state_docs, ) @@ -314,6 +315,7 @@ "issue_attachment_docs", "module_docs", "module_issue_docs", + "page_docs", "state_docs", # Hooks "preprocess_filter_api_v1_paths", diff --git a/apps/api/plane/utils/openapi/decorators.py b/apps/api/plane/utils/openapi/decorators.py index 8b016f4c016..e15d9ecbf80 100644 --- a/apps/api/plane/utils/openapi/decorators.py +++ b/apps/api/plane/utils/openapi/decorators.py @@ -268,6 +268,21 @@ def state_docs(**kwargs): return extend_schema(**_merge_schema_options(defaults, kwargs)) +def page_docs(**kwargs): + """Decorator for page management endpoints""" + defaults = { + "tags": ["Pages"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + def sticky_docs(**kwargs): """Decorator for sticky management endpoints""" defaults = {