2024-01-09 15:30:36 +01:00
|
|
|
"""API endpoints"""
|
2025-11-19 14:49:24 +01:00
|
|
|
|
2024-11-09 18:33:48 +01:00
|
|
|
# pylint: disable=too-many-lines
|
2024-08-20 16:42:27 +02:00
|
|
|
|
2025-07-24 14:08:18 +02:00
|
|
|
import base64
|
2025-05-07 11:50:04 +02:00
|
|
|
import json
|
2024-11-18 07:59:55 +01:00
|
|
|
import logging
|
2024-08-19 22:35:48 +02:00
|
|
|
import uuid
|
2025-05-02 18:30:12 +02:00
|
|
|
from collections import defaultdict
|
2025-05-21 15:09:40 +02:00
|
|
|
from urllib.parse import unquote, urlencode, urlparse
|
2024-08-19 22:35:48 +02:00
|
|
|
|
|
|
|
|
from django.conf import settings
|
2024-03-03 08:49:27 +01:00
|
|
|
from django.contrib.postgres.aggregates import ArrayAgg
|
2024-11-06 08:09:05 +01:00
|
|
|
from django.contrib.postgres.search import TrigramSimilarity
|
2025-05-07 11:50:04 +02:00
|
|
|
from django.core.cache import cache
|
2025-09-11 14:05:56 +02:00
|
|
|
from django.core.exceptions import ImproperlyConfigured, ValidationError
|
2024-08-19 22:35:48 +02:00
|
|
|
from django.core.files.storage import default_storage
|
2025-08-25 16:15:16 +02:00
|
|
|
from django.core.validators import URLValidator
|
2025-02-12 10:13:41 +01:00
|
|
|
from django.db import connection, transaction
|
2024-11-18 07:59:55 +01:00
|
|
|
from django.db import models as db
|
2025-01-25 10:51:30 +01:00
|
|
|
from django.db.models.expressions import RawSQL
|
2025-11-19 14:49:24 +01:00
|
|
|
from django.db.models.functions import Greatest, Left, Length
|
2025-03-10 10:11:38 +01:00
|
|
|
from django.http import Http404, StreamingHttpResponse
|
2025-05-21 15:09:40 +02:00
|
|
|
from django.urls import reverse
|
2025-09-12 15:28:25 +02:00
|
|
|
from django.utils import timezone
|
2025-08-13 06:50:58 +02:00
|
|
|
from django.utils.decorators import method_decorator
|
2025-05-06 09:41:16 +02:00
|
|
|
from django.utils.functional import cached_property
|
2025-05-07 11:50:04 +02:00
|
|
|
from django.utils.text import capfirst, slugify
|
2025-01-20 10:23:18 +01:00
|
|
|
from django.utils.translation import gettext_lazy as _
|
2024-01-09 15:30:36 +01:00
|
|
|
|
2025-03-10 10:11:38 +01:00
|
|
|
import requests
|
2024-11-18 07:59:55 +01:00
|
|
|
import rest_framework as drf
|
2024-04-08 23:37:15 +02:00
|
|
|
from botocore.exceptions import ClientError
|
2025-06-30 10:42:48 +02:00
|
|
|
from csp.constants import NONE
|
|
|
|
|
from csp.decorators import csp_update
|
2025-05-05 16:01:12 +02:00
|
|
|
from lasuite.malware_detection import malware_detection
|
2025-08-13 06:50:58 +02:00
|
|
|
from lasuite.oidc_login.decorators import refresh_oidc_access_token
|
2025-01-02 17:20:09 +01:00
|
|
|
from rest_framework import filters, status, viewsets
|
2024-12-01 11:25:01 +01:00
|
|
|
from rest_framework import response as drf_response
|
2024-11-15 09:29:07 +01:00
|
|
|
from rest_framework.permissions import AllowAny
|
2024-01-09 15:30:36 +01:00
|
|
|
|
2025-04-23 22:47:24 +02:00
|
|
|
from core import authentication, choices, enums, models
|
2025-11-19 14:49:24 +01:00
|
|
|
from core.api.filters import remove_accents
|
2024-09-20 22:42:46 +02:00
|
|
|
from core.services.ai_services import AIService
|
2024-11-28 16:35:48 +01:00
|
|
|
from core.services.collaboration_services import CollaborationService
|
2025-07-24 14:08:18 +02:00
|
|
|
from core.services.converter_services import (
|
2025-07-24 02:31:50 +02:00
|
|
|
ServiceUnavailableError as YProviderServiceUnavailableError,
|
|
|
|
|
)
|
2025-07-24 14:08:18 +02:00
|
|
|
from core.services.converter_services import (
|
2025-07-24 02:31:50 +02:00
|
|
|
ValidationError as YProviderValidationError,
|
|
|
|
|
)
|
2025-07-24 14:08:18 +02:00
|
|
|
from core.services.converter_services import (
|
|
|
|
|
YdocConverter,
|
2025-07-24 02:31:50 +02:00
|
|
|
)
|
2025-09-11 14:05:56 +02:00
|
|
|
from core.services.search_indexers import get_document_indexer_class
|
2025-06-20 15:24:46 +02:00
|
|
|
from core.tasks.mail import send_ask_for_access_mail
|
2025-01-20 10:23:18 +01:00
|
|
|
from core.utils import extract_attachments, filter_descendants
|
2024-01-09 15:30:36 +01:00
|
|
|
|
2024-08-19 22:38:41 +02:00
|
|
|
from . import permissions, serializers, utils
|
2025-09-08 16:12:55 +02:00
|
|
|
from .filters import DocumentFilter, ListDocumentFilter, UserSearchFilter
|
2025-12-12 09:24:04 +01:00
|
|
|
from .throttling import (
|
|
|
|
|
DocumentThrottle,
|
|
|
|
|
UserListThrottleBurst,
|
|
|
|
|
UserListThrottleSustained,
|
|
|
|
|
)
|
2024-08-19 22:38:41 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
2024-02-09 19:32:12 +01:00
|
|
|
# pylint: disable=too-many-ancestors
|
|
|
|
|
|
2024-01-09 15:30:36 +01:00
|
|
|
|
2025-01-02 17:20:09 +01:00
|
|
|
class NestedGenericViewSet(viewsets.GenericViewSet):
    """
    Generic viewset meant to be used in a nested route context.

    e.g: `/api/v1.0/resource_1/<resource_1_pk>/resource_2/<resource_2_pk>/`

    Declaring `lookup_fields` and `lookup_url_kwargs` lets the viewset resolve
    every level of the nested URL before performing the final object lookup.
    """

    lookup_fields: list[str] = ["pk"]
    lookup_url_kwargs: list[str] = []

    def __getattribute__(self, item):
        """
        Map reads of `lookup_field`/`lookup_url_kwarg` onto the last element of
        the plural `lookup_fields`/`lookup_url_kwargs` lists. This keeps
        compatibility with every helper of the parent `GenericViewSet` class,
        which only knows about the singular attributes.
        """
        if item in ("lookup_field", "lookup_url_kwarg"):
            candidates = getattr(self, item + "s", [None])
            return candidates[-1]

        return super().__getattribute__(item)

    def get_queryset(self):
        """
        Get the list of items for this view.

        Every entry of `lookup_fields` except the last one is turned into a
        queryset filter from the matching URL kwarg; the last entry designates
        the object itself and is handled by `get_object`.
        """
        queryset = super().get_queryset()

        # Drop the last kwarg: it corresponds to the object pk and is consumed
        # within the `get_object` method, not by this nested filtering.
        if self.lookup_url_kwargs:
            nested_kwargs = self.lookup_url_kwargs[:-1]
        else:
            nested_kwargs = self.lookup_fields[:-1]

        nested_filters = {}
        for position, url_kwarg in enumerate(nested_kwargs):
            if url_kwarg not in self.kwargs:
                raise KeyError(
                    f"Expected view {self.__class__.__name__} to be called with a URL "
                    f'keyword argument named "{url_kwarg}". Fix your URL conf, or '
                    "set the `.lookup_fields` attribute on the view correctly."
                )
            nested_filters[self.lookup_fields[position]] = self.kwargs[url_kwarg]

        return queryset.filter(**nested_filters)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class SerializerPerActionMixin:
    """
    Mixin resolving the serializer class from the current action.

    Instead of overriding `get_serializer_class` with a chain of conditions,
    declare one `<action>_serializer_class` attribute per action; any action
    without a dedicated attribute falls back to the regular resolution.

    Example:
    ```
    class MyViewSet(SerializerPerActionMixin, viewsets.GenericViewSet):
        serializer_class = MySerializer
        list_serializer_class = MyListSerializer
        retrieve_serializer_class = MyRetrieveSerializer
    ```
    """

    def get_serializer_class(self):
        """Return the action-specific serializer class if one is declared."""
        per_action = getattr(self, f"{self.action}_serializer_class", None)
        if per_action:
            return per_action

        return super().get_serializer_class()
|
2024-01-09 15:30:36 +01:00
|
|
|
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
class Pagination(drf.pagination.PageNumberPagination):
    """Pagination to display no more than 100 objects per page sorted by creation date."""

    # Default ordering applied to paginated lists: most recently created first.
    ordering = "-created_on"
    # Hard ceiling a client may request through the page size query parameter.
    max_page_size = 200
    # Query string parameter allowing clients to choose their own page size.
    page_size_query_param = "page_size"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class UserViewSet(
    drf.mixins.UpdateModelMixin, viewsets.GenericViewSet, drf.mixins.ListModelMixin
):
    """User ViewSet"""

    permission_classes = [permissions.IsSelf]
    # Inactive accounts are never exposed by this endpoint.
    queryset = models.User.objects.filter(is_active=True)
    serializer_class = serializers.UserSerializer
    pagination_class = None
    throttle_classes = []

    def get_throttles(self):
        """Throttle only the `list` action (burst and sustained rates)."""
        self.throttle_classes = []
        if self.action == "list":
            self.throttle_classes = [UserListThrottleBurst, UserListThrottleSustained]

        return super().get_throttles()

    def get_queryset(self):
        """
        Limit listed users by querying the email field with a trigram similarity
        search if a query is provided.
        Limit listed users by excluding users already in the document if a document_id
        is provided.
        """
        queryset = self.queryset

        # Only the list action performs search filtering; detail actions use the
        # base queryset as-is.
        if self.action != "list":
            return queryset

        filterset = UserSearchFilter(
            self.request.GET, queryset=queryset, request=self.request
        )
        if not filterset.is_valid():
            raise drf.exceptions.ValidationError(filterset.errors)

        # Exclude all users already in the given document
        if document_id := self.request.query_params.get("document_id", ""):
            queryset = queryset.exclude(documentaccess__document_id=document_id)

        filter_data = filterset.form.cleaned_data
        # Normalize the search term so accented and unaccented spellings match.
        query = remove_accents(filter_data["q"])

        # For emails, match emails by Levenshtein distance to prevent typing errors
        # NOTE: relies on the PostgreSQL `levenshtein` and `unaccent` SQL functions
        # being available in the database.
        if "@" in query:
            return (
                queryset.annotate(
                    distance=RawSQL(
                        "levenshtein(unaccent(email::text), %s::text)", (query,)
                    )
                )
                .filter(distance__lte=3)
                .order_by("distance", "email")[: settings.API_USERS_LIST_LIMIT]
            )

        # Use trigram similarity for non-email-like queries
        # For performance reasons we filter first by similarity, which relies on an
        # index, then only calculate precise similarity scores for sorting purposes
        return (
            queryset.annotate(
                sim_email=TrigramSimilarity("email", query),
                sim_name=TrigramSimilarity("full_name", query),
            )
            # Match on whichever of email or full name is the closest.
            .annotate(similarity=Greatest("sim_email", "sim_name"))
            .filter(similarity__gt=0.2)
            .order_by("-similarity")[: settings.API_USERS_LIST_LIMIT]
        )

    @drf.decorators.action(
        detail=False,
        methods=["get"],
        url_name="me",
        url_path="me",
        permission_classes=[permissions.IsAuthenticated],
    )
    def get_me(self, request):
        """
        Return information on currently logged user
        """
        context = {"request": request}
        return drf.response.Response(
            self.serializer_class(request.user, context=context).data
        )
|
|
|
|
|
|
|
|
|
|
|
2024-04-03 18:50:28 +02:00
|
|
|
class ResourceAccessViewsetMixin:
    """Mixin with methods common to all access viewsets."""

    def filter_queryset(self, queryset):
        """Restrict the parent filtering to accesses of the targeted resource."""
        resource_filter = {self.resource_field_name: self.kwargs["resource_id"]}
        return super().filter_queryset(queryset).filter(**resource_filter)

    def get_serializer_context(self):
        """Expose the resource id from the URL to the serializer."""
        serializer_context = super().get_serializer_context()
        serializer_context["resource_id"] = self.kwargs["resource_id"]
        return serializer_context
|
|
|
|
|
|
2024-04-03 18:50:28 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
class DocumentMetadata(drf.metadata.SimpleMetadata):
    """Custom metadata class to add information"""

    def determine_metadata(self, request, view):
        """Add language choices only for the list endpoint."""
        metadata = super().determine_metadata(request, view)

        if not request.path.endswith("/documents/"):
            return metadata

        language_choices = [
            {"value": code, "display_name": name}
            for code, name in enums.ALL_LANGUAGES.items()
        ]
        metadata["actions"]["POST"]["language"] = {"choices": language_choices}
        return metadata
|
|
|
|
|
|
|
|
|
|
|
2025-01-02 23:15:03 +01:00
|
|
|
# pylint: disable=too-many-public-methods
|
2024-04-03 18:50:28 +02:00
|
|
|
class DocumentViewSet(
|
2025-01-17 19:50:03 +01:00
|
|
|
SerializerPerActionMixin,
|
2024-11-18 07:59:55 +01:00
|
|
|
drf.mixins.CreateModelMixin,
|
|
|
|
|
drf.mixins.DestroyModelMixin,
|
|
|
|
|
drf.mixins.UpdateModelMixin,
|
2025-01-02 17:20:09 +01:00
|
|
|
viewsets.GenericViewSet,
|
2024-04-03 18:50:28 +02:00
|
|
|
):
|
2024-11-12 16:28:34 +01:00
|
|
|
"""
|
2025-01-02 17:20:09 +01:00
|
|
|
DocumentViewSet API.
|
|
|
|
|
|
|
|
|
|
This view set provides CRUD operations and additional actions for managing documents.
|
|
|
|
|
Supports filtering, ordering, and annotations for enhanced querying capabilities.
|
|
|
|
|
|
|
|
|
|
### API Endpoints:
|
|
|
|
|
1. **List**: Retrieve a paginated list of documents.
|
|
|
|
|
Example: GET /documents/?page=2
|
|
|
|
|
2. **Retrieve**: Get a specific document by its ID.
|
|
|
|
|
Example: GET /documents/{id}/
|
|
|
|
|
3. **Create**: Create a new document.
|
|
|
|
|
Example: POST /documents/
|
|
|
|
|
4. **Update**: Update a document by its ID.
|
|
|
|
|
Example: PUT /documents/{id}/
|
|
|
|
|
5. **Delete**: Soft delete a document by its ID.
|
|
|
|
|
Example: DELETE /documents/{id}/
|
|
|
|
|
|
|
|
|
|
### Additional Actions:
|
|
|
|
|
1. **Trashbin**: List soft deleted documents for a document owner
|
|
|
|
|
Example: GET /documents/{id}/trashbin/
|
|
|
|
|
|
|
|
|
|
2. **Children**: List or create child documents.
|
|
|
|
|
Example: GET, POST /documents/{id}/children/
|
|
|
|
|
|
|
|
|
|
3. **Versions List**: Retrieve version history of a document.
|
|
|
|
|
Example: GET /documents/{id}/versions/
|
|
|
|
|
|
|
|
|
|
4. **Version Detail**: Get or delete a specific document version.
|
|
|
|
|
Example: GET, DELETE /documents/{id}/versions/{version_id}/
|
|
|
|
|
|
|
|
|
|
5. **Favorite**: Get list of favorite documents for a user. Mark or unmark
|
|
|
|
|
a document as favorite.
|
|
|
|
|
Examples:
|
|
|
|
|
- GET /documents/favorite/
|
|
|
|
|
- POST, DELETE /documents/{id}/favorite/
|
|
|
|
|
|
|
|
|
|
6. **Create for Owner**: Create a document via server-to-server on behalf of a user.
|
|
|
|
|
Example: POST /documents/create-for-owner/
|
|
|
|
|
|
|
|
|
|
7. **Link Configuration**: Update document link configuration.
|
|
|
|
|
Example: PUT /documents/{id}/link-configuration/
|
|
|
|
|
|
|
|
|
|
8. **Attachment Upload**: Upload a file attachment for the document.
|
|
|
|
|
Example: POST /documents/{id}/attachment-upload/
|
|
|
|
|
|
|
|
|
|
9. **Media Auth**: Authorize access to document media.
|
|
|
|
|
Example: GET /documents/media-auth/
|
|
|
|
|
|
2025-03-25 15:04:47 +01:00
|
|
|
10. **AI Transform**: Apply a transformation action on a piece of text with AI.
|
2025-01-02 17:20:09 +01:00
|
|
|
Example: POST /documents/{id}/ai-transform/
|
|
|
|
|
Expected data:
|
|
|
|
|
- text (str): The input text.
|
|
|
|
|
- action (str): The transformation type, one of [prompt, correct, rephrase, summarize].
|
|
|
|
|
Returns: JSON response with the processed text.
|
|
|
|
|
Throttled by: AIDocumentRateThrottle, AIUserRateThrottle.
|
|
|
|
|
|
2025-03-25 15:04:47 +01:00
|
|
|
11. **AI Translate**: Translate a piece of text with AI.
|
2025-01-02 17:20:09 +01:00
|
|
|
Example: POST /documents/{id}/ai-translate/
|
|
|
|
|
Expected data:
|
|
|
|
|
- text (str): The input text.
|
|
|
|
|
- language (str): The target language, chosen from settings.LANGUAGES.
|
|
|
|
|
Returns: JSON response with the translated text.
|
|
|
|
|
Throttled by: AIDocumentRateThrottle, AIUserRateThrottle.
|
|
|
|
|
|
|
|
|
|
### Ordering: created_at, updated_at, is_favorite, title
|
|
|
|
|
|
|
|
|
|
Example:
|
|
|
|
|
- Ascending: GET /api/v1.0/documents/?ordering=created_at
|
2025-05-26 10:27:17 +02:00
|
|
|
- Descending: GET /api/v1.0/documents/?ordering=-title
|
2025-01-02 17:20:09 +01:00
|
|
|
|
|
|
|
|
### Filtering:
|
2024-11-12 16:28:34 +01:00
|
|
|
- `is_creator_me=true`: Returns documents created by the current user.
|
|
|
|
|
- `is_creator_me=false`: Returns documents created by other users.
|
2024-11-13 08:58:12 +01:00
|
|
|
- `is_favorite=true`: Returns documents marked as favorite by the current user
|
|
|
|
|
- `is_favorite=false`: Returns documents not marked as favorite by the current user
|
2024-11-15 09:42:27 +01:00
|
|
|
- `title=hello`: Returns documents which title contains the "hello" string
|
2024-11-12 16:28:34 +01:00
|
|
|
|
2025-01-02 17:20:09 +01:00
|
|
|
Example:
|
2024-11-13 08:58:12 +01:00
|
|
|
- GET /api/v1.0/documents/?is_creator_me=true&is_favorite=true
|
2024-11-15 09:42:27 +01:00
|
|
|
- GET /api/v1.0/documents/?is_creator_me=false&title=hello
|
2025-01-02 17:20:09 +01:00
|
|
|
|
|
|
|
|
### Annotations:
|
|
|
|
|
1. **is_favorite**: Indicates whether the document is marked as favorite by the current user.
|
|
|
|
|
2. **user_roles**: Roles the current user has on the document or its ancestors.
|
|
|
|
|
|
|
|
|
|
### Notes:
|
|
|
|
|
- Only the highest ancestor in a document hierarchy is shown in list views.
|
|
|
|
|
- Implements soft delete logic to retain document tree structures.
|
2024-11-12 16:28:34 +01:00
|
|
|
"""
|
2024-04-03 18:50:28 +02:00
|
|
|
|
2024-11-09 10:27:21 +01:00
|
|
|
metadata_class = DocumentMetadata
|
|
|
|
|
ordering = ["-updated_at"]
|
2025-01-02 17:20:09 +01:00
|
|
|
ordering_fields = ["created_at", "updated_at", "title"]
|
2025-02-15 13:16:35 +01:00
|
|
|
pagination_class = Pagination
|
2024-04-03 18:50:28 +02:00
|
|
|
permission_classes = [
|
2025-05-06 09:41:16 +02:00
|
|
|
permissions.DocumentPermission,
|
2024-04-03 18:50:28 +02:00
|
|
|
]
|
2025-12-12 09:24:04 +01:00
|
|
|
throttle_classes = [DocumentThrottle]
|
2025-09-05 15:29:08 +02:00
|
|
|
throttle_scope = "document"
|
2025-08-22 14:57:02 +02:00
|
|
|
queryset = models.Document.objects.select_related("creator").all()
|
2024-11-09 10:27:21 +01:00
|
|
|
serializer_class = serializers.DocumentSerializer
|
2025-02-16 17:26:51 +01:00
|
|
|
ai_translate_serializer_class = serializers.AITranslateSerializer
|
|
|
|
|
children_serializer_class = serializers.ListDocumentSerializer
|
2025-02-17 10:25:07 +01:00
|
|
|
descendants_serializer_class = serializers.ListDocumentSerializer
|
2025-01-17 19:50:03 +01:00
|
|
|
list_serializer_class = serializers.ListDocumentSerializer
|
|
|
|
|
trashbin_serializer_class = serializers.ListDocumentSerializer
|
2025-02-16 17:26:51 +01:00
|
|
|
tree_serializer_class = serializers.ListDocumentSerializer
|
2025-08-13 06:50:58 +02:00
|
|
|
search_serializer_class = serializers.ListDocumentSerializer
|
2024-04-03 18:50:28 +02:00
|
|
|
|
2024-12-17 07:47:23 +01:00
|
|
|
def get_queryset(self):
    """Get queryset performing all annotation and filtering on the document tree structure."""
    user = self.request.user
    queryset = super().get_queryset()

    # Only list views need filtering and annotation
    if self.detail:
        return queryset

    # Anonymous users never see anything in list views.
    if not user.is_authenticated:
        return queryset.none()

    # Hide documents whose ancestor (or self) was soft-deleted.
    queryset = queryset.filter(ancestors_deleted_at__isnull=True)

    # Filter documents to which the current user has access...
    # (direct user access or access granted through one of the user's teams)
    access_documents_ids = models.DocumentAccess.objects.filter(
        db.Q(user=user) | db.Q(team__in=user.teams)
    ).values_list("document_id", flat=True)

    # ...or that were previously accessed and are not restricted
    traced_documents_ids = models.LinkTrace.objects.filter(user=user).values_list(
        "document_id", flat=True
    )

    return queryset.filter(
        db.Q(id__in=access_documents_ids)
        | (
            db.Q(id__in=traced_documents_ids)
            & ~db.Q(link_reach=models.LinkReachChoices.RESTRICTED)
        )
    )
|
|
|
|
|
|
|
|
|
|
def filter_queryset(self, queryset):
    """Apply the standard filtering, then add the per-user annotations."""
    current_user = self.request.user
    return (
        super()
        .filter_queryset(queryset)
        .annotate_is_favorite(current_user)
        .annotate_user_roles(current_user)
    )
|
|
|
|
|
|
2025-04-25 08:03:12 +02:00
|
|
|
def get_response_for_queryset(self, queryset, context=None):
    """Serialize the queryset, paginating the response when pagination applies."""
    serializer_context = context or self.get_serializer_context()

    page = self.paginate_queryset(queryset)
    if page is None:
        # Pagination disabled or not requested: serialize the full queryset.
        full = self.get_serializer(queryset, many=True, context=serializer_context)
        return drf.response.Response(full.data)

    paginated = self.get_serializer(page, many=True, context=serializer_context)
    return self.get_paginated_response(paginated.data)
|
|
|
|
|
|
|
|
|
|
def list(self, request, *args, **kwargs):
    """
    Returns a DRF response containing the filtered, annotated and ordered document list.

    This method applies filtering based on request parameters using `ListDocumentFilter`.
    It performs early filtering on model fields, annotates user roles, and removes
    descendant documents to keep only the highest ancestors readable by the current user.
    """
    user = self.request.user

    # Not calling filter_queryset. We do our own cooking.
    queryset = self.get_queryset()

    filterset = ListDocumentFilter(
        self.request.GET, queryset=queryset, request=self.request
    )
    if not filterset.is_valid():
        raise drf.exceptions.ValidationError(filterset.errors)
    filter_data = filterset.form.cleaned_data

    # Filter as early as possible on fields that are available on the model
    for field in ["is_creator_me", "title"]:
        queryset = filterset.filters[field].filter(queryset, filter_data[field])

    queryset = queryset.annotate_user_roles(user)

    # Among the results, we may have documents that are ancestors/descendants
    # of each other. In this case we want to keep only the highest ancestors.
    root_paths = utils.filter_root_paths(
        # Paths are fetched pre-sorted so the helper can skip its own sorting.
        queryset.order_by("path").values_list("path", flat=True),
        skip_sorting=True,
    )
    queryset = queryset.filter(path__in=root_paths)

    # Annotate favorite status and filter if applicable as late as possible
    queryset = queryset.annotate_is_favorite(user)
    for field in ["is_favorite", "is_masked"]:
        queryset = filterset.filters[field].filter(queryset, filter_data[field])

    # Apply ordering only now that everything is filtered and annotated
    queryset = filters.OrderingFilter().filter_queryset(
        self.request, queryset, self
    )

    return self.get_response_for_queryset(queryset)
|
2024-09-08 23:37:49 +02:00
|
|
|
|
|
|
|
|
def retrieve(self, request, *args, **kwargs):
    """
    Add a trace that the document was accessed by a user. This is used to list documents
    on a user's list view even though the user has no specific role in the document (link
    access when the link reach configuration of the document allows it).
    """
    document = self.get_object()
    serializer = self.get_serializer(document)

    # An `exists` query is much cheaper than the 5 db queries a `create`
    # generates, and the user will visit the document many times after the
    # first visit, so the repeat-visit path is the one to optimize for.
    user = self.request.user
    if user.is_authenticated:
        already_traced = document.link_traces.filter(user=user).exists()
        if not already_traced:
            models.LinkTrace.objects.create(document=document, user=request.user)

    return drf.response.Response(serializer.data)
|
2024-09-08 23:37:49 +02:00
|
|
|
|
2025-01-27 21:20:16 +01:00
|
|
|
@transaction.atomic
def perform_create(self, serializer):
    """Set the current user as creator and owner of the newly created object."""

    # locks the table to ensure safe concurrent access
    # The lock is released when the transaction opened by @transaction.atomic ends.
    with connection.cursor() as cursor:
        cursor.execute(
            f'LOCK TABLE "{models.Document._meta.db_table}" '  # noqa: SLF001
            "IN SHARE ROW EXCLUSIVE MODE;"
        )

    # Insert the document as a new root node of the document tree.
    obj = models.Document.add_root(
        creator=self.request.user,
        **serializer.validated_data,
    )
    # `add_root` builds the instance itself, so attach it back to the serializer.
    serializer.instance = obj
    # Grant the creator full ownership of the new document.
    models.DocumentAccess.objects.create(
        document=obj,
        user=self.request.user,
        role=models.RoleChoices.OWNER,
    )
|
|
|
|
|
|
2025-01-02 17:20:09 +01:00
|
|
|
def perform_destroy(self, instance):
    """Soft delete the document instead of removing the record from the database."""
    instance.soft_delete()
|
|
|
|
|
|
2025-06-26 07:17:00 +02:00
|
|
|
def _can_user_edit_document(self, document_id, set_cache=False):
    """
    Check if the user can edit the document.

    Args:
        document_id: Id of the document being edited.
        set_cache: When True, record (or refresh) the current session as the
            "no websocket" editor of the document in the cache.

    Returns:
        bool: whether the current session is allowed to edit the document.
    """
    try:
        count, exists = CollaborationService().get_document_connection_info(
            document_id,
            self.request.session.session_key,
        )
    except requests.HTTPError as e:
        # If the collaboration server is unreachable, fall through to the
        # cache-based single-editor logic below (count=0 path).
        logger.exception("Failed to call collaboration server: %s", e)
        count = 0
        exists = False

    if count == 0:
        # Nobody is connected to the websocket server
        logger.debug("update without connection found in the websocket server")
        cache_key = f"docs:no-websocket:{document_id}"
        current_editor = cache.get(cache_key)

        if not current_editor:
            # No editor claimed the document yet: the current session may edit,
            # optionally claiming the editor slot for itself.
            if set_cache:
                cache.set(
                    cache_key,
                    self.request.session.session_key,
                    settings.NO_WEBSOCKET_CACHE_TIMEOUT,
                )
            return True

        # Another session already holds the editor slot.
        if current_editor != self.request.session.session_key:
            return False

        # The current session is the editor: refresh its claim if requested.
        if set_cache:
            cache.touch(cache_key, settings.NO_WEBSOCKET_CACHE_TIMEOUT)
        return True

    if exists:
        # Current user is connected to the websocket server
        logger.debug("session key found in the websocket server")
        return True

    logger.debug(
        "Users connected to the websocket but current editor not connected to it. Can not edit."
    )

    return False
|
|
|
|
|
|
|
|
|
|
def perform_update(self, serializer):
    """Check rules about collaboration."""
    via_websocket = serializer.validated_data.get("websocket", False)
    enforcement_enabled = settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY

    # Websocket-originated updates, or deployments that don't enforce the
    # "connected editors only" rule, bypass the collaboration check entirely.
    if via_websocket or not enforcement_enabled:
        return super().perform_update(serializer)

    if not self._can_user_edit_document(serializer.instance.id, set_cache=True):
        raise drf.exceptions.PermissionDenied(
            "You are not allowed to edit this document."
        )

    return super().perform_update(serializer)
|
|
|
|
|
|
2025-06-26 07:17:00 +02:00
|
|
|
@drf.decorators.action(
    detail=True,
    methods=["get"],
    url_path="can-edit",
)
def can_edit(self, request, *args, **kwargs):
    """Check if the current user can edit the document."""
    document = self.get_object()

    if settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY:
        editable = self._can_user_edit_document(document.id)
    else:
        # Enforcement disabled: everyone with access may edit.
        editable = True

    return drf.response.Response({"can_edit": editable})
|
|
|
|
|
|
2025-01-02 17:20:09 +01:00
|
|
|
@drf.decorators.action(
    detail=False,
    methods=["get"],
    permission_classes=[permissions.IsAuthenticated],
)
def favorite_list(self, request, *args, **kwargs):
    """Get list of favorite documents for the current user."""
    favorite_ids = models.DocumentFavorite.objects.filter(
        user=request.user
    ).values_list("document_id", flat=True)

    documents = self.filter_queryset(self.get_queryset()).filter(
        id__in=favorite_ids
    )
    return self.get_response_for_queryset(documents)
|
|
|
|
|
|
|
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=False,
|
|
|
|
|
methods=["get"],
|
|
|
|
|
)
|
|
|
|
|
def trashbin(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Retrieve soft-deleted documents for which the current user has the owner role.
|
|
|
|
|
|
|
|
|
|
The selected documents are those deleted within the cutoff period defined in the
|
|
|
|
|
settings (see TRASHBIN_CUTOFF_DAYS), before they are considered permanently deleted.
|
|
|
|
|
"""
|
2025-10-16 17:06:47 +02:00
|
|
|
|
|
|
|
|
if not request.user.is_authenticated:
|
|
|
|
|
return self.get_response_for_queryset(self.queryset.none())
|
|
|
|
|
|
|
|
|
|
access_documents_paths = (
|
|
|
|
|
models.DocumentAccess.objects.select_related("document")
|
|
|
|
|
.filter(
|
|
|
|
|
db.Q(user=self.request.user) | db.Q(team__in=self.request.user.teams),
|
|
|
|
|
role=models.RoleChoices.OWNER,
|
|
|
|
|
)
|
|
|
|
|
.values_list("document__path", flat=True)
|
|
|
|
|
)
|
|
|
|
|
|
2025-10-23 12:03:31 +02:00
|
|
|
if not access_documents_paths:
|
|
|
|
|
return self.get_response_for_queryset(self.queryset.none())
|
|
|
|
|
|
2025-10-16 17:06:47 +02:00
|
|
|
children_clause = db.Q()
|
|
|
|
|
for path in access_documents_paths:
|
|
|
|
|
children_clause |= db.Q(path__startswith=path)
|
|
|
|
|
|
2025-01-02 17:20:09 +01:00
|
|
|
queryset = self.queryset.filter(
|
2025-10-16 17:06:47 +02:00
|
|
|
children_clause,
|
2025-01-02 17:20:09 +01:00
|
|
|
deleted_at__isnull=False,
|
|
|
|
|
deleted_at__gte=models.get_trashbin_cutoff(),
|
|
|
|
|
)
|
2025-04-12 11:35:36 +02:00
|
|
|
queryset = queryset.annotate_user_roles(self.request.user)
|
2025-01-02 17:20:09 +01:00
|
|
|
|
|
|
|
|
return self.get_response_for_queryset(queryset)
|
|
|
|
|
|
2024-12-01 11:25:01 +01:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
authentication_classes=[authentication.ServerToServerAuthentication],
|
|
|
|
|
detail=False,
|
|
|
|
|
methods=["post"],
|
|
|
|
|
permission_classes=[],
|
|
|
|
|
url_path="create-for-owner",
|
|
|
|
|
)
|
2025-02-12 10:13:41 +01:00
|
|
|
@transaction.atomic
|
2024-12-01 11:25:01 +01:00
|
|
|
def create_for_owner(self, request):
|
|
|
|
|
"""
|
|
|
|
|
Create a document on behalf of a specified owner (pre-existing user or invited).
|
|
|
|
|
"""
|
2025-02-12 10:13:41 +01:00
|
|
|
|
|
|
|
|
# locks the table to ensure safe concurrent access
|
|
|
|
|
with connection.cursor() as cursor:
|
|
|
|
|
cursor.execute(
|
|
|
|
|
f'LOCK TABLE "{models.Document._meta.db_table}" ' # noqa: SLF001
|
|
|
|
|
"IN SHARE ROW EXCLUSIVE MODE;"
|
|
|
|
|
)
|
|
|
|
|
|
2024-12-01 11:25:01 +01:00
|
|
|
# Deserialize and validate the data
|
|
|
|
|
serializer = serializers.ServerCreateDocumentSerializer(data=request.data)
|
|
|
|
|
if not serializer.is_valid():
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
document = serializer.save()
|
|
|
|
|
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{"id": str(document.id)}, status=status.HTTP_201_CREATED
|
|
|
|
|
)
|
|
|
|
|
|
2025-01-02 23:15:03 +01:00
|
|
|
@drf.decorators.action(detail=True, methods=["post"])
|
|
|
|
|
@transaction.atomic
|
|
|
|
|
def move(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Move a document to another location within the document tree.
|
|
|
|
|
|
|
|
|
|
The user must be an administrator or owner of both the document being moved
|
|
|
|
|
and the target parent document.
|
|
|
|
|
"""
|
|
|
|
|
user = request.user
|
|
|
|
|
document = self.get_object() # including permission checks
|
|
|
|
|
|
|
|
|
|
# Validate the input payload
|
|
|
|
|
serializer = serializers.MoveDocumentSerializer(data=request.data)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
validated_data = serializer.validated_data
|
|
|
|
|
|
|
|
|
|
target_document_id = validated_data["target_document_id"]
|
|
|
|
|
try:
|
|
|
|
|
target_document = models.Document.objects.get(
|
|
|
|
|
id=target_document_id, ancestors_deleted_at__isnull=True
|
|
|
|
|
)
|
|
|
|
|
except models.Document.DoesNotExist:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"target_document_id": "Target parent document does not exist."},
|
|
|
|
|
status=status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
position = validated_data["position"]
|
|
|
|
|
message = None
|
2025-05-04 22:16:34 +02:00
|
|
|
owner_accesses = []
|
2025-01-02 23:15:03 +01:00
|
|
|
if position in [
|
|
|
|
|
enums.MoveNodePositionChoices.FIRST_CHILD,
|
|
|
|
|
enums.MoveNodePositionChoices.LAST_CHILD,
|
|
|
|
|
]:
|
|
|
|
|
if not target_document.get_abilities(user).get("move"):
|
|
|
|
|
message = (
|
|
|
|
|
"You do not have permission to move documents "
|
|
|
|
|
"as a child to this target document."
|
|
|
|
|
)
|
2025-05-04 22:16:34 +02:00
|
|
|
elif target_document.is_root():
|
|
|
|
|
owner_accesses = document.get_root().accesses.filter(
|
|
|
|
|
role=models.RoleChoices.OWNER
|
|
|
|
|
)
|
|
|
|
|
elif not target_document.get_parent().get_abilities(user).get("move"):
|
|
|
|
|
message = (
|
|
|
|
|
"You do not have permission to move documents "
|
|
|
|
|
"as a sibling of this target document."
|
|
|
|
|
)
|
2025-01-02 23:15:03 +01:00
|
|
|
|
|
|
|
|
if message:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"target_document_id": message},
|
|
|
|
|
status=status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
document.move(target_document, pos=position)
|
|
|
|
|
|
2025-05-04 22:16:34 +02:00
|
|
|
# Make sure we have at least one owner
|
|
|
|
|
if (
|
|
|
|
|
owner_accesses
|
|
|
|
|
and not document.accesses.filter(role=models.RoleChoices.OWNER).exists()
|
|
|
|
|
):
|
|
|
|
|
for owner_access in owner_accesses:
|
|
|
|
|
models.DocumentAccess.objects.update_or_create(
|
|
|
|
|
document=document,
|
|
|
|
|
user=owner_access.user,
|
|
|
|
|
team=owner_access.team,
|
|
|
|
|
defaults={"role": models.RoleChoices.OWNER},
|
|
|
|
|
)
|
|
|
|
|
|
2025-01-02 23:15:03 +01:00
|
|
|
return drf.response.Response(
|
|
|
|
|
{"message": "Document moved successfully."}, status=status.HTTP_200_OK
|
|
|
|
|
)
|
|
|
|
|
|
2025-01-05 14:43:15 +01:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["post"],
|
|
|
|
|
)
|
|
|
|
|
def restore(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Restore a soft-deleted document if it was deleted less than x days ago.
|
|
|
|
|
"""
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
document.restore()
|
|
|
|
|
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{"detail": "Document has been successfully restored."},
|
|
|
|
|
status=status.HTTP_200_OK,
|
|
|
|
|
)
|
|
|
|
|
|
2024-12-17 19:03:45 +01:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["get", "post"],
|
2025-01-02 17:20:09 +01:00
|
|
|
ordering=["path"],
|
2024-12-17 19:03:45 +01:00
|
|
|
)
|
2024-12-18 08:44:12 +01:00
|
|
|
def children(self, request, *args, **kwargs):
|
|
|
|
|
"""Handle listing and creating children of a document"""
|
2024-12-17 19:03:45 +01:00
|
|
|
document = self.get_object()
|
2024-12-18 08:44:12 +01:00
|
|
|
|
|
|
|
|
if request.method == "POST":
|
|
|
|
|
# Create a child document
|
|
|
|
|
serializer = serializers.DocumentSerializer(
|
|
|
|
|
data=request.data, context=self.get_serializer_context()
|
|
|
|
|
)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
|
|
|
|
|
with transaction.atomic():
|
2025-02-12 10:13:41 +01:00
|
|
|
# "select_for_update" locks the table to ensure safe concurrent access
|
|
|
|
|
locked_parent = models.Document.objects.select_for_update().get(
|
|
|
|
|
pk=document.pk
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
child_document = locked_parent.add_child(
|
2024-12-18 08:44:12 +01:00
|
|
|
creator=request.user,
|
|
|
|
|
**serializer.validated_data,
|
|
|
|
|
)
|
2025-05-04 22:16:34 +02:00
|
|
|
|
2024-12-18 08:44:12 +01:00
|
|
|
# Set the created instance to the serializer
|
|
|
|
|
serializer.instance = child_document
|
|
|
|
|
|
|
|
|
|
headers = self.get_success_headers(serializer.data)
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
serializer.data, status=status.HTTP_201_CREATED, headers=headers
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# GET: List children
|
2025-08-22 14:57:02 +02:00
|
|
|
queryset = (
|
|
|
|
|
document.get_children()
|
|
|
|
|
.select_related("creator")
|
|
|
|
|
.filter(ancestors_deleted_at__isnull=True)
|
|
|
|
|
)
|
2025-01-02 17:20:09 +01:00
|
|
|
queryset = self.filter_queryset(queryset)
|
2025-02-17 10:19:06 +01:00
|
|
|
|
|
|
|
|
filterset = DocumentFilter(request.GET, queryset=queryset)
|
|
|
|
|
if not filterset.is_valid():
|
|
|
|
|
raise drf.exceptions.ValidationError(filterset.errors)
|
|
|
|
|
|
|
|
|
|
queryset = filterset.qs
|
|
|
|
|
|
2025-04-25 08:03:12 +02:00
|
|
|
# Pass ancestors' links paths mapping to the serializer as a context variable
|
|
|
|
|
# in order to allow saving time while computing abilities on the instance
|
|
|
|
|
paths_links_mapping = document.compute_ancestors_links_paths_mapping()
|
|
|
|
|
|
|
|
|
|
return self.get_response_for_queryset(
|
|
|
|
|
queryset,
|
|
|
|
|
context={
|
|
|
|
|
"request": request,
|
|
|
|
|
"paths_links_mapping": paths_links_mapping,
|
|
|
|
|
},
|
|
|
|
|
)
|
2025-02-17 10:19:06 +01:00
|
|
|
|
|
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["get"],
|
|
|
|
|
ordering=["path"],
|
|
|
|
|
)
|
|
|
|
|
def descendants(self, request, *args, **kwargs):
|
|
|
|
|
"""Handle listing descendants of a document"""
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
|
|
|
|
|
queryset = document.get_descendants().filter(ancestors_deleted_at__isnull=True)
|
|
|
|
|
queryset = self.filter_queryset(queryset)
|
|
|
|
|
|
|
|
|
|
filterset = DocumentFilter(request.GET, queryset=queryset)
|
|
|
|
|
if not filterset.is_valid():
|
|
|
|
|
raise drf.exceptions.ValidationError(filterset.errors)
|
|
|
|
|
|
|
|
|
|
queryset = filterset.qs
|
|
|
|
|
|
2025-01-02 17:20:09 +01:00
|
|
|
return self.get_response_for_queryset(queryset)
|
2024-12-17 19:03:45 +01:00
|
|
|
|
2025-02-16 17:26:51 +01:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["get"],
|
|
|
|
|
ordering=["path"],
|
|
|
|
|
)
|
|
|
|
|
def tree(self, request, pk, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
List ancestors tree above the document.
|
|
|
|
|
What we need to display is the tree structure opened for the current document.
|
|
|
|
|
"""
|
2025-04-12 11:35:36 +02:00
|
|
|
user = self.request.user
|
|
|
|
|
|
2025-02-16 17:26:51 +01:00
|
|
|
try:
|
2025-08-22 14:57:02 +02:00
|
|
|
current_document = (
|
2025-10-06 11:25:17 +02:00
|
|
|
self.queryset.select_related(None)
|
|
|
|
|
.only("depth", "path", "ancestors_deleted_at")
|
|
|
|
|
.get(pk=pk)
|
2025-08-22 14:57:02 +02:00
|
|
|
)
|
2025-02-16 17:26:51 +01:00
|
|
|
except models.Document.DoesNotExist as excpt:
|
2025-05-06 09:41:16 +02:00
|
|
|
raise drf.exceptions.NotFound() from excpt
|
2025-02-16 17:26:51 +01:00
|
|
|
|
2025-10-06 11:25:17 +02:00
|
|
|
is_deleted = current_document.ancestors_deleted_at is not None
|
2025-02-16 17:26:51 +01:00
|
|
|
|
2025-10-06 11:25:17 +02:00
|
|
|
if is_deleted:
|
|
|
|
|
if current_document.get_role(user) != models.RoleChoices.OWNER:
|
|
|
|
|
raise (
|
|
|
|
|
drf.exceptions.PermissionDenied()
|
|
|
|
|
if request.user.is_authenticated
|
|
|
|
|
else drf.exceptions.NotAuthenticated()
|
|
|
|
|
)
|
|
|
|
|
highest_readable = current_document
|
|
|
|
|
ancestors = self.queryset.select_related(None).filter(pk=pk)
|
|
|
|
|
else:
|
|
|
|
|
ancestors = (
|
|
|
|
|
(
|
|
|
|
|
current_document.get_ancestors()
|
|
|
|
|
| self.queryset.select_related(None).filter(pk=pk)
|
|
|
|
|
)
|
|
|
|
|
.filter(ancestors_deleted_at__isnull=True)
|
|
|
|
|
.order_by("path")
|
|
|
|
|
)
|
|
|
|
|
# Get the highest readable ancestor
|
|
|
|
|
highest_readable = (
|
|
|
|
|
ancestors.select_related(None)
|
|
|
|
|
.readable_per_se(request.user)
|
|
|
|
|
.only("depth", "path")
|
|
|
|
|
.first()
|
2025-02-16 17:26:51 +01:00
|
|
|
)
|
2025-10-06 11:25:17 +02:00
|
|
|
|
|
|
|
|
if highest_readable is None:
|
|
|
|
|
raise (
|
|
|
|
|
drf.exceptions.PermissionDenied()
|
|
|
|
|
if request.user.is_authenticated
|
|
|
|
|
else drf.exceptions.NotAuthenticated()
|
|
|
|
|
)
|
2025-02-17 10:25:07 +01:00
|
|
|
paths_links_mapping = {}
|
|
|
|
|
ancestors_links = []
|
2025-02-16 17:26:51 +01:00
|
|
|
children_clause = db.Q()
|
|
|
|
|
for ancestor in ancestors:
|
2025-02-17 10:25:07 +01:00
|
|
|
# Compute cache for ancestors links to avoid many queries while computing
|
2025-05-13 16:00:12 +02:00
|
|
|
# abilities for his documents in the tree!
|
2025-02-17 10:25:07 +01:00
|
|
|
ancestors_links.append(
|
|
|
|
|
{"link_reach": ancestor.link_reach, "link_role": ancestor.link_role}
|
|
|
|
|
)
|
|
|
|
|
paths_links_mapping[ancestor.path] = ancestors_links.copy()
|
|
|
|
|
|
2025-04-25 08:03:12 +02:00
|
|
|
if ancestor.depth < highest_readable.depth:
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
children_clause |= db.Q(
|
|
|
|
|
path__startswith=ancestor.path, depth=ancestor.depth + 1
|
|
|
|
|
)
|
|
|
|
|
|
2025-02-16 17:26:51 +01:00
|
|
|
children = self.queryset.filter(children_clause, deleted_at__isnull=True)
|
|
|
|
|
|
2025-08-22 14:57:02 +02:00
|
|
|
queryset = (
|
|
|
|
|
ancestors.select_related("creator").filter(
|
|
|
|
|
depth__gte=highest_readable.depth
|
|
|
|
|
)
|
|
|
|
|
| children
|
|
|
|
|
)
|
2025-02-16 17:26:51 +01:00
|
|
|
queryset = queryset.order_by("path")
|
2025-04-12 11:35:36 +02:00
|
|
|
queryset = queryset.annotate_user_roles(user)
|
|
|
|
|
queryset = queryset.annotate_is_favorite(user)
|
2025-02-16 17:26:51 +01:00
|
|
|
|
2025-04-25 08:03:12 +02:00
|
|
|
# Pass ancestors' links paths mapping to the serializer as a context variable
|
2025-02-16 17:26:51 +01:00
|
|
|
# in order to allow saving time while computing abilities on the instance
|
|
|
|
|
serializer = self.get_serializer(
|
|
|
|
|
queryset,
|
|
|
|
|
many=True,
|
|
|
|
|
context={
|
|
|
|
|
"request": request,
|
2025-02-17 10:25:07 +01:00
|
|
|
"paths_links_mapping": paths_links_mapping,
|
2025-02-16 17:26:51 +01:00
|
|
|
},
|
|
|
|
|
)
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
utils.nest_tree(serializer.data, self.queryset.model.steplen)
|
|
|
|
|
)
|
|
|
|
|
|
2025-01-20 10:23:18 +01:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["post"],
|
2025-05-06 09:41:16 +02:00
|
|
|
permission_classes=[
|
|
|
|
|
permissions.IsAuthenticated,
|
|
|
|
|
permissions.DocumentPermission,
|
|
|
|
|
],
|
2025-01-20 10:23:18 +01:00
|
|
|
url_path="duplicate",
|
|
|
|
|
)
|
|
|
|
|
@transaction.atomic
|
|
|
|
|
def duplicate(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Duplicate a document and store the links to attached files in the duplicated
|
|
|
|
|
document to allow cross-access.
|
|
|
|
|
|
|
|
|
|
Optionally duplicates accesses if `with_accesses` is set to true
|
|
|
|
|
in the payload.
|
|
|
|
|
"""
|
|
|
|
|
# Get document while checking permissions
|
2025-09-15 22:44:58 +02:00
|
|
|
document_to_duplicate = self.get_object()
|
2025-01-20 10:23:18 +01:00
|
|
|
|
|
|
|
|
serializer = serializers.DocumentDuplicationSerializer(
|
|
|
|
|
data=request.data, partial=True
|
|
|
|
|
)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
with_accesses = serializer.validated_data.get("with_accesses", False)
|
2025-09-15 22:44:58 +02:00
|
|
|
user_role = document_to_duplicate.get_role(request.user)
|
|
|
|
|
is_owner_or_admin = user_role in models.PRIVILEGED_ROLES
|
2025-01-20 10:23:18 +01:00
|
|
|
|
2025-09-15 22:44:58 +02:00
|
|
|
base64_yjs_content = document_to_duplicate.content
|
2025-01-20 10:23:18 +01:00
|
|
|
|
|
|
|
|
# Duplicate the document instance
|
|
|
|
|
link_kwargs = (
|
2025-09-15 22:44:58 +02:00
|
|
|
{
|
|
|
|
|
"link_reach": document_to_duplicate.link_reach,
|
|
|
|
|
"link_role": document_to_duplicate.link_role,
|
|
|
|
|
}
|
2025-01-20 10:23:18 +01:00
|
|
|
if with_accesses
|
|
|
|
|
else {}
|
|
|
|
|
)
|
2025-09-15 22:44:58 +02:00
|
|
|
extracted_attachments = set(extract_attachments(document_to_duplicate.content))
|
|
|
|
|
attachments = list(
|
|
|
|
|
extracted_attachments & set(document_to_duplicate.attachments)
|
|
|
|
|
)
|
|
|
|
|
title = capfirst(_("copy of {title}").format(title=document_to_duplicate.title))
|
|
|
|
|
if not document_to_duplicate.is_root() and choices.RoleChoices.get_priority(
|
|
|
|
|
user_role
|
|
|
|
|
) < choices.RoleChoices.get_priority(models.RoleChoices.EDITOR):
|
|
|
|
|
duplicated_document = models.Document.add_root(
|
|
|
|
|
creator=self.request.user,
|
|
|
|
|
title=title,
|
|
|
|
|
content=base64_yjs_content,
|
|
|
|
|
attachments=attachments,
|
|
|
|
|
duplicated_from=document_to_duplicate,
|
|
|
|
|
**link_kwargs,
|
|
|
|
|
)
|
|
|
|
|
models.DocumentAccess.objects.create(
|
|
|
|
|
document=duplicated_document,
|
|
|
|
|
user=self.request.user,
|
|
|
|
|
role=models.RoleChoices.OWNER,
|
|
|
|
|
)
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{"id": str(duplicated_document.id)}, status=status.HTTP_201_CREATED
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
duplicated_document = document_to_duplicate.add_sibling(
|
2025-01-20 10:23:18 +01:00
|
|
|
"right",
|
2025-09-15 22:44:58 +02:00
|
|
|
title=title,
|
2025-01-20 10:23:18 +01:00
|
|
|
content=base64_yjs_content,
|
|
|
|
|
attachments=attachments,
|
2025-09-15 22:44:58 +02:00
|
|
|
duplicated_from=document_to_duplicate,
|
2025-01-20 10:23:18 +01:00
|
|
|
creator=request.user,
|
|
|
|
|
**link_kwargs,
|
|
|
|
|
)
|
|
|
|
|
|
2025-07-09 15:00:29 +02:00
|
|
|
# Always add the logged-in user as OWNER for root documents
|
2025-09-15 22:44:58 +02:00
|
|
|
if document_to_duplicate.is_root():
|
2025-07-09 15:00:29 +02:00
|
|
|
accesses_to_create = [
|
2025-01-20 10:23:18 +01:00
|
|
|
models.DocumentAccess(
|
|
|
|
|
document=duplicated_document,
|
2025-07-09 15:00:29 +02:00
|
|
|
user=request.user,
|
|
|
|
|
role=models.RoleChoices.OWNER,
|
|
|
|
|
)
|
|
|
|
|
]
|
|
|
|
|
|
|
|
|
|
# If accesses should be duplicated, add other users' accesses as per original document
|
|
|
|
|
if with_accesses and is_owner_or_admin:
|
|
|
|
|
original_accesses = models.DocumentAccess.objects.filter(
|
2025-09-15 22:44:58 +02:00
|
|
|
document=document_to_duplicate
|
2025-07-09 15:00:29 +02:00
|
|
|
).exclude(user=request.user)
|
|
|
|
|
|
|
|
|
|
accesses_to_create.extend(
|
|
|
|
|
models.DocumentAccess(
|
|
|
|
|
document=duplicated_document,
|
|
|
|
|
user_id=access.user_id,
|
|
|
|
|
team=access.team,
|
|
|
|
|
role=access.role,
|
|
|
|
|
)
|
|
|
|
|
for access in original_accesses
|
2025-01-20 10:23:18 +01:00
|
|
|
)
|
|
|
|
|
|
2025-07-09 15:00:29 +02:00
|
|
|
# Bulk create all the duplicated accesses
|
|
|
|
|
models.DocumentAccess.objects.bulk_create(accesses_to_create)
|
2025-01-20 10:23:18 +01:00
|
|
|
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{"id": str(duplicated_document.id)}, status=status.HTTP_201_CREATED
|
|
|
|
|
)
|
|
|
|
|
|
2025-08-13 06:50:58 +02:00
|
|
|
@drf.decorators.action(detail=False, methods=["get"], url_path="search")
|
|
|
|
|
@method_decorator(refresh_oidc_access_token)
|
|
|
|
|
def search(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Returns a DRF response containing the filtered, annotated and ordered document list.
|
|
|
|
|
The filtering allows full text search through the opensearch indexation app "find".
|
|
|
|
|
"""
|
|
|
|
|
access_token = request.session.get("oidc_access_token")
|
|
|
|
|
|
|
|
|
|
serializer = serializers.FindDocumentSerializer(data=request.query_params)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
|
|
|
|
|
try:
|
2025-09-11 14:05:56 +02:00
|
|
|
indexer = get_document_indexer_class()()
|
2025-08-13 06:50:58 +02:00
|
|
|
queryset = indexer.search(
|
|
|
|
|
text=serializer.validated_data.get("q", ""),
|
|
|
|
|
user=request.user,
|
|
|
|
|
token=access_token,
|
|
|
|
|
)
|
2025-09-11 14:05:56 +02:00
|
|
|
except ImproperlyConfigured:
|
2025-08-13 06:50:58 +02:00
|
|
|
return drf.response.Response(
|
2025-09-11 14:05:56 +02:00
|
|
|
{"detail": "The service is not properly configured."},
|
2025-08-13 06:50:58 +02:00
|
|
|
status=status.HTTP_401_UNAUTHORIZED,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
return self.get_response_for_queryset(
|
|
|
|
|
queryset,
|
|
|
|
|
context={
|
|
|
|
|
"request": request,
|
|
|
|
|
},
|
|
|
|
|
)
|
2025-08-06 17:35:38 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(detail=True, methods=["get"], url_path="versions")
|
2024-04-08 23:37:15 +02:00
|
|
|
def versions_list(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Return the document's versions but only those created after the user got access
|
|
|
|
|
to the document
|
|
|
|
|
"""
|
2024-09-16 19:27:48 +02:00
|
|
|
user = request.user
|
|
|
|
|
if not user.is_authenticated:
|
2024-11-18 07:59:55 +01:00
|
|
|
raise drf.exceptions.PermissionDenied("Authentication required.")
|
2024-09-08 23:37:49 +02:00
|
|
|
|
2024-09-16 19:27:48 +02:00
|
|
|
# Validate query parameters using dedicated serializer
|
|
|
|
|
serializer = serializers.VersionFilterSerializer(data=request.query_params)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
|
2024-04-08 23:37:15 +02:00
|
|
|
document = self.get_object()
|
2024-09-16 19:27:48 +02:00
|
|
|
|
|
|
|
|
# Users should not see version history dating from before they gained access to the
|
|
|
|
|
# document. Filter to get the minimum access date for the logged-in user
|
2024-12-17 07:47:23 +01:00
|
|
|
access_queryset = models.DocumentAccess.objects.filter(
|
|
|
|
|
db.Q(user=user) | db.Q(team__in=user.teams),
|
2025-01-02 17:20:09 +01:00
|
|
|
document__path=Left(db.Value(document.path), Length("document__path")),
|
2024-11-18 07:59:55 +01:00
|
|
|
).aggregate(min_date=db.Min("created_at"))
|
2024-09-16 19:27:48 +02:00
|
|
|
|
|
|
|
|
# Handle the case where the user has no accesses
|
|
|
|
|
min_datetime = access_queryset["min_date"]
|
|
|
|
|
if not min_datetime:
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.exceptions.PermissionDenied(
|
2024-09-16 19:27:48 +02:00
|
|
|
"Only users with specific access can see version history"
|
2024-04-08 23:37:15 +02:00
|
|
|
)
|
2024-07-17 09:10:05 +02:00
|
|
|
|
2024-09-16 19:27:48 +02:00
|
|
|
versions_data = document.get_versions_slice(
|
|
|
|
|
from_version_id=serializer.validated_data.get("version_id"),
|
|
|
|
|
min_datetime=min_datetime,
|
|
|
|
|
page_size=serializer.validated_data.get("page_size"),
|
2024-04-08 23:37:15 +02:00
|
|
|
)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(versions_data)
|
2024-07-17 09:10:05 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(
|
2024-04-08 23:37:15 +02:00
|
|
|
detail=True,
|
|
|
|
|
methods=["get", "delete"],
|
2025-10-30 15:29:11 +01:00
|
|
|
url_path=r"versions/(?P<version_id>[A-Za-z0-9._+\-=~]{1,1024})",
|
2024-04-08 23:37:15 +02:00
|
|
|
)
|
|
|
|
|
# pylint: disable=unused-argument
|
|
|
|
|
def versions_detail(self, request, pk, version_id, *args, **kwargs):
|
|
|
|
|
"""Custom action to retrieve a specific version of a document"""
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
response = document.get_content_response(version_id=version_id)
|
|
|
|
|
except (FileNotFoundError, ClientError) as err:
|
|
|
|
|
raise Http404 from err
|
|
|
|
|
|
|
|
|
|
# Don't let users access versions that were created before they were given access
|
|
|
|
|
# to the document
|
2024-09-06 16:12:02 +02:00
|
|
|
user = request.user
|
2024-09-16 19:27:48 +02:00
|
|
|
min_datetime = min(
|
2024-04-08 23:37:15 +02:00
|
|
|
access.created_at
|
2024-12-17 07:47:23 +01:00
|
|
|
for access in models.DocumentAccess.objects.filter(
|
2024-11-18 07:59:55 +01:00
|
|
|
db.Q(user=user) | db.Q(team__in=user.teams),
|
2025-01-02 17:20:09 +01:00
|
|
|
document__path=Left(db.Value(document.path), Length("document__path")),
|
2024-04-08 23:37:15 +02:00
|
|
|
)
|
|
|
|
|
)
|
2024-12-17 07:47:23 +01:00
|
|
|
|
2024-09-16 19:27:48 +02:00
|
|
|
if response["LastModified"] < min_datetime:
|
2024-04-08 23:37:15 +02:00
|
|
|
raise Http404
|
|
|
|
|
|
|
|
|
|
if request.method == "DELETE":
|
|
|
|
|
response = document.delete_version(version_id)
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(
|
2024-04-08 23:37:15 +02:00
|
|
|
status=response["ResponseMetadata"]["HTTPStatusCode"]
|
|
|
|
|
)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(
|
2024-04-08 23:37:15 +02:00
|
|
|
{
|
2024-05-21 14:46:23 +02:00
|
|
|
"content": response["Body"].read().decode("utf-8"),
|
2024-04-08 23:37:15 +02:00
|
|
|
"last_modified": response["LastModified"],
|
2024-07-17 09:10:05 +02:00
|
|
|
"id": version_id,
|
2024-04-08 23:37:15 +02:00
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(detail=True, methods=["put"], url_path="link-configuration")
|
2024-09-08 23:07:47 +02:00
|
|
|
def link_configuration(self, request, *args, **kwargs):
|
|
|
|
|
"""Update link configuration with specific rights (cf get_abilities)."""
|
|
|
|
|
# Check permissions first
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
|
|
|
|
|
# Deserialize and validate the data
|
|
|
|
|
serializer = serializers.LinkDocumentSerializer(
|
|
|
|
|
document, data=request.data, partial=True
|
|
|
|
|
)
|
2024-09-20 22:42:46 +02:00
|
|
|
serializer.is_valid(raise_exception=True)
|
2024-09-08 23:07:47 +02:00
|
|
|
|
|
|
|
|
serializer.save()
|
2024-11-28 16:35:48 +01:00
|
|
|
|
|
|
|
|
# Notify collaboration server about the link updated
|
|
|
|
|
CollaborationService().reset_connections(str(document.id))
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(serializer.data, status=drf.status.HTTP_200_OK)
|
2024-09-08 23:07:47 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(detail=True, methods=["post", "delete"], url_path="favorite")
|
2024-11-09 10:45:38 +01:00
|
|
|
def favorite(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Mark or unmark the document as a favorite for the logged-in user based on the HTTP method.
|
|
|
|
|
"""
|
|
|
|
|
# Check permissions first
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
user = request.user
|
|
|
|
|
|
|
|
|
|
if request.method == "POST":
|
|
|
|
|
# Try to mark as favorite
|
|
|
|
|
try:
|
|
|
|
|
models.DocumentFavorite.objects.create(document=document, user=user)
|
|
|
|
|
except ValidationError:
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(
|
2024-11-09 10:45:38 +01:00
|
|
|
{"detail": "Document already marked as favorite"},
|
2024-11-18 07:59:55 +01:00
|
|
|
status=drf.status.HTTP_200_OK,
|
2024-11-09 10:45:38 +01:00
|
|
|
)
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(
|
2024-11-09 10:45:38 +01:00
|
|
|
{"detail": "Document marked as favorite"},
|
2024-11-18 07:59:55 +01:00
|
|
|
status=drf.status.HTTP_201_CREATED,
|
2024-11-09 10:45:38 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Handle DELETE method to unmark as favorite
|
|
|
|
|
deleted, _ = models.DocumentFavorite.objects.filter(
|
|
|
|
|
document=document, user=user
|
|
|
|
|
).delete()
|
|
|
|
|
if deleted:
|
2025-07-13 19:56:07 +02:00
|
|
|
return drf.response.Response(status=drf.status.HTTP_204_NO_CONTENT)
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(
|
2024-11-09 10:45:38 +01:00
|
|
|
{"detail": "Document was already not marked as favorite"},
|
2024-11-18 07:59:55 +01:00
|
|
|
status=drf.status.HTTP_200_OK,
|
2024-11-09 10:45:38 +01:00
|
|
|
)
|
|
|
|
|
|
2025-07-13 19:56:07 +02:00
|
|
|
@drf.decorators.action(detail=True, methods=["post", "delete"], url_path="mask")
|
|
|
|
|
def mask(self, request, *args, **kwargs):
|
|
|
|
|
"""Mask or unmask the document for the logged-in user based on the HTTP method."""
|
|
|
|
|
# Check permissions first
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
user = request.user
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
link_trace = models.LinkTrace.objects.get(document=document, user=user)
|
|
|
|
|
except models.LinkTrace.DoesNotExist:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "User never accessed this document before."},
|
|
|
|
|
status=status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if request.method == "POST":
|
|
|
|
|
if link_trace.is_masked:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Document was already masked"},
|
|
|
|
|
status=drf.status.HTTP_200_OK,
|
|
|
|
|
)
|
|
|
|
|
link_trace.is_masked = True
|
|
|
|
|
link_trace.save(update_fields=["is_masked"])
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Document was masked"},
|
|
|
|
|
status=drf.status.HTTP_201_CREATED,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Handle DELETE method to unmask document
|
|
|
|
|
if not link_trace.is_masked:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Document was already not masked"},
|
|
|
|
|
status=drf.status.HTTP_200_OK,
|
|
|
|
|
)
|
|
|
|
|
link_trace.is_masked = False
|
|
|
|
|
link_trace.save(update_fields=["is_masked"])
|
|
|
|
|
return drf.response.Response(status=drf.status.HTTP_204_NO_CONTENT)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(detail=True, methods=["post"], url_path="attachment-upload")
|
2024-08-19 22:35:48 +02:00
|
|
|
def attachment_upload(self, request, *args, **kwargs):
|
|
|
|
|
"""Upload a file related to a given document"""
|
|
|
|
|
# Check permissions first
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
|
|
|
|
|
# Validate metadata in payload
|
|
|
|
|
serializer = serializers.FileUploadSerializer(data=request.data)
|
2024-09-20 22:42:46 +02:00
|
|
|
serializer.is_valid(raise_exception=True)
|
2024-08-19 22:35:48 +02:00
|
|
|
|
|
|
|
|
# Generate a generic yet unique filename to store the image in object storage
|
|
|
|
|
file_id = uuid.uuid4()
|
2025-01-20 10:23:18 +01:00
|
|
|
ext = serializer.validated_data["expected_extension"]
|
2024-10-07 20:10:42 +02:00
|
|
|
|
|
|
|
|
# Prepare metadata for storage
|
2025-01-15 15:58:46 +01:00
|
|
|
extra_args = {
|
2025-05-05 16:01:12 +02:00
|
|
|
"Metadata": {
|
|
|
|
|
"owner": str(request.user.id),
|
|
|
|
|
"status": enums.DocumentAttachmentStatus.PROCESSING,
|
|
|
|
|
},
|
2025-01-15 15:58:46 +01:00
|
|
|
"ContentType": serializer.validated_data["content_type"],
|
|
|
|
|
}
|
2025-02-26 18:21:38 +01:00
|
|
|
file_unsafe = ""
|
2024-10-07 20:10:42 +02:00
|
|
|
if serializer.validated_data["is_unsafe"]:
|
2024-09-20 22:42:46 +02:00
|
|
|
extra_args["Metadata"]["is_unsafe"] = "true"
|
2025-02-26 18:21:38 +01:00
|
|
|
file_unsafe = "-unsafe"
|
|
|
|
|
|
2025-01-20 10:23:18 +01:00
|
|
|
key = f"{document.key_base}/{enums.ATTACHMENTS_FOLDER:s}/{file_id!s}{file_unsafe}.{ext:s}"
|
2024-10-07 20:10:42 +02:00
|
|
|
|
2025-02-27 16:19:17 +01:00
|
|
|
file_name = serializer.validated_data["file_name"]
|
|
|
|
|
if (
|
|
|
|
|
not serializer.validated_data["content_type"].startswith("image/")
|
|
|
|
|
or serializer.validated_data["is_unsafe"]
|
|
|
|
|
):
|
|
|
|
|
extra_args.update(
|
|
|
|
|
{"ContentDisposition": f'attachment; filename="{file_name:s}"'}
|
|
|
|
|
)
|
|
|
|
|
else:
|
|
|
|
|
extra_args.update(
|
|
|
|
|
{"ContentDisposition": f'inline; filename="{file_name:s}"'}
|
|
|
|
|
)
|
|
|
|
|
|
2024-10-07 20:10:42 +02:00
|
|
|
file = serializer.validated_data["file"]
|
|
|
|
|
default_storage.connection.meta.client.upload_fileobj(
|
2024-09-20 22:42:46 +02:00
|
|
|
file, default_storage.bucket_name, key, ExtraArgs=extra_args
|
2024-10-07 20:10:42 +02:00
|
|
|
)
|
2024-08-19 22:35:48 +02:00
|
|
|
|
2025-01-20 10:23:18 +01:00
|
|
|
# Make the attachment readable by document readers
|
|
|
|
|
document.attachments.append(key)
|
|
|
|
|
document.save()
|
|
|
|
|
|
2025-05-05 16:01:12 +02:00
|
|
|
malware_detection.analyse_file(key, document_id=document.id)
|
|
|
|
|
|
2025-05-21 15:09:40 +02:00
|
|
|
url = reverse(
|
|
|
|
|
"documents-media-check",
|
|
|
|
|
kwargs={"pk": document.id},
|
|
|
|
|
)
|
|
|
|
|
parameters = urlencode({"key": key})
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(
|
2025-05-21 15:09:40 +02:00
|
|
|
{
|
|
|
|
|
"file": f"{url:s}?{parameters:s}",
|
|
|
|
|
},
|
2024-11-18 07:59:55 +01:00
|
|
|
status=drf.status.HTTP_201_CREATED,
|
2024-08-19 22:35:48 +02:00
|
|
|
)
|
|
|
|
|
|
2024-12-27 17:19:16 +01:00
|
|
|
def _auth_get_original_url(self, request):
|
2024-08-19 22:38:41 +02:00
|
|
|
"""
|
2024-12-27 17:19:16 +01:00
|
|
|
Extracts and parses the original URL from the "HTTP_X_ORIGINAL_URL" header.
|
|
|
|
|
Raises PermissionDenied if the header is missing.
|
2024-08-19 22:38:41 +02:00
|
|
|
|
|
|
|
|
The original url is passed by nginx in the "HTTP_X_ORIGINAL_URL" header.
|
|
|
|
|
See corresponding ingress configuration in Helm chart and read about the
|
|
|
|
|
nginx.ingress.kubernetes.io/auth-url annotation to understand how the Nginx ingress
|
|
|
|
|
is configured to do this.
|
|
|
|
|
|
|
|
|
|
Based on the original url and the logged in user, we must decide if we authorize Nginx
|
|
|
|
|
to let this request go through (by returning a 200 code) or if we block it (by returning
|
|
|
|
|
a 403 error). Note that we return 403 errors without any further details for security
|
|
|
|
|
reasons.
|
|
|
|
|
"""
|
2024-11-18 07:59:55 +01:00
|
|
|
# Extract the original URL from the request header
|
|
|
|
|
original_url = request.META.get("HTTP_X_ORIGINAL_URL")
|
|
|
|
|
if not original_url:
|
|
|
|
|
logger.debug("Missing HTTP_X_ORIGINAL_URL header in subrequest")
|
|
|
|
|
raise drf.exceptions.PermissionDenied()
|
|
|
|
|
|
2024-12-27 17:19:16 +01:00
|
|
|
logger.debug("Original url: '%s'", original_url)
|
|
|
|
|
return urlparse(original_url)
|
2024-08-19 22:38:41 +02:00
|
|
|
|
2024-12-27 17:19:16 +01:00
|
|
|
def _auth_get_url_params(self, pattern, fragment):
|
|
|
|
|
"""
|
|
|
|
|
Extracts URL parameters from the given fragment using the specified regex pattern.
|
|
|
|
|
Raises PermissionDenied if parameters cannot be extracted.
|
|
|
|
|
"""
|
|
|
|
|
match = pattern.search(fragment)
|
2024-08-19 22:38:41 +02:00
|
|
|
try:
|
2024-12-27 17:19:16 +01:00
|
|
|
return match.groupdict()
|
2024-11-18 07:59:55 +01:00
|
|
|
except (ValueError, AttributeError) as exc:
|
|
|
|
|
logger.debug("Failed to extract parameters from subrequest URL: %s", exc)
|
|
|
|
|
raise drf.exceptions.PermissionDenied() from exc
|
|
|
|
|
|
|
|
|
|
    @drf.decorators.action(detail=False, methods=["get"], url_path="media-auth")
    def media_auth(self, request, *args, **kwargs):
        """
        This view is used by an Nginx subrequest to control access to a document's
        attachment file.

        When we let the request go through, we compute authorization headers that will be added to
        the request going through thanks to the nginx.ingress.kubernetes.io/auth-response-headers
        annotation. The request will then be proxied to the object storage backend who will
        respond with the file after checking the signature included in headers.
        """
        # Recover the object-storage key from the URL Nginx forwarded to us.
        parsed_url = self._auth_get_original_url(request)
        url_params = self._auth_get_url_params(
            enums.MEDIA_STORAGE_URL_PATTERN, parsed_url.path
        )

        user = request.user
        key = f"{url_params['pk']:s}/{url_params['attachment']:s}"

        # Look for a document to which the user has access and that includes this attachment
        # We must look into all descendants of any document to which the user has access per se
        readable_per_se_paths = (
            self.queryset.readable_per_se(user)
            .order_by("path")
            .values_list("path", flat=True)
        )

        # Documents referencing this attachment key; select_related(None) drops
        # the default joins since only "path" is needed here.
        attachments_documents = (
            self.queryset.select_related(None)
            .filter(attachments__contains=[key])
            .only("path")
            .order_by("path")
        )
        # Keep only documents that are descendants of (or equal to) a readable one.
        readable_attachments_paths = filter_descendants(
            [doc.path for doc in attachments_documents],
            readable_per_se_paths,
            skip_sorting=True,
        )

        if not readable_attachments_paths:
            logger.debug("User '%s' lacks permission for attachment", user)
            raise drf.exceptions.PermissionDenied()

        # Check if the attachment is ready
        s3_client = default_storage.connection.meta.client
        bucket_name = default_storage.bucket_name
        try:
            head_resp = s3_client.head_object(Bucket=bucket_name, Key=key)
        except ClientError as err:
            # Missing object or storage error: deny rather than leak details.
            raise drf.exceptions.PermissionDenied() from err
        metadata = head_resp.get("Metadata", {})
        # In order to be compatible with existing upload without `status` metadata,
        # we consider them as ready.
        if (
            metadata.get("status", enums.DocumentAttachmentStatus.READY)
            != enums.DocumentAttachmentStatus.READY
        ):
            raise drf.exceptions.PermissionDenied()

        # Generate S3 authorization headers using the extracted URL parameters
        # Note: `request` is rebound here to the signed storage request object.
        request = utils.generate_s3_authorization_headers(key)

        return drf.response.Response("authorized", headers=request.headers, status=200)
|
2024-08-19 22:38:41 +02:00
|
|
|
|
2025-05-21 14:22:31 +02:00
|
|
|
@drf.decorators.action(detail=True, methods=["get"], url_path="media-check")
|
|
|
|
|
def media_check(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
Check if the media is ready to be served.
|
|
|
|
|
"""
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
|
|
|
|
|
key = request.query_params.get("key")
|
|
|
|
|
if not key:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Missing 'key' query parameter"},
|
|
|
|
|
status=drf.status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if key not in document.attachments:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Attachment missing"},
|
|
|
|
|
status=drf.status.HTTP_404_NOT_FOUND,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Check if the attachment is ready
|
|
|
|
|
s3_client = default_storage.connection.meta.client
|
|
|
|
|
bucket_name = default_storage.bucket_name
|
|
|
|
|
try:
|
|
|
|
|
head_resp = s3_client.head_object(Bucket=bucket_name, Key=key)
|
|
|
|
|
except ClientError as err:
|
|
|
|
|
logger.error("Client Error fetching file %s metadata: %s", key, err)
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Media not found"},
|
|
|
|
|
status=drf.status.HTTP_404_NOT_FOUND,
|
|
|
|
|
)
|
|
|
|
|
metadata = head_resp.get("Metadata", {})
|
|
|
|
|
|
|
|
|
|
body = {
|
|
|
|
|
"status": metadata.get("status", enums.DocumentAttachmentStatus.PROCESSING),
|
|
|
|
|
}
|
|
|
|
|
if metadata.get("status") == enums.DocumentAttachmentStatus.READY:
|
|
|
|
|
body = {
|
|
|
|
|
"status": enums.DocumentAttachmentStatus.READY,
|
|
|
|
|
"file": f"{settings.MEDIA_URL:s}{key:s}",
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return drf.response.Response(body, status=drf.status.HTTP_200_OK)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(
|
2024-09-20 22:42:46 +02:00
|
|
|
detail=True,
|
|
|
|
|
methods=["post"],
|
|
|
|
|
name="Apply a transformation action on a piece of text with AI",
|
|
|
|
|
url_path="ai-transform",
|
|
|
|
|
throttle_classes=[utils.AIDocumentRateThrottle, utils.AIUserRateThrottle],
|
|
|
|
|
)
|
|
|
|
|
def ai_transform(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
POST /api/v1.0/documents/<resource_id>/ai-transform
|
|
|
|
|
with expected data:
|
|
|
|
|
- text: str
|
|
|
|
|
- action: str [prompt, correct, rephrase, summarize]
|
|
|
|
|
Return JSON response with the processed text.
|
|
|
|
|
"""
|
|
|
|
|
# Check permissions first
|
|
|
|
|
self.get_object()
|
|
|
|
|
|
|
|
|
|
serializer = serializers.AITransformSerializer(data=request.data)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
|
|
|
|
|
text = serializer.validated_data["text"]
|
|
|
|
|
action = serializer.validated_data["action"]
|
|
|
|
|
|
|
|
|
|
response = AIService().transform(text, action)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(response, status=drf.status.HTTP_200_OK)
|
2024-09-20 22:42:46 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
@drf.decorators.action(
|
2024-09-20 22:42:46 +02:00
|
|
|
detail=True,
|
|
|
|
|
methods=["post"],
|
|
|
|
|
name="Translate a piece of text with AI",
|
|
|
|
|
url_path="ai-translate",
|
|
|
|
|
throttle_classes=[utils.AIDocumentRateThrottle, utils.AIUserRateThrottle],
|
|
|
|
|
)
|
|
|
|
|
def ai_translate(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
POST /api/v1.0/documents/<resource_id>/ai-translate
|
|
|
|
|
with expected data:
|
|
|
|
|
- text: str
|
|
|
|
|
- language: str [settings.LANGUAGES]
|
|
|
|
|
Return JSON response with the translated text.
|
|
|
|
|
"""
|
|
|
|
|
# Check permissions first
|
|
|
|
|
self.get_object()
|
|
|
|
|
|
|
|
|
|
serializer = self.get_serializer(data=request.data)
|
|
|
|
|
serializer.is_valid(raise_exception=True)
|
|
|
|
|
|
|
|
|
|
text = serializer.validated_data["text"]
|
|
|
|
|
language = serializer.validated_data["language"]
|
|
|
|
|
|
|
|
|
|
response = AIService().translate(text, language)
|
|
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
return drf.response.Response(response, status=drf.status.HTTP_200_OK)
|
2024-09-20 22:42:46 +02:00
|
|
|
|
2025-03-10 10:11:38 +01:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["get"],
|
|
|
|
|
name="",
|
|
|
|
|
url_path="cors-proxy",
|
|
|
|
|
)
|
2025-06-30 10:42:48 +02:00
|
|
|
@csp_update({"img-src": [NONE, "data:"]})
|
2025-03-10 10:11:38 +01:00
|
|
|
def cors_proxy(self, request, *args, **kwargs):
|
|
|
|
|
"""
|
|
|
|
|
GET /api/v1.0/documents/<resource_id>/cors-proxy
|
|
|
|
|
Act like a proxy to fetch external resources and bypass CORS restrictions.
|
|
|
|
|
"""
|
|
|
|
|
url = request.query_params.get("url")
|
|
|
|
|
if not url:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": "Missing 'url' query parameter"},
|
|
|
|
|
status=drf.status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Check for permissions.
|
|
|
|
|
self.get_object()
|
|
|
|
|
|
|
|
|
|
url = unquote(url)
|
|
|
|
|
|
2025-08-25 16:15:16 +02:00
|
|
|
url_validator = URLValidator(schemes=["http", "https"])
|
|
|
|
|
try:
|
|
|
|
|
url_validator(url)
|
|
|
|
|
except drf.exceptions.ValidationError as e:
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"detail": str(e)},
|
|
|
|
|
status=drf.status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
)
|
|
|
|
|
|
2025-03-10 10:11:38 +01:00
|
|
|
try:
|
|
|
|
|
response = requests.get(
|
|
|
|
|
url,
|
|
|
|
|
stream=True,
|
|
|
|
|
headers={
|
|
|
|
|
"User-Agent": request.headers.get("User-Agent", ""),
|
|
|
|
|
"Accept": request.headers.get("Accept", ""),
|
|
|
|
|
},
|
|
|
|
|
timeout=10,
|
|
|
|
|
)
|
2025-03-20 11:04:02 +01:00
|
|
|
content_type = response.headers.get("Content-Type", "")
|
|
|
|
|
|
|
|
|
|
if not content_type.startswith("image/"):
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
status=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE
|
|
|
|
|
)
|
2025-03-10 10:11:38 +01:00
|
|
|
|
|
|
|
|
# Use StreamingHttpResponse with the response's iter_content to properly stream the data
|
|
|
|
|
proxy_response = StreamingHttpResponse(
|
|
|
|
|
streaming_content=response.iter_content(chunk_size=8192),
|
2025-03-20 11:04:02 +01:00
|
|
|
content_type=content_type,
|
|
|
|
|
headers={
|
|
|
|
|
"Content-Disposition": "attachment;",
|
|
|
|
|
},
|
2025-03-10 10:11:38 +01:00
|
|
|
status=response.status_code,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
return proxy_response
|
|
|
|
|
|
|
|
|
|
except requests.RequestException as e:
|
2025-08-25 17:24:57 +02:00
|
|
|
logger.exception(e)
|
|
|
|
|
return drf.response.Response(
|
|
|
|
|
{"error": f"Failed to fetch resource from {url}"},
|
|
|
|
|
status=status.HTTP_400_BAD_REQUEST,
|
2025-03-10 10:11:38 +01:00
|
|
|
)
|
|
|
|
|
|
2025-07-24 02:31:50 +02:00
|
|
|
@drf.decorators.action(
|
|
|
|
|
detail=True,
|
|
|
|
|
methods=["get"],
|
|
|
|
|
url_path="content",
|
|
|
|
|
name="Get document content in different formats",
|
|
|
|
|
)
|
|
|
|
|
def content(self, request, pk=None):
|
|
|
|
|
"""
|
|
|
|
|
Retrieve document content in different formats (JSON, Markdown, HTML).
|
|
|
|
|
|
|
|
|
|
Query parameters:
|
|
|
|
|
- content_format: The desired output format (json, markdown, html)
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
JSON response with content in the specified format.
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
document = self.get_object()
|
|
|
|
|
|
|
|
|
|
content_format = request.query_params.get("content_format", "json").lower()
|
|
|
|
|
if content_format not in {"json", "markdown", "html"}:
|
|
|
|
|
raise drf.exceptions.ValidationError(
|
|
|
|
|
"Invalid format. Must be one of: json, markdown, html"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Get the base64 content from the document
|
|
|
|
|
content = None
|
|
|
|
|
base64_content = document.content
|
|
|
|
|
if base64_content is not None:
|
|
|
|
|
# Convert using the y-provider service
|
|
|
|
|
try:
|
2025-07-24 14:08:18 +02:00
|
|
|
yprovider = YdocConverter()
|
|
|
|
|
result = yprovider.convert(
|
|
|
|
|
base64.b64decode(base64_content),
|
|
|
|
|
"application/vnd.yjs.doc",
|
|
|
|
|
{
|
|
|
|
|
"markdown": "text/markdown",
|
|
|
|
|
"html": "text/html",
|
|
|
|
|
"json": "application/json",
|
|
|
|
|
}[content_format],
|
|
|
|
|
)
|
|
|
|
|
content = result
|
2025-07-24 02:31:50 +02:00
|
|
|
except YProviderValidationError as e:
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{"error": str(e)}, status=status.HTTP_400_BAD_REQUEST
|
|
|
|
|
)
|
|
|
|
|
except YProviderServiceUnavailableError as e:
|
|
|
|
|
logger.error("Error getting content for document %s: %s", pk, e)
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{"error": "Failed to get document content"},
|
|
|
|
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
return drf_response.Response(
|
|
|
|
|
{
|
|
|
|
|
"id": str(document.id),
|
|
|
|
|
"title": document.title,
|
|
|
|
|
"content": content,
|
|
|
|
|
"created_at": document.created_at,
|
|
|
|
|
"updated_at": document.updated_at,
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
2024-04-03 18:50:28 +02:00
|
|
|
|
|
|
|
|
class DocumentAccessViewSet(
    ResourceAccessViewsetMixin,
    drf.mixins.CreateModelMixin,
    drf.mixins.RetrieveModelMixin,
    drf.mixins.UpdateModelMixin,
    drf.mixins.DestroyModelMixin,
    viewsets.GenericViewSet,
):
    """
    API ViewSet for all interactions with document accesses.

    GET /api/v1.0/documents/<resource_id>/accesses/:<document_access_id>
        Return list of all document accesses related to the logged-in user or one
        document access if an id is provided.

    POST /api/v1.0/documents/<resource_id>/accesses/ with expected data:
        - user: str
        - role: str [administrator|editor|reader]
        Return newly created document access

    PUT /api/v1.0/documents/<resource_id>/accesses/<document_access_id>/ with expected data:
        - role: str [owner|admin|editor|reader]
        Return updated document access

    PATCH /api/v1.0/documents/<resource_id>/accesses/<document_access_id>/ with expected data:
        - role: str [owner|admin|editor|reader]
        Return partially updated document access

    DELETE /api/v1.0/documents/<resource_id>/accesses/<document_access_id>/
        Delete targeted document access
    """

    lookup_field = "pk"
    permission_classes = [permissions.ResourceAccessPermission]
    # Narrow the fetched columns to what the serializers actually expose.
    queryset = models.DocumentAccess.objects.select_related("user", "document").only(
        "id",
        "created_at",
        "role",
        "team",
        "user__id",
        "user__short_name",
        "user__full_name",
        "user__email",
        "user__language",
        "document__id",
        "document__path",
        "document__depth",
    )
    resource_field_name = "document"
    throttle_scope = "document_access"

    @cached_property
    def document(self):
        """Get related document from resource ID in url and annotate user roles."""
        try:
            return models.Document.objects.annotate_user_roles(self.request.user).get(
                pk=self.kwargs["resource_id"]
            )
        except models.Document.DoesNotExist as excpt:
            raise drf.exceptions.NotFound() from excpt

    def get_serializer_class(self):
        """Use light serializer for unprivileged users."""
        return (
            serializers.DocumentAccessSerializer
            if self.document.get_role(self.request.user) in choices.PRIVILEGED_ROLES
            else serializers.DocumentAccessLightSerializer
        )

    def list(self, request, *args, **kwargs):
        """Return accesses for the current document with filters and annotations."""
        user = request.user

        role = self.document.get_role(user)
        if not role:
            # No role at all on this document: return an empty list, not a 403.
            return drf.response.Response([])

        # Accesses are inherited: collect the document and all its live ancestors.
        ancestors = (
            self.document.get_ancestors()
            | models.Document.objects.filter(pk=self.document.pk)
        ).filter(ancestors_deleted_at__isnull=True)

        queryset = self.get_queryset().filter(document__in=ancestors)

        # Unprivileged users only see privileged (owner/admin) accesses.
        if role not in choices.PRIVILEGED_ROLES:
            queryset = queryset.filter(role__in=choices.PRIVILEGED_ROLES)

        # Ordering by path guarantees parents are processed before children below.
        accesses = list(queryset.order_by("document__path"))

        # Annotate more information on roles
        # path -> target key -> highest role granted up to (and including) that path
        path_to_key_to_max_ancestors_role = defaultdict(
            lambda: defaultdict(lambda: None)
        )
        # path -> list of the current user's roles collected along the ancestry
        path_to_ancestors_roles = defaultdict(list)
        # path -> current user's own highest role at that path
        path_to_role = defaultdict(lambda: None)
        for access in accesses:
            key = access.target_key
            path = access.document.path
            # Materialized-path trees: strip one step to get the parent path.
            parent_path = path[: -models.Document.steplen]

            path_to_key_to_max_ancestors_role[path][key] = choices.RoleChoices.max(
                path_to_key_to_max_ancestors_role[path][key], access.role
            )

            if parent_path:
                # Propagate the best role seen on the parent down to this path.
                path_to_key_to_max_ancestors_role[path][key] = choices.RoleChoices.max(
                    path_to_key_to_max_ancestors_role[parent_path][key],
                    path_to_key_to_max_ancestors_role[path][key],
                )
                path_to_ancestors_roles[path].extend(
                    path_to_ancestors_roles[parent_path]
                )
                path_to_ancestors_roles[path].append(path_to_role[parent_path])
            else:
                path_to_ancestors_roles[path] = []

            # Track the requesting user's own role (direct or via team membership).
            if access.user_id == user.id or access.team in user.teams:
                path_to_role[path] = choices.RoleChoices.max(
                    path_to_role[path], access.role
                )

        # serialize and return the response
        context = self.get_serializer_context()
        serializer_class = self.get_serializer_class()
        serialized_data = []
        for access in accesses:
            path = access.document.path
            parent_path = path[: -models.Document.steplen]
            # Highest role this access target inherits from ancestors only.
            access.max_ancestors_role = (
                path_to_key_to_max_ancestors_role[parent_path][access.target_key]
                if parent_path
                else None
            )
            access.set_user_roles_tuple(
                choices.RoleChoices.max(*path_to_ancestors_roles[path]),
                path_to_role.get(path),
            )
            serializer = serializer_class(access, context=context)
            serialized_data.append(serializer.data)

        return drf.response.Response(serialized_data)

    def perform_create(self, serializer):
        """
        Actually create the new document access:
        - Ensures the `document_id` is explicitly set from the URL
        - If the assigned role is `OWNER`, checks that the requesting user is an owner
          of the document. This is the only permission check deferred until this step;
          all other access checks are handled earlier in the permission lifecycle.
        - Sends an invitation email to the newly added user after saving the access.
        """
        role = serializer.validated_data.get("role")
        if (
            role == choices.RoleChoices.OWNER
            and self.document.get_role(self.request.user) != choices.RoleChoices.OWNER
        ):
            raise drf.exceptions.PermissionDenied(
                "Only owners of a document can assign other users as owners."
            )

        access = serializer.save(document_id=self.kwargs["resource_id"])

        # Team accesses have no user; only notify when a concrete user was added.
        if access.user:
            access.document.send_invitation_email(
                access.user.email,
                access.role,
                self.request.user,
                # Prefer the invitee's language, then the inviter's, then the default.
                access.user.language
                or self.request.user.language
                or settings.LANGUAGE_CODE,
            )

    def perform_update(self, serializer):
        """Update an access to the document and notify the collaboration server."""
        access = serializer.save()

        access_user_id = None
        if access.user:
            access_user_id = str(access.user.id)

        # Notify collaboration server about the access change
        CollaborationService().reset_connections(
            str(access.document.id), access_user_id
        )

    def perform_destroy(self, instance):
        """Delete an access to the document and notify the collaboration server."""
        instance.delete()

        # Notify collaboration server about the access removed
        # NOTE(review): assumes instance.user is set; a team-only access (user=None)
        # would raise AttributeError here — confirm against perform_update's guard.
        CollaborationService().reset_connections(
            str(instance.document.id), str(instance.user.id)
        )
|
|
|
|
|
|
2024-04-03 18:50:28 +02:00
|
|
|
|
|
|
|
|
class TemplateViewSet(
    drf.mixins.RetrieveModelMixin,
    viewsets.GenericViewSet,
):
    """Template ViewSet"""

    filter_backends = [drf.filters.OrderingFilter]
    permission_classes = [
        permissions.IsAuthenticatedOrSafe,
        permissions.ResourceWithAccessPermission,
    ]
    throttle_scope = "template"
    ordering = ["-created_at"]
    ordering_fields = ["created_at", "updated_at", "title"]
    serializer_class = serializers.TemplateSerializer
    queryset = models.Template.objects.all()

    def get_queryset(self):
        """Custom queryset to get user related templates."""
        queryset = super().get_queryset()
        user = self.request.user

        # Anonymous users get the raw queryset; role annotation needs a user.
        if not user.is_authenticated:
            return queryset

        # Subquery collecting all roles the user holds on each template,
        # either directly or through one of their teams.
        user_roles_query = (
            models.TemplateAccess.objects.filter(
                db.Q(user=user) | db.Q(team__in=user.teams),
                template_id=db.OuterRef("pk"),
            )
            .values("template")
            .annotate(roles_array=ArrayAgg("role"))
            .values("roles_array")
        )
        return queryset.annotate(user_roles=db.Subquery(user_roles_query)).distinct()

    def list(self, request, *args, **kwargs):
        """Restrict templates returned by the list endpoint"""
        queryset = self.filter_queryset(self.get_queryset())
        user = self.request.user
        # Authenticated users see public templates plus those shared with them
        # (directly or via a team); anonymous users only see public templates.
        if user.is_authenticated:
            queryset = queryset.filter(
                db.Q(accesses__user=user)
                | db.Q(accesses__team__in=user.teams)
                | db.Q(is_public=True)
            )
        else:
            queryset = queryset.filter(is_public=True)

        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)

        serializer = self.get_serializer(queryset, many=True)
        return drf.response.Response(serializer.data)
|
2024-09-08 23:37:49 +02:00
|
|
|
|
2024-05-13 23:31:00 +02:00
|
|
|
|
|
|
|
|
class InvitationViewset(
    drf.mixins.CreateModelMixin,
    drf.mixins.ListModelMixin,
    drf.mixins.RetrieveModelMixin,
    drf.mixins.DestroyModelMixin,
    drf.mixins.UpdateModelMixin,
    viewsets.GenericViewSet,
):
    """API ViewSet for user invitations to document.

    GET /api/v1.0/documents/<document_id>/invitations/:<invitation_id>/
        Return list of invitations related to that document or one
        document access if an id is provided.

    POST /api/v1.0/documents/<document_id>/invitations/ with expected data:
        - email: str
        - role: str [administrator|editor|reader]
        Return newly created invitation (issuer and document are automatically set)

    PATCH /api/v1.0/documents/<document_id>/invitations/:<invitation_id>/ with expected data:
        - role: str [owner|admin|editor|reader]
        Return partially updated document invitation

    DELETE /api/v1.0/documents/<document_id>/invitations/<invitation_id>/
        Delete targeted invitation
    """

    lookup_field = "id"
    pagination_class = Pagination
    permission_classes = [
        permissions.CanCreateInvitationPermission,
        permissions.ResourceWithAccessPermission,
    ]
    throttle_scope = "invitation"
    queryset = (
        models.Invitation.objects.all()
        .select_related("document")
        .order_by("-created_at")
    )
    serializer_class = serializers.InvitationSerializer

    def get_serializer_context(self):
        """Extra context provided to the serializer class."""
        context = super().get_serializer_context()
        # Pass the document id from the URL so the serializer can bind the invitation.
        context["resource_id"] = self.kwargs["resource_id"]
        return context

    def get_queryset(self):
        """Return the queryset according to the action."""
        queryset = super().get_queryset()
        # Always scope invitations to the document targeted by the URL.
        queryset = queryset.filter(document=self.kwargs["resource_id"])

        if self.action == "list":
            user = self.request.user
            teams = user.teams

            # Determine which role the logged-in user has in the document
            user_roles_query = (
                models.DocumentAccess.objects.filter(
                    db.Q(user=user) | db.Q(team__in=teams),
                    document=self.kwargs["resource_id"],
                )
                .values("document")
                .annotate(roles_array=ArrayAgg("role"))
                .values("roles_array")
            )

            queryset = (
                # The logged-in user should be administrator or owner to see its accesses
                queryset.filter(
                    db.Q(
                        document__accesses__user=user,
                        document__accesses__role__in=choices.PRIVILEGED_ROLES,
                    )
                    | db.Q(
                        document__accesses__team__in=teams,
                        document__accesses__role__in=choices.PRIVILEGED_ROLES,
                    ),
                )
                # Abilities are computed based on logged-in user's role and
                # the user role on each document access
                .annotate(user_roles=db.Subquery(user_roles_query))
                .distinct()
            )
        return queryset

    def perform_create(self, serializer):
        """Save invitation to a document then send an email to the invited user."""
        invitation = serializer.save()

        invitation.document.send_invitation_email(
            invitation.email,
            invitation.role,
            self.request.user,
            # Invitee has no account yet, so fall back to the inviter's language.
            self.request.user.language or settings.LANGUAGE_CODE,
        )
|
2024-11-15 09:29:07 +01:00
|
|
|
|
|
|
|
|
|
2025-06-18 15:13:48 +02:00
|
|
|
class DocumentAskForAccessViewSet(
    drf.mixins.ListModelMixin,
    drf.mixins.RetrieveModelMixin,
    drf.mixins.DestroyModelMixin,
    viewsets.GenericViewSet,
):
    """API ViewSet for asking for access to a document."""

    lookup_field = "id"
    pagination_class = Pagination
    permission_classes = [
        permissions.IsAuthenticated,
        permissions.ResourceWithAccessPermission,
    ]
    throttle_scope = "document_ask_for_access"
    queryset = models.DocumentAskForAccess.objects.all()
    serializer_class = serializers.DocumentAskForAccessSerializer
    # Per-request cache for the target document (one viewset instance
    # serves a single request, so caching on the instance is safe).
    _document = None

    def get_document_or_404(self):
        """Get the document related to the viewset or raise a 404 error."""
        if self._document is None:
            try:
                self._document = models.Document.objects.get(
                    pk=self.kwargs["resource_id"],
                    # NOTE(review): depth=1 looks like it restricts asking for
                    # access to root documents only — confirm against the tree
                    # model used by models.Document.
                    depth=1,
                )
            except models.Document.DoesNotExist as e:
                raise drf.exceptions.NotFound("Document not found.") from e
        return self._document

    def get_queryset(self):
        """Return the queryset according to the action."""
        document = self.get_document_or_404()

        queryset = super().get_queryset()
        queryset = queryset.filter(document=document)

        # Privileged users (owners/admins) can see every pending request on
        # the document; everybody else only sees their own.
        is_owner_or_admin = (
            document.get_role(self.request.user) in models.PRIVILEGED_ROLES
        )
        if not is_owner_or_admin:
            queryset = queryset.filter(user=self.request.user)

        return queryset

    def create(self, request, *args, **kwargs):
        """Create a document ask for access resource.

        Validates the requested role, rejects duplicate requests from the
        same user, then creates the request and sends a notification email
        asynchronously. Returns 201 on success, 400 on duplicate.
        """
        document = self.get_document_or_404()

        serializer = serializers.DocumentAskForAccessCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        queryset = self.get_queryset()

        # One pending request per user per document.
        if queryset.filter(user=request.user).exists():
            return drf.response.Response(
                # Fixed broken-English message (was "You already ask to
                # access to this document.").
                {"detail": "You already asked to access this document."},
                status=drf.status.HTTP_400_BAD_REQUEST,
            )

        ask_for_access = models.DocumentAskForAccess.objects.create(
            document=document,
            user=request.user,
            role=serializer.validated_data["role"],
        )

        # Notify document owners/admins out of band via the task queue.
        send_ask_for_access_mail.delay(ask_for_access.id)

        return drf.response.Response(status=drf.status.HTTP_201_CREATED)

    @drf.decorators.action(detail=True, methods=["post"])
    def accept(self, request, *args, **kwargs):
        """Accept a document ask for access resource.

        An optional "role" in the payload overrides the requested role,
        but the grantor may never grant a role above what their own
        abilities allow. Returns 204 on success, 400 otherwise.
        """
        document_ask_for_access = self.get_object()

        serializer = serializers.RoleSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        # Default to the role originally requested when none is supplied.
        target_role = serializer.validated_data.get(
            "role", document_ask_for_access.role
        )
        abilities = document_ask_for_access.get_abilities(request.user)

        if target_role not in abilities["set_role_to"]:
            return drf.response.Response(
                {"detail": "You cannot accept a role higher than your own."},
                status=drf.status.HTTP_400_BAD_REQUEST,
            )

        document_ask_for_access.accept(role=target_role)
        return drf.response.Response(status=drf.status.HTTP_204_NO_CONTENT)
|
|
|
|
|
|
2025-06-18 15:13:48 +02:00
|
|
|
|
2024-11-18 07:59:55 +01:00
|
|
|
class ConfigView(drf.views.APIView):
    """API ViewSet for sharing some public settings."""

    permission_classes = [AllowAny]
    throttle_scope = "config"

    def get(self, request):
        """
        GET /api/v1.0/config/

        Return a dictionary of public settings.
        """
        array_settings = [
            "AI_FEATURE_ENABLED",
            "COLLABORATION_WS_URL",
            "COLLABORATION_WS_NOT_CONNECTED_READY_ONLY",
            "CRISP_WEBSITE_ID",
            "ENVIRONMENT",
            "FRONTEND_CSS_URL",
            "FRONTEND_HOMEPAGE_FEATURE_ENABLED",
            "FRONTEND_THEME",
            "MEDIA_BASE_URL",
            "POSTHOG_KEY",
            "LANGUAGES",
            "LANGUAGE_CODE",
            "SENTRY_DSN",
            "TRASHBIN_CUTOFF_DAYS",
        ]
        dict_settings = {}
        for setting in array_settings:
            # Only expose settings that are actually defined for this
            # deployment; missing ones are simply omitted from the payload.
            if hasattr(settings, setting):
                dict_settings[setting] = getattr(settings, setting)

        dict_settings["theme_customization"] = self._load_theme_customization()

        return drf.response.Response(dict_settings)

    def _load_theme_customization(self):
        """Load and cache the theme customization JSON file.

        Returns an empty dict when no file path is configured, when the file
        is missing, or when it contains invalid JSON. Errors are logged and
        NOT cached, so a fixed file is picked up on the next request.
        """
        if not settings.THEME_CUSTOMIZATION_FILE_PATH:
            return {}

        cache_key = (
            f"theme_customization_{slugify(settings.THEME_CUSTOMIZATION_FILE_PATH)}"
        )
        # Use None as the cache-miss marker so a legitimately empty ({})
        # customization file is served from cache instead of being re-read
        # from disk on every request (a truthiness check would re-read it).
        theme_customization = cache.get(cache_key)
        if theme_customization is not None:
            return theme_customization

        theme_customization = {}
        try:
            with open(
                settings.THEME_CUSTOMIZATION_FILE_PATH, "r", encoding="utf-8"
            ) as f:
                theme_customization = json.load(f)
        except FileNotFoundError:
            logger.error(
                "Configuration file not found: %s",
                settings.THEME_CUSTOMIZATION_FILE_PATH,
            )
        except json.JSONDecodeError:
            logger.error(
                "Configuration file is not a valid JSON: %s",
                settings.THEME_CUSTOMIZATION_FILE_PATH,
            )
        else:
            cache.set(
                cache_key,
                theme_customization,
                settings.THEME_CUSTOMIZATION_CACHE_TIMEOUT,
            )

        return theme_customization
|
2025-08-28 08:21:35 +02:00
|
|
|
|
|
|
|
|
|
2025-09-12 15:28:25 +02:00
|
|
|
class CommentViewSetMixin:
    """Comment ViewSet Mixin."""

    # Memoized document for the lifetime of this (single-request) viewset.
    _document = None

    def get_document_or_404(self):
        """Get the document related to the viewset or raise a 404 error."""
        if self._document is not None:
            return self._document
        document_pk = self.kwargs["resource_id"]
        try:
            self._document = models.Document.objects.get(pk=document_pk)
        except models.Document.DoesNotExist as e:
            raise drf.exceptions.NotFound("Document not found.") from e
        return self._document
|
|
|
|
|
|
2025-09-12 15:28:25 +02:00
|
|
|
|
|
|
|
|
class ThreadViewSet(
    ResourceAccessViewsetMixin,
    CommentViewSetMixin,
    drf.mixins.CreateModelMixin,
    drf.mixins.ListModelMixin,
    drf.mixins.RetrieveModelMixin,
    drf.mixins.DestroyModelMixin,
    viewsets.GenericViewSet,
):
    """Thread API: list/create threads and nested comment operations."""

    permission_classes = [permissions.CommentPermission]
    pagination_class = Pagination
    serializer_class = serializers.ThreadSerializer
    # Resolved threads are hidden from the default listing.
    queryset = models.Thread.objects.select_related("creator", "document").filter(
        resolved=False
    )
    resource_field_name = "document"

    def perform_create(self, serializer):
        """Create the first comment of the thread."""
        # "body" belongs to the first comment, not to the thread itself:
        # pop it from the validated data before saving the thread
        # (idiomatic single-step replacement for get-then-del).
        body = serializer.validated_data.pop("body")
        thread = serializer.save()

        models.Comment.objects.create(
            thread=thread,
            # Anonymous authors are stored with a null user.
            user=self.request.user if self.request.user.is_authenticated else None,
            body=body,
        )

    @drf.decorators.action(detail=True, methods=["post"], url_path="resolve")
    def resolve(self, request, *args, **kwargs):
        """Resolve a thread.

        Idempotent: resolving an already-resolved thread is a no-op and
        still returns 204.
        """
        thread = self.get_object()
        if not thread.resolved:
            thread.resolved = True
            thread.resolved_at = timezone.now()
            thread.resolved_by = request.user
            thread.save(update_fields=["resolved", "resolved_at", "resolved_by"])
        return drf.response.Response(status=status.HTTP_204_NO_CONTENT)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class CommentViewSet(
    CommentViewSetMixin,
    viewsets.ModelViewSet,
):
    """Comment API: list/create comments and nested reaction operations."""

    permission_classes = [permissions.CommentPermission]
    pagination_class = Pagination
    serializer_class = serializers.CommentSerializer
    queryset = models.Comment.objects.select_related("user").all()

    def get_queryset(self):
        """Override to filter on related resource."""
        # Scope comments to the thread AND its document, both taken from the
        # nested URL kwargs, so a thread id cannot be used against another
        # document's URL.
        return (
            super()
            .get_queryset()
            .filter(
                thread=self.kwargs["thread_id"],
                thread__document=self.kwargs["resource_id"],
            )
        )

    def get_serializer_context(self):
        """Extra context provided to the serializer class."""
        context = super().get_serializer_context()
        # Expose the nested-route identifiers so the serializer can attach
        # the comment to the right document/thread.
        context["document_id"] = self.kwargs["resource_id"]
        context["thread_id"] = self.kwargs["thread_id"]
        return context

    @drf.decorators.action(
        detail=True,
        methods=["post", "delete"],
    )
    def reactions(self, request, *args, **kwargs):
        """POST: add reaction; DELETE: remove reaction.

        Emoji is expected in request.data['emoji'] for both operations.

        Responses: 201 on add, 204 on remove, 400 if the user already
        reacted with this emoji, 404 if the reaction to remove is absent.
        """
        comment = self.get_object()
        serializer = serializers.ReactionSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        if request.method == "POST":
            # One Reaction row per (comment, emoji); users are attached to
            # it through the many-to-many "users" relation.
            reaction, created = models.Reaction.objects.get_or_create(
                comment=comment,
                emoji=serializer.validated_data["emoji"],
            )
            # A freshly created row cannot already contain this user, so the
            # duplicate check is only needed for pre-existing reactions.
            if not created and reaction.users.filter(id=request.user.id).exists():
                return drf.response.Response(
                    {"user_already_reacted": True}, status=status.HTTP_400_BAD_REQUEST
                )
            reaction.users.add(request.user)
            return drf.response.Response(status=status.HTTP_201_CREATED)

        # DELETE
        try:
            # Only match a reaction this user actually participates in.
            reaction = models.Reaction.objects.get(
                comment=comment,
                emoji=serializer.validated_data["emoji"],
                users__in=[request.user],
            )
        except models.Reaction.DoesNotExist as e:
            raise drf.exceptions.NotFound("Reaction not found.") from e
        reaction.users.remove(request.user)
        # Garbage-collect the row once the last user withdraws their reaction.
        if not reaction.users.exists():
            reaction.delete()
        return drf.response.Response(status=status.HTTP_204_NO_CONTENT)
|