(back) manage streaming with the AI service

We want to handle both streaming and non-streaming responses when
interacting with the AI backend service.
This commit is contained in:
Manuel Raynaud
2025-06-12 11:08:23 +02:00
committed by Anthony LC
parent 9d6fe5da8f
commit 6f0dac4f48
10 changed files with 173 additions and 22 deletions

View File

@@ -1857,8 +1857,20 @@ class DocumentViewSet(
serializer = serializers.AIProxySerializer(data=request.data)
serializer.is_valid(raise_exception=True)
response = AIService().proxy(request.data)
return drf.response.Response(response, status=drf.status.HTTP_200_OK)
ai_service = AIService()
if settings.AI_STREAM:
return StreamingHttpResponse(
ai_service.stream(request.data),
content_type="text/event-stream",
status=drf.status.HTTP_200_OK,
)
ai_response = ai_service.proxy(request.data)
return drf.response.Response(
ai_response.model_dump(),
status=drf.status.HTTP_200_OK,
)
@drf.decorators.action(
detail=True,
@@ -2557,6 +2569,7 @@ class ConfigView(drf.views.APIView):
array_settings = [
"AI_BOT",
"AI_FEATURE_ENABLED",
"AI_STREAM",
"API_USERS_SEARCH_QUERY_MIN_LENGTH",
"COLLABORATION_WS_URL",
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY",