✨(back) manage streaming with the ai service
We want to handle both streaming and non-streaming responses when interacting with the AI backend service.
This commit is contained in:
committed by
Anthony LC
parent
9d6fe5da8f
commit
6f0dac4f48
@@ -838,9 +838,7 @@ class AIProxySerializer(serializers.Serializer):
|
||||
|
||||
messages = serializers.ListField(
|
||||
required=True,
|
||||
child=serializers.DictField(
|
||||
child=serializers.CharField(required=True),
|
||||
),
|
||||
child=serializers.DictField(),
|
||||
allow_empty=False,
|
||||
)
|
||||
model = serializers.CharField(required=True)
|
||||
|
||||
Reference in New Issue
Block a user