(backend) use batches in indexing task

Reduce the number of Find API calls by grouping all the latest changes
for indexing: send all the documents updated or deleted since the
task was triggered.

Signed-off-by: Fabre Florian <ffabre@hybird.org>
This commit is contained in:
Fabre Florian
2025-10-31 17:06:46 +01:00
committed by Quentin BEY
parent 65d572ccd6
commit 780bcb360a
7 changed files with 506 additions and 378 deletions

View File

@@ -9,7 +9,7 @@ from django.db.models import signals
from django.dispatch import receiver
from . import models
from .tasks.search import trigger_document_indexer
from .tasks.search import trigger_batch_document_indexer
@receiver(signals.post_save, sender=models.Document)
@@ -19,7 +19,7 @@ def document_post_save(sender, instance, **kwargs): # pylint: disable=unused-ar
Note : Within the transaction we can have an empty content and a serialization
error.
"""
transaction.on_commit(partial(trigger_document_indexer, instance))
transaction.on_commit(partial(trigger_batch_document_indexer, instance))
@receiver(signals.post_save, sender=models.DocumentAccess)
@@ -28,4 +28,6 @@ def document_access_post_save(sender, instance, created, **kwargs): # pylint: d
Asynchronous call to the document indexer at the end of the transaction.
"""
if not created:
transaction.on_commit(partial(trigger_document_indexer, instance.document))
transaction.on_commit(
partial(trigger_batch_document_indexer, instance.document)
)