(backend) add document search indexer

Add indexer that iterates over documents in the database, formats them
as JSON objects and indexes them in the remote "Find" micro-service.
This commit is contained in:
Samuel Paccoud - DINUM
2025-07-24 12:31:20 +02:00
committed by Quentin BEY
parent f4bdde7e59
commit 1d9c2a8118
7 changed files with 503 additions and 0 deletions

View File

@@ -75,3 +75,28 @@ def test_utils_extract_attachments():
base64_string = base64.b64encode(update).decode("utf-8")
# image_key2 is missing the "/media/" part and shouldn't get extracted
assert utils.extract_attachments(base64_string) == [image_key1, image_key3]
def test_utils_get_ancestor_to_descendants_map_single_path():
    """A lone path should map each of its ancestor prefixes to that path."""
    mapping = utils.get_ancestor_to_descendants_map(['000100020005'], steplen=4)
    # Every 4-character prefix of the path (including the path itself)
    # must appear as a key pointing at the full path.
    expected = {
        '0001': {'000100020005'},
        '00010002': {'000100020005'},
        '000100020005': {'000100020005'},
    }
    assert mapping == expected
def test_utils_get_ancestor_to_descendants_map_multiple_paths():
    """Paths sharing a prefix should be grouped under their common ancestors."""
    mapping = utils.get_ancestor_to_descendants_map(
        ['000100020005', '00010003'], steplen=4
    )
    # The shared '0001' ancestor collects both paths; deeper prefixes
    # only collect the paths they actually lead to.
    expected = {
        '0001': {'000100020005', '00010003'},
        '00010002': {'000100020005'},
        '000100020005': {'000100020005'},
        '00010003': {'00010003'},
    }
    assert mapping == expected