10
10
# Module-level logger. Use __name__ (dotted module path) instead of __file__
# (filesystem path) so this logger participates in the standard hierarchical
# logger configuration (e.g. "core.tasks" inherits handlers from "core").
logger = getLogger(__name__)
11
11
12
12
13
- def document_indexer_debounce_key (document_id ):
14
- """Returns debounce cache key"""
15
- return f"doc-indexer-debounce-{ document_id } "
16
-
17
-
18
- def incr_counter (key ):
13
def indexer_debounce_lock(document_id):
    """Increment the debounce counter for *document_id*, starting it at 1 if absent.

    Returns:
        int: The counter value after incrementing.
    """
    cache_key = f"doc-indexer-debounce-{document_id}"
    try:
        counter = cache.incr(cache_key)
    except ValueError:
        # cache.incr raises ValueError when the key does not exist (or holds
        # a non-numeric value): seed a fresh counter at 1.
        cache.set(cache_key, 1)
        counter = 1
    return counter
25
22
26
23
27
- def decr_counter ( key ):
24
+ def indexer_debounce_release ( document_id ):
28
25
"""Decrease or reset counter"""
26
+ key = f"doc-indexer-debounce-{ document_id } "
27
+
29
28
try :
30
29
return cache .decr (key )
31
30
except ValueError :
@@ -36,24 +35,26 @@ def decr_counter(key):
36
35
@app .task
37
36
def document_indexer_task (document_id ):
38
37
"""Celery Task : Sends indexation query for a document."""
39
- key = document_indexer_debounce_key (document_id )
38
+ # Prevents some circular imports
39
+ # pylint: disable=import-outside-toplevel
40
+ from core import models # noqa : PLC0415
41
+ from core .services .search_indexers import ( # noqa : PLC0415
42
+ get_batch_accesses_by_users_and_teams ,
43
+ get_document_indexer ,
44
+ )
40
45
41
46
# check if the counter : if still up, skip the task. only the last one
42
47
# within the countdown delay will do the query.
43
- if decr_counter ( key ) > 0 :
48
+ if indexer_debounce_release ( document_id ) > 0 :
44
49
logger .info ("Skip document %s indexation" , document_id )
45
50
return
46
51
47
- # Prevents some circular imports
48
- # pylint: disable=import-outside-toplevel
49
- from core import models # noqa: PLC0415
50
- from core .services .search_indexers import ( # noqa: PLC0415
51
- get_batch_accesses_by_users_and_teams ,
52
- get_document_indexer_class ,
53
- )
52
+ indexer = get_document_indexer ()
53
+
54
+ if indexer is None :
55
+ return
54
56
55
57
doc = models .Document .objects .get (pk = document_id )
56
- indexer = get_document_indexer_class ()()
57
58
accesses = get_batch_accesses_by_users_and_teams ((doc .path ,))
58
59
59
60
data = indexer .serialize_document (document = doc , accesses = accesses )
@@ -69,11 +70,7 @@ def trigger_document_indexer(document):
69
70
Args:
70
71
document (Document): The document instance.
71
72
"""
72
- if document .deleted_at or document .ancestors_deleted_at :
73
- return
74
-
75
- key = document_indexer_debounce_key (document .pk )
76
- countdown = getattr (settings , "SEARCH_INDEXER_COUNTDOWN" , 1 )
73
+ countdown = settings .SEARCH_INDEXER_COUNTDOWN
77
74
78
75
logger .info (
79
76
"Add task for document %s indexation in %.2f seconds" ,
@@ -83,6 +80,6 @@ def trigger_document_indexer(document):
83
80
84
81
# Each time this method is called during the countdown, we increment the
85
82
# counter and each task decrease it, so the index be run only once.
86
- incr_counter ( key )
83
+ indexer_debounce_lock ( document . pk )
87
84
88
85
document_indexer_task .apply_async (args = [document .pk ], countdown = countdown )
0 commit comments