1 parent 56b83b3 commit 16b9883
tasks/update_embeddings.py
@@ -19,6 +19,7 @@
 os.environ.setdefault("OMP_NUM_THREADS", "1")
 os.environ.setdefault("TOKENIZERS_PARALLELISM", "false")
 
+MODEL_INIT_LOCK = threading.Lock()
 
 """
 High-level call order (condensed):
@@ -211,7 +212,11 @@ def _get_embedder(
 
 
     cached = getattr(thread_local, "embedder", None)
     if cached is None:
-        thread_local.embedder = EmbedderTools(model_name=model_name, **embedder_init_kwargs)
+        with MODEL_INIT_LOCK:
+            thread_local.embedder = EmbedderTools(
+                model_name=model_name,
+                **embedder_init_kwargs,
+            )
         cached = thread_local.embedder
     return cached
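The commit keeps one embedder per worker thread but serializes construction with a process-wide lock, so the underlying model is never initialized concurrently. Below is a minimal standalone sketch of that pattern, not project code: EmbedderTools is a hypothetical stand-in (its real definition and the surrounding call sites are not part of this diff), and the model name in the worker is made up for illustration.

import threading
from concurrent.futures import ThreadPoolExecutor

MODEL_INIT_LOCK = threading.Lock()
thread_local = threading.local()


class EmbedderTools:
    """Stand-in for the project's embedder wrapper (hypothetical constructor)."""

    def __init__(self, model_name, **kwargs):
        # In the real class this is where the heavyweight (and not
        # necessarily thread-safe) model load would happen.
        self.model_name = model_name
        self.kwargs = kwargs


def _get_embedder(model_name, **embedder_init_kwargs):
    # One cached embedder per thread; construction is serialized by the lock.
    cached = getattr(thread_local, "embedder", None)
    if cached is None:
        with MODEL_INIT_LOCK:
            thread_local.embedder = EmbedderTools(
                model_name=model_name, **embedder_init_kwargs
            )
        cached = thread_local.embedder
    return cached


def _worker(_):
    # Hypothetical model name, purely for the demo.
    return id(_get_embedder("some-embedding-model"))


if __name__ == "__main__":
    with ThreadPoolExecutor(max_workers=4) as pool:
        distinct = set(pool.map(_worker, range(8)))
    print(f"{len(distinct)} distinct embedder instance(s), one per worker thread")

Note that the lock only guards construction; once a thread has populated its thread-local cache, subsequent lookups are lock-free.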