llm-service/app/ai/indexing — 1 file changed, +8 −2 lines changed

@@ -130,6 +130,12 @@ def _compute_embeddings(
         logger.debug(f"Waiting for {len(futures)} futures")
         for future in as_completed(futures):
             i, batch_embeddings = future.result()
-            for chunk, embedding in zip(batched_chunks[i], batch_embeddings):
+            batch_chunks = batched_chunks[i]
+            if len(batch_chunks) != len(batch_embeddings):
+                raise ValueError(
+                    f"Expected {len(batch_chunks)} embedding vectors for this batch of chunks,"
+                    + f" but got {len(batch_embeddings)} from {self.embedding_model.model_name}"
+                )
+            for chunk, embedding in zip(batch_chunks, batch_embeddings):
                 chunk.embedding = embedding
-            yield batched_chunks[i]
+            yield batch_chunks
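For context, a minimal, self-contained sketch of the pattern this change guards: zip() silently truncates to the shorter of the two sequences, so the batch of chunks and the returned embedding vectors are compared before pairing. TextChunk, fake_embed, and compute_embeddings below are illustrative stand-ins, not the repository's actual classes or helpers.

# Minimal sketch of the guarded zip pattern introduced in the diff above.
# TextChunk, fake_embed, and compute_embeddings are illustrative stand-ins,
# not the repository's actual classes or helpers.
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass
from typing import Iterator, List, Optional, Tuple


@dataclass
class TextChunk:
    text: str
    embedding: Optional[List[float]] = None


def fake_embed(i: int, texts: List[str]) -> Tuple[int, List[List[float]]]:
    # Stand-in for the embedding model call: one vector per input text.
    return i, [[float(len(t))] for t in texts]


def compute_embeddings(
    batched_chunks: List[List[TextChunk]],
) -> Iterator[List[TextChunk]]:
    with ThreadPoolExecutor() as executor:
        futures = [
            executor.submit(fake_embed, i, [chunk.text for chunk in batch])
            for i, batch in enumerate(batched_chunks)
        ]
        for future in as_completed(futures):
            i, batch_embeddings = future.result()
            batch_chunks = batched_chunks[i]
            # zip() would silently drop the extras if the model returned too few
            # (or too many) vectors, so fail loudly before pairing.
            if len(batch_chunks) != len(batch_embeddings):
                raise ValueError(
                    f"Expected {len(batch_chunks)} embedding vectors,"
                    f" but got {len(batch_embeddings)}"
                )
            for chunk, embedding in zip(batch_chunks, batch_embeddings):
                chunk.embedding = embedding
            yield batch_chunks


if __name__ == "__main__":
    batches = [[TextChunk("hello"), TextChunk("world")], [TextChunk("x")]]
    for batch in compute_embeddings(batches):
        print([(c.text, c.embedding) for c in batch])

Running this over two small batches yields each batch with embeddings attached; if a batch came back with a mismatched number of vectors, the ValueError fires instead of silently mis-pairing chunks, which is the behavior the patch adds.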