|
14 | 14 | import org.apache.lucene.document.SortedNumericDocValuesField;
|
15 | 15 | import org.apache.lucene.index.DirectoryReader;
|
16 | 16 | import org.apache.lucene.index.IndexReader;
|
| 17 | +import org.apache.lucene.index.IndexableField; |
17 | 18 | import org.apache.lucene.index.MultiReader;
|
18 | 19 | import org.apache.lucene.index.RandomIndexWriter;
|
19 | 20 | import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
@@ -243,9 +244,23 @@ private void verifyAvgOfDoubles(double[] values, double expected, double delta)
|
243 | 244 | MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE);
|
244 | 245 | testAggregation(aggregationBuilder, new MatchAllDocsQuery(),
|
245 | 246 | iw -> {
|
| 247 | + List<List<IndexableField>> docs = new ArrayList<>(); |
246 | 248 | for (double value : values) {
|
247 |
| - iw.addDocument(singleton(new NumericDocValuesField("number", NumericUtils.doubleToSortableLong(value)))); |
| 249 | + docs.add( |
| 250 | + org.elasticsearch.common.collect.List.of( |
| 251 | + new NumericDocValuesField("number", NumericUtils.doubleToSortableLong(value)) |
| 252 | + ) |
| 253 | + ); |
248 | 254 | }
|
| 255 | + /* |
| 256 | + * Use addDocuments to force us to collect from a single segment |
| 257 | + * so we don't break the collection across the shards. We can't do |
| 258 | + * *that* because we don't bring the compensations for the sum |
| 259 | + * back in the shard results. If we don't bring back the compensations |
| 260 | + * errors can creep in. Not big errors, but big enough to upset this |
| 261 | + * test. |
| 262 | + */ |
| 263 | + iw.addDocuments(docs); |
249 | 264 | },
|
250 | 265 | avg -> assertEquals(expected, avg.getValue(), delta),
|
251 | 266 | fieldType
|
|
0 commit comments