diff options
author | Christoph Büscher <christoph@elastic.co> | 2017-03-13 09:54:50 -0700 |
---|---|---|
committer | GitHub <noreply@github.com> | 2017-03-13 09:54:50 -0700 |
commit | a8117a2d77da613628ec45c5006e8a1b3f223dbf (patch) | |
tree | a66c00db9de4b03dc5118add891cc2d0c56581ee /core/src/test/java/org/elasticsearch/search/aggregations/bucket | |
parent | 21dcd4f4ca4622022d521f306a07e752eaa019e0 (diff) |
Tests: fix GeoHashGridAggregatorTests expectations (#23556)
Currently GeoHashGridAggregatorTests#testWithSeveralDocs increases the expected
document count per hash for each geo point added to a document. When points
added to the same doc fall into one bucket (one hash cell) the document should
only be counted once.
Closes #23555
Diffstat (limited to 'core/src/test/java/org/elasticsearch/search/aggregations/bucket')
-rw-r--r-- | core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java | 9 |
1 file changed, 8 insertions, 1 deletion
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java index a893990efc..04147b245c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java @@ -36,8 +36,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Consumer; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; @@ -68,15 +70,20 @@ public class GeoHashGridAggregatorTests extends AggregatorTestCase { Map<String, Integer> expectedCountPerGeoHash = new HashMap<>(); testCase(new MatchAllDocsQuery(), FIELD_NAME, precision, iw -> { List<LatLonDocValuesField> points = new ArrayList<>(); + Set<String> distinctHashesPerDoc = new HashSet<>(); for (int pointId = 0; pointId < numPoints; pointId++) { double lat = (180d * randomDouble()) - 90d; double lng = (360d * randomDouble()) - 180d; points.add(new LatLonDocValuesField(FIELD_NAME, lat, lng)); String hash = stringEncode(lng, lat, precision); - expectedCountPerGeoHash.put(hash, expectedCountPerGeoHash.getOrDefault(hash, 0) + 1); + if (distinctHashesPerDoc.contains(hash) == false) { + expectedCountPerGeoHash.put(hash, expectedCountPerGeoHash.getOrDefault(hash, 0) + 1); + } + distinctHashesPerDoc.add(hash); if (usually()) { iw.addDocument(points); points.clear(); + distinctHashesPerDoc.clear(); } } if (points.size() != 0) { |