author     Colin Goodheart-Smithe <colings86@users.noreply.github.com>    2017-04-03 09:39:03 +0100
committer  GitHub <noreply@github.com>                                    2017-04-03 09:39:03 +0100
commit     058869ed549deb080522d873e3891425d377cc48 (patch)
tree       29d6750ab8c4ba30cf366cb897b66cfd6ac9b690 /core/src/test/java/org/elasticsearch/search/aggregations/bucket
parent     5cf1d4ae90b1c19df4c3ddd1dbf8f196177012b0 (diff)
Adds tests for cardinality and filter aggregations (#23826)
* Adds tests for cardinality and filter aggregations

  Relates to #22278

* Addresses review comments
Diffstat (limited to 'core/src/test/java/org/elasticsearch/search/aggregations/bucket')
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterAggregatorTests.java  104
1 file changed, 104 insertions, 0 deletions
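The new tests are built on the AggregatorTestCase harness: documents are written to an in-memory Lucene index with RandomIndexWriter, the aggregation builder under test is constructed directly, and the harness's search() and searchAndReduce() helpers run the aggregator with and without the reduce phase. The fragment below is only a condensed illustration of that pattern, distilled from the file in the diff that follows; it assumes it is running inside an AggregatorTestCase subclass with the same "field" KeywordFieldType set up in @Before, and is not itself part of the commit.

    // Index one document, then run the filter aggregation through the test harness.
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    Document document = new Document();
    document.add(new Field("field", "foo", fieldType)); // fieldType: the KeywordFieldType from @Before
    indexWriter.addDocument(document);
    indexWriter.close();

    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
    FilterAggregationBuilder builder =
            new FilterAggregationBuilder("test", QueryBuilders.termQuery("field", "foo"));
    // search() executes the aggregator over the shard; searchAndReduce() also exercises
    // the reduce phase. testRandom() below asserts the same count for both code paths.
    InternalFilter filter = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
    assertEquals(1L, filter.getDocCount());
    indexReader.close();
    directory.close();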
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterAggregatorTests.java
new file mode 100644
index 0000000000..491f445fdf
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterAggregatorTests.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.Directory;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
+import org.junit.Before;
+
+public class FilterAggregatorTests extends AggregatorTestCase {
+ private MappedFieldType fieldType;
+
+ @Before
+ public void setUpTest() throws Exception {
+ super.setUp();
+ fieldType = new KeywordFieldMapper.KeywordFieldType();
+ fieldType.setHasDocValues(true);
+ fieldType.setIndexOptions(IndexOptions.DOCS);
+ fieldType.setName("field");
+ }
+
+ public void testEmpty() throws Exception {
+ Directory directory = newDirectory();
+ RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
+ indexWriter.close();
+ IndexReader indexReader = DirectoryReader.open(directory);
+ IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
+ QueryBuilder filter = QueryBuilders.termQuery("field", randomAsciiOfLength(5));
+ FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter);
+ InternalFilter response = search(indexSearcher, new MatchAllDocsQuery(), builder,
+ fieldType);
+ assertEquals(response.getDocCount(), 0);
+ indexReader.close();
+ directory.close();
+ }
+
+ public void testRandom() throws Exception {
+ Directory directory = newDirectory();
+ RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
+ int numDocs = randomIntBetween(100, 200);
+ int maxTerm = randomIntBetween(10, 50);
+ int[] expectedBucketCount = new int[maxTerm];
+ Document document = new Document();
+ for (int i = 0; i < numDocs; i++) {
+ if (frequently()) {
+ // make sure we have more than one segment to test the merge
+ indexWriter.getReader().close();
+ }
+ int value = randomInt(maxTerm-1);
+ expectedBucketCount[value] += 1;
+ document.add(new Field("field", Integer.toString(value), fieldType));
+ indexWriter.addDocument(document);
+ document.clear();
+ }
+ indexWriter.close();
+
+ IndexReader indexReader = DirectoryReader.open(directory);
+ IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
+ int value = randomInt(maxTerm - 1);
+ QueryBuilder filter = QueryBuilders.termQuery("field", Integer.toString(value));
+ FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter);
+
+ for (boolean doReduce : new boolean[] {true, false}) {
+ final InternalFilter response;
+ if (doReduce) {
+ response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+ } else {
+ response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+ }
+ assertEquals(response.getDocCount(), (long) expectedBucketCount[value]);
+ }
+ indexReader.close();
+ directory.close();
+ }
+}
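The cardinality half of this change lives under the metrics test package, which falls outside the path this diffstat is limited to, so it is not shown above. For orientation only, here is a minimal sketch of what an AggregatorTestCase-based cardinality test can look like; the class name, field name, and assertion are hypothetical illustrations, not the file added by this commit.

    package org.elasticsearch.search.aggregations.metrics;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.SortedNumericDocValuesField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.store.Directory;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.NumberFieldMapper;
    import org.elasticsearch.search.aggregations.AggregatorTestCase;
    import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
    import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
    import org.elasticsearch.search.aggregations.support.ValueType;

    // Hypothetical sketch; the real CardinalityAggregatorTests added by this commit may differ.
    public class CardinalityAggregatorSketchTests extends AggregatorTestCase {

        public void testDistinctValues() throws Exception {
            Directory directory = newDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
            // Three documents but only two distinct values for "number".
            for (long value : new long[] {1, 2, 2}) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("number", value));
                indexWriter.addDocument(document);
            }
            indexWriter.close();

            MappedFieldType fieldType =
                    new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
            fieldType.setHasDocValues(true);
            fieldType.setName("number");

            IndexReader indexReader = DirectoryReader.open(directory);
            IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
            CardinalityAggregationBuilder builder =
                    new CardinalityAggregationBuilder("cardinality", ValueType.NUMERIC).field("number");
            // At such small counts the HyperLogLog++ estimate is exact, so the assertion is stable.
            InternalCardinality cardinality =
                    searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
            assertEquals(2L, cardinality.getValue());
            indexReader.close();
            directory.close();
        }
    }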