/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.fetch.subphase;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.QueryBitSetProducer;
import org.apache.lucene.store.Directory;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext.NestedInnerHits.NestedChildrenQuery;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;
import java.util.List;

import static org.hamcrest.Matchers.equalTo;

public class NestedChildrenFilterTests extends ESTestCase {
    public void testNestedChildrenFilter() throws Exception {
        int numParentDocs = scaledRandomIntBetween(0, 32);
        int maxChildDocsPerParent = scaledRandomIntBetween(8, 16);

        Directory dir = newDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
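        // Index each parent together with its child docs as a single block: the child docs
        // come first and the parent doc is added last via addDocuments(), matching the block
        // layout that Lucene's join queries (and NestedChildrenQuery) expect.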
        for (int i = 0; i < numParentDocs; i++) {
            int numChildDocs = scaledRandomIntBetween(0, maxChildDocsPerParent);
            List<Document> docs = new ArrayList<>(numChildDocs + 1);
            for (int j = 0; j < numChildDocs; j++) {
                Document childDoc = new Document();
                childDoc.add(new StringField("type", "child", Field.Store.NO));
                docs.add(childDoc);
            }

            Document parentDoc = new Document();
            parentDoc.add(new StringField("type", "parent", Field.Store.NO));
            parentDoc.add(new LegacyIntField("num_child_docs", numChildDocs, Field.Store.YES));
            docs.add(parentDoc);
            writer.addDocuments(docs);
        }

        IndexReader reader = writer.getReader();
        writer.close();

        IndexSearcher searcher = new IndexSearcher(reader);
        FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
        BitSetProducer parentFilter = new QueryBitSetProducer(new TermQuery(new Term("type", "parent")));
        Query childFilter = new TermQuery(new Term("type", "child"));
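        // For every parent doc in every segment, build a NestedChildrenQuery scoped to that
        // parent and verify it matches exactly the number of child docs recorded in "num_child_docs".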
        int checkedParents = 0;
        final Weight parentsWeight = searcher.createNormalizedWeight(new TermQuery(new Term("type", "parent")), false);
        for (LeafReaderContext leaf : reader.leaves()) {
            DocIdSetIterator parents = parentsWeight.scorer(leaf).iterator();
            for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) {
                int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue();
                hitContext.reset(null, leaf, parentDoc, searcher);
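                // The query should only match the child docs that belong to the parent doc
                // currently referenced by the hit context.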
                NestedChildrenQuery nestedChildrenFilter = new NestedChildrenQuery(parentFilter, childFilter, hitContext);
                TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
                searcher.search(new ConstantScoreQuery(nestedChildrenFilter), totalHitCountCollector);
                assertThat(totalHitCountCollector.getTotalHits(), equalTo(expectedChildDocs));
                checkedParents++;
            }
        }
        assertThat(checkedParents, equalTo(numParentDocs));
        reader.close();
        dir.close();
    }

}