summaryrefslogtreecommitdiff
path: root/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
blob: 70c1486fe1f03a0573e128dd3b2a983932a97e85 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;

import java.util.List;
import static org.hamcrest.Matchers.equalTo;

/**
 * Tests that field data loaded from a {@code binary} field with {@code doc_values: true}
 * exposes values that are sorted, deduplicated, and byte-equal to what was indexed.
 */
public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
    @Override
    protected boolean hasDocValues() {
        return true;
    }

    public void testDocValue() throws Exception {
        // Mapping: a single "field" of type binary with doc values enabled.
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("test")
                .startObject("properties")
                .startObject("field")
                .field("type", "binary")
                .field("doc_values", true)
                .endObject()
                .endObject()
                .endObject().endObject().string();

        final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping));

        // doc 0: two random values in an array.
        ObjectArrayList<byte[]> bytesList1 = new ObjectArrayList<>(2);
        bytesList1.add(randomBytes());
        bytesList1.add(randomBytes());
        XContentBuilder doc = XContentFactory.jsonBuilder().startObject()
                .startArray("field").value(bytesList1.get(0)).value(bytesList1.get(1)).endArray()
                .endObject();
        parseAndIndex(mapper, "1", doc);

        // doc 1: a single value.
        byte[] bytes1 = randomBytes();
        doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1).endObject();
        parseAndIndex(mapper, "2", doc);

        // doc 2: no value at all.
        doc = XContentFactory.jsonBuilder().startObject().endObject();
        parseAndIndex(mapper, "3", doc);

        // doc 3: a duplicated value — doc values are expected to drop the duplicate.
        ObjectArrayList<byte[]> bytesList2 = new ObjectArrayList<>(2);
        bytesList2.add(randomBytes());
        bytesList2.add(randomBytes());
        doc = XContentFactory.jsonBuilder().startObject()
                .startArray("field").value(bytesList2.get(0)).value(bytesList2.get(1)).value(bytesList2.get(0)).endArray()
                .endObject();
        parseAndIndex(mapper, "4", doc);

        List<LeafReaderContext> readers = refreshReader();
        IndexFieldData<?> indexFieldData = getForField("field");
        for (LeafReaderContext reader : readers) {
            AtomicFieldData fieldData = indexFieldData.load(reader);

            SortedBinaryDocValues bytesValues = fieldData.getBytesValues();

            // Doc values store values in sorted, deduplicated order, so the expected
            // lists must be normalized the same way before comparing.
            CollectionUtils.sortAndDedup(bytesList1);
            bytesValues.setDocument(0);
            assertThat(bytesValues.count(), equalTo(2));
            assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(bytesList1.get(0))));
            assertThat(bytesValues.valueAt(1), equalTo(new BytesRef(bytesList1.get(1))));

            bytesValues.setDocument(1);
            assertThat(bytesValues.count(), equalTo(1));
            assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(bytes1)));

            // The document without a value must report zero values.
            bytesValues.setDocument(2);
            assertThat(bytesValues.count(), equalTo(0));

            // The duplicate third entry must have been collapsed to two values.
            CollectionUtils.sortAndDedup(bytesList2);
            bytesValues.setDocument(3);
            assertThat(bytesValues.count(), equalTo(2));
            assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(bytesList2.get(0))));
            assertThat(bytesValues.valueAt(1), equalTo(new BytesRef(bytesList2.get(1))));
        }
    }

    /** Parses {@code doc} as source for document {@code id} and adds it to the test writer. */
    private void parseAndIndex(DocumentMapper mapper, String id, XContentBuilder doc) throws Exception {
        ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", id, doc.bytes(), XContentType.JSON));
        writer.addDocument(d.rootDoc());
    }

    /** Returns a random byte array of length 10–1000 (inclusive). */
    private byte[] randomBytes() {
        int size = randomIntBetween(10, 1000);
        byte[] bytes = new byte[size];
        random().nextBytes(bytes);
        return bytes;
    }

    @Override
    protected String getFieldDataType() {
        return "binary";
    }
}