path: root/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/date/InternalDateRangeTests.java
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.range.date;

import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRange> {

    private DocValueFormat format;
    private List<Tuple<Double, Double>> dateRanges;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        format = randomNumericDocValueFormat();

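        // Pick a random step size (seconds up to years) used to build consecutive date ranges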
        Function<DateTime, DateTime> interval = randomFrom(
                dateTime -> dateTime.plusSeconds(1), dateTime -> dateTime.plusMinutes(1), dateTime -> dateTime.plusHours(1),
                dateTime -> dateTime.plusDays(1), dateTime -> dateTime.plusMonths(1), dateTime -> dateTime.plusYears(1));

        final int numRanges = randomIntBetween(1, 10);
        final List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);

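        // Generate numRanges consecutive [from, to) ranges, each one interval apart, starting from the current time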
        DateTime date = new DateTime(DateTimeZone.UTC);
        double start = date.getMillis();
        double end = 0;
        for (int i = 0; i < numRanges; i++) {
            double from = date.getMillis();
            date = interval.apply(date);
            double to = date.getMillis();
            listOfRanges.add(Tuple.tuple(from, to));
            if (to > end) {
                end = to;
            }
        }
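        // Occasionally add a few extra ranges that overlap the ranges generated above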
        if (randomBoolean()) {
            final int randomOverlaps = randomIntBetween(1, 5);
            for (int i = 0; i < randomOverlaps; i++) {
                listOfRanges.add(Tuple.tuple(start, randomDoubleBetween(start, end, false)));
            }
        }
        dateRanges = Collections.unmodifiableList(listOfRanges);
    }

    @Override
    protected InternalDateRange createTestInstance(String name,
                                                   List<PipelineAggregator> pipelineAggregators,
                                                   Map<String, Object> metaData,
                                                   boolean keyed) {
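        // Create one bucket per pre-generated range, each with a random doc count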
        final List<InternalDateRange.Bucket> buckets = new ArrayList<>();
        for (int i = 0; i < dateRanges.size(); ++i) {
            Tuple<Double, Double> range = dateRanges.get(i);
            int docCount = randomIntBetween(0, 1000);
            double from = range.v1();
            double to = range.v2();
            buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format));
        }
        return new InternalDateRange(name, buckets, format, keyed, pipelineAggregators, metaData);
    }

    @Override
    protected Writeable.Reader<InternalDateRange> instanceReader() {
        return InternalDateRange::new;
    }
}