summaryrefslogtreecommitdiff
path: root/core/src/test/java/org/elasticsearch
diff options
context:
space:
mode:
authorjavanna <cavannaluca@gmail.com>2017-05-17 17:59:00 +0200
committerLuca Cavanna <luca@elastic.co>2017-05-17 17:59:00 +0200
commitce7326eb88b95c7d9b893fde2e71cc90b09d4fb3 (patch)
tree6b92539451ef529c7b8b2c5b06a417144570fcd2 /core/src/test/java/org/elasticsearch
parentd5fc520741943c83cfac58b4f3289f65af233c6d (diff)
parent2ccc223ff761043807683f34b29c693af6c94d95 (diff)
Merge branch 'master' into feature/client_aggs_parsing
Diffstat (limited to 'core/src/test/java/org/elasticsearch')
-rw-r--r--core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/VersionTests.java43
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java54
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsBlocksIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java43
-rw-r--r--core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/main/MainActionTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java31
-rw-r--r--core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java9
-rw-r--r--core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java259
-rw-r--r--core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java50
-rw-r--r--core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java25
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java24
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java226
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/concurrent/ResizableBlockingQueueTests.java52
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/MapperTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/refresh/RefreshStatsTests.java11
-rw-r--r--core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java45
-rw-r--r--core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/script/IndexLookupIT.java1029
-rw-r--r--core/src/test/java/org/elasticsearch/script/NativeScriptTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java15
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java15
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java19
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java38
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java16
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java13
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java42
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java14
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java13
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java13
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java60
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java11
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorTests.java11
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java24
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java101
-rw-r--r--core/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java41
77 files changed, 847 insertions, 1682 deletions
diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
index c0430001bb..106c24982a 100644
--- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
@@ -972,7 +972,7 @@ public class ExceptionSerializationTests extends ESTestCase {
try (StreamInput in = decoded.streamInput()) {
//randomize the version across released and unreleased ones
Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
in.setVersion(version);
ElasticsearchException exception = new ElasticsearchException(in);
assertEquals("test message", exception.getMessage());
diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java
index 06a4fe117f..96a0c9aa81 100644
--- a/core/src/test/java/org/elasticsearch/VersionTests.java
+++ b/core/src/test/java/org/elasticsearch/VersionTests.java
@@ -33,7 +33,7 @@ import java.util.Locale;
import java.util.Map;
import java.util.Set;
-import static org.elasticsearch.Version.V_5_3_0_UNRELEASED;
+import static org.elasticsearch.Version.V_5_3_0;
import static org.elasticsearch.Version.V_6_0_0_alpha2_UNRELEASED;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
@@ -46,30 +46,30 @@ import static org.hamcrest.Matchers.sameInstance;
public class VersionTests extends ESTestCase {
public void testVersionComparison() throws Exception {
- assertThat(V_5_3_0_UNRELEASED.before(V_6_0_0_alpha2_UNRELEASED), is(true));
- assertThat(V_5_3_0_UNRELEASED.before(V_5_3_0_UNRELEASED), is(false));
- assertThat(V_6_0_0_alpha2_UNRELEASED.before(V_5_3_0_UNRELEASED), is(false));
+ assertThat(V_5_3_0.before(V_6_0_0_alpha2_UNRELEASED), is(true));
+ assertThat(V_5_3_0.before(V_5_3_0), is(false));
+ assertThat(V_6_0_0_alpha2_UNRELEASED.before(V_5_3_0), is(false));
- assertThat(V_5_3_0_UNRELEASED.onOrBefore(V_6_0_0_alpha2_UNRELEASED), is(true));
- assertThat(V_5_3_0_UNRELEASED.onOrBefore(V_5_3_0_UNRELEASED), is(true));
- assertThat(V_6_0_0_alpha2_UNRELEASED.onOrBefore(V_5_3_0_UNRELEASED), is(false));
+ assertThat(V_5_3_0.onOrBefore(V_6_0_0_alpha2_UNRELEASED), is(true));
+ assertThat(V_5_3_0.onOrBefore(V_5_3_0), is(true));
+ assertThat(V_6_0_0_alpha2_UNRELEASED.onOrBefore(V_5_3_0), is(false));
- assertThat(V_5_3_0_UNRELEASED.after(V_6_0_0_alpha2_UNRELEASED), is(false));
- assertThat(V_5_3_0_UNRELEASED.after(V_5_3_0_UNRELEASED), is(false));
- assertThat(V_6_0_0_alpha2_UNRELEASED.after(V_5_3_0_UNRELEASED), is(true));
+ assertThat(V_5_3_0.after(V_6_0_0_alpha2_UNRELEASED), is(false));
+ assertThat(V_5_3_0.after(V_5_3_0), is(false));
+ assertThat(V_6_0_0_alpha2_UNRELEASED.after(V_5_3_0), is(true));
- assertThat(V_5_3_0_UNRELEASED.onOrAfter(V_6_0_0_alpha2_UNRELEASED), is(false));
- assertThat(V_5_3_0_UNRELEASED.onOrAfter(V_5_3_0_UNRELEASED), is(true));
- assertThat(V_6_0_0_alpha2_UNRELEASED.onOrAfter(V_5_3_0_UNRELEASED), is(true));
+ assertThat(V_5_3_0.onOrAfter(V_6_0_0_alpha2_UNRELEASED), is(false));
+ assertThat(V_5_3_0.onOrAfter(V_5_3_0), is(true));
+ assertThat(V_6_0_0_alpha2_UNRELEASED.onOrAfter(V_5_3_0), is(true));
assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));
- assertThat(V_5_3_0_UNRELEASED, is(lessThan(V_6_0_0_alpha2_UNRELEASED)));
- assertThat(V_5_3_0_UNRELEASED.compareTo(V_5_3_0_UNRELEASED), is(0));
- assertThat(V_6_0_0_alpha2_UNRELEASED, is(greaterThan(V_5_3_0_UNRELEASED)));
+ assertThat(V_5_3_0, is(lessThan(V_6_0_0_alpha2_UNRELEASED)));
+ assertThat(V_5_3_0.compareTo(V_5_3_0), is(0));
+ assertThat(V_6_0_0_alpha2_UNRELEASED, is(greaterThan(V_5_3_0)));
}
public void testMin() {
@@ -100,7 +100,7 @@ public class VersionTests extends ESTestCase {
assertEquals(Version.V_5_0_0, Version.V_6_0_0_alpha2_UNRELEASED.minimumIndexCompatibilityVersion());
assertEquals(Version.fromId(2000099), Version.V_5_0_0.minimumIndexCompatibilityVersion());
assertEquals(Version.fromId(2000099),
- Version.V_5_1_1_UNRELEASED.minimumIndexCompatibilityVersion());
+ Version.V_5_1_1.minimumIndexCompatibilityVersion());
assertEquals(Version.fromId(2000099),
Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion());
}
@@ -157,7 +157,7 @@ public class VersionTests extends ESTestCase {
public void testIndexCreatedVersion() {
// an actual index has a IndexMetaData.SETTING_INDEX_UUID
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_2,
- Version.V_5_2_0_UNRELEASED, Version.V_6_0_0_alpha2_UNRELEASED);
+ Version.V_5_2_0, Version.V_6_0_0_alpha2_UNRELEASED);
assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
}
@@ -311,8 +311,8 @@ public class VersionTests extends ESTestCase {
}
if (other.isAlpha() == false && version.isAlpha() == false
&& other.major == version.major && other.minor == version.minor) {
- assertEquals(other.luceneVersion.major, version.luceneVersion.major);
- assertEquals(other.luceneVersion.minor, version.luceneVersion.minor);
+ assertEquals(version + " vs. " + other, other.luceneVersion.major, version.luceneVersion.major);
+ assertEquals(version + " vs. " + other, other.luceneVersion.minor, version.luceneVersion.minor);
// should we also assert the lucene bugfix version?
}
}
@@ -326,11 +326,12 @@ public class VersionTests extends ESTestCase {
public void testIsCompatible() {
assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion()));
- assertTrue(isCompatible(Version.V_5_0_0, Version.V_6_0_0_alpha2_UNRELEASED));
+ assertTrue(isCompatible(Version.V_5_5_0_UNRELEASED, Version.V_6_0_0_alpha2_UNRELEASED));
assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_0_0_alpha2_UNRELEASED));
assertFalse(isCompatible(Version.fromId(2000099), Version.V_5_0_0));
}
+
public boolean isCompatible(Version left, Version right) {
boolean result = left.isCompatible(right);
assert result == right.isCompatible(left);
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java
index bd1377b89f..ad03d4b001 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java
@@ -122,5 +122,14 @@ public class SettingsUpdaterTests extends ESTestCase {
Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build());
assertEquals(clusterState.blocks().global().size(), 0);
+
+ clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build(),
+ Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build());
+ assertEquals(clusterState.blocks().global().size(), 1);
+ assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK);
+ clusterState = updater.updateSettings(build, Settings.EMPTY,
+ Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), false).build());
+ assertEquals(clusterState.blocks().global().size(), 0);
+
}
}
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java
index 5181e943c2..90eb7cdcfd 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java
@@ -93,7 +93,7 @@ public class ClusterSearchShardsResponseTests extends ESTestCase {
assertEquals(clusterSearchShardsGroup.getShardId(), deserializedGroup.getShardId());
assertArrayEquals(clusterSearchShardsGroup.getShards(), deserializedGroup.getShards());
}
- if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (version.onOrAfter(Version.V_5_1_1)) {
assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters());
} else {
assertNull(deserialized.getIndicesAndFilters());
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java
index 5f3e107942..aec8349ea8 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java
@@ -51,7 +51,7 @@ public class PutStoredScriptRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] rawStreamBytes = Base64.getDecoder().decode("ADwDCG11c3RhY2hlAQZzY3JpcHQCe30A");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(rawStreamBytes)) {
in.setVersion(version);
PutStoredScriptRequest serialized = new PutStoredScriptRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java
index 95fa5b2600..2aaf2507e3 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java
@@ -28,6 +28,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
@ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class PendingTasksBlocksIT extends ESIntegTestCase {
@@ -36,7 +37,8 @@ public class PendingTasksBlocksIT extends ESIntegTestCase {
ensureGreen("test");
// This test checks that the Pending Cluster Tasks operation is never blocked, even if an index is read only or whatever.
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test", blockSetting);
PendingClusterTasksResponse response = client().admin().cluster().preparePendingClusterTasks().execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java
index dbc7e5cddc..ee1f4dd24e 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java
@@ -28,6 +28,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
@@ -52,7 +53,7 @@ public class ClearIndicesCacheBlocksIT extends ESIntegTestCase {
}
}
// Request is blocked
- for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_BLOCKS_METADATA, SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test", blockSetting);
assertBlocked(client().admin().indices().prepareClearCache("test").setFieldDataCache(true).setQueryCache(true).setFieldDataCache(true));
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java
index 590eba3666..97e1bf2930 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java
@@ -51,8 +51,8 @@ public class CreateIndexRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAANmb28APAMBB215X3R5cGULeyJ0eXBlIjp7fX0AAAD////+AA==");
- final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, Version.V_5_1_1, Version.V_5_1_2,
+ Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
CreateIndexRequest serialized = new CreateIndexRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java
index a83c209a3c..63cfc5da43 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java
@@ -19,22 +19,68 @@
package org.elasticsearch.action.admin.indices.delete;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-@ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class DeleteIndexBlocksIT extends ESIntegTestCase {
public void testDeleteIndexWithBlocks() {
createIndex("test");
ensureGreen("test");
-
try {
setClusterReadOnly(true);
- assertBlocked(client().admin().indices().prepareDelete("test"));
+ assertBlocked(client().admin().indices().prepareDelete("test"), MetaData.CLUSTER_READ_ONLY_BLOCK);
} finally {
setClusterReadOnly(false);
}
}
+
+ public void testDeleteIndexOnIndexReadOnlyAllowDeleteSetting() {
+ createIndex("test");
+ ensureGreen("test");
+ client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get();
+ refresh();
+ try {
+ Settings settings = Settings.builder().put(IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE, true).build();
+ assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get());
+ assertSearchHits(client().prepareSearch().get(), "1");
+ assertBlocked(client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"),
+ IndexMetaData.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK);
+ assertBlocked(client().admin().indices().prepareUpdateSettings("test")
+ .setSettings(Settings.builder().put("index.number_of_replicas", 2)), IndexMetaData.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK);
+ assertSearchHits(client().prepareSearch().get(), "1");
+ assertAcked(client().admin().indices().prepareDelete("test"));
+ } finally {
+ Settings settings = Settings.builder().putNull(IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE).build();
+ assertAcked(client().admin().indices().prepareUpdateSettings("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).
+ setSettings(settings).get());
+ }
+ }
+
+ public void testDeleteIndexOnReadOnlyAllowDeleteSetting() {
+ createIndex("test");
+ ensureGreen("test");
+ client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get();
+ refresh();
+ try {
+ Settings settings = Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build();
+ assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
+ assertSearchHits(client().prepareSearch().get(), "1");
+ assertBlocked(client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"),
+ MetaData.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK);
+ assertBlocked(client().admin().indices().prepareUpdateSettings("test")
+ .setSettings(Settings.builder().put("index.number_of_replicas", 2)), MetaData.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK);
+ assertSearchHits(client().prepareSearch().get(), "1");
+ assertAcked(client().admin().indices().prepareDelete("test"));
+ } finally {
+ Settings settings = Settings.builder().putNull(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey()).build();
+ assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
+ }
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java
index 3ba349ffca..1ace701572 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java
@@ -28,6 +28,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
@@ -46,7 +47,8 @@ public class FlushBlocksIT extends ESIntegTestCase {
}
// Request is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test", blockSetting);
FlushResponse response = client().admin().indices().prepareFlush("test").execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java
index e1f498b09b..aa6b7c6138 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java
@@ -28,6 +28,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
@@ -59,7 +60,7 @@ public class ForceMergeBlocksIT extends ESIntegTestCase {
}
// Request is blocked
- for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_BLOCKS_METADATA, SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test", blockSetting);
assertBlocked(client().admin().indices().prepareForceMerge("test"));
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java
index 722482837a..2bd13669fe 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java
@@ -38,6 +38,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.anyOf;
@@ -178,7 +179,7 @@ public class GetIndexIT extends ESIntegTestCase {
}
public void testGetIndexWithBlocks() {
- for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
+ for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("idx", block);
GetIndexResponse response = client().admin().indices().prepareGetIndex().addIndices("idx")
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java
index fd7f830e59..2870b04fdb 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java
@@ -94,7 +94,7 @@ public class PutMappingRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAQNmb28MAA8tLS0KZm9vOiAiYmFyIgoAPAMAAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutMappingRequest request = new PutMappingRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java
index cc74f7c734..d69f7842bb 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java
@@ -29,7 +29,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
@@ -42,7 +42,8 @@ public class RefreshBlocksIT extends ESIntegTestCase {
NumShards numShards = getNumShards("test");
// Request is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test", blockSetting);
RefreshResponse response = client().admin().indices().prepareRefresh("test").execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java
index 035c760d84..bcf106eda8 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java
@@ -28,6 +28,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
@@ -44,7 +45,8 @@ public class IndicesSegmentsBlocksIT extends ESIntegTestCase {
client().admin().indices().prepareFlush("test-blocks").get();
// Request is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test-blocks", blockSetting);
IndicesSegmentResponse response = client().admin().indices().prepareSegments("test-blocks").execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsBlocksIT.java
index 25fdb7a84d..e7b477f61b 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsBlocksIT.java
@@ -29,6 +29,7 @@ import java.util.Arrays;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
@ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class IndicesStatsBlocksIT extends ESIntegTestCase {
@@ -37,7 +38,8 @@ public class IndicesStatsBlocksIT extends ESIntegTestCase {
ensureGreen("ro");
// Request is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("ro", blockSetting);
IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("ro").execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java
index 48b2ae79cf..453efb2a60 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java
@@ -89,7 +89,7 @@ public class PutIndexTemplateRequestTests extends ESTestCase {
public void testPutIndexTemplateRequestSerializationXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAANmb28IdGVtcGxhdGUAAAAAAAABA2Jhcg8tLS0KZm9vOiAiYmFyIgoAAAAAAAAAAAAAAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutIndexTemplateRequest request = new PutIndexTemplateRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java
index 6b68112d5d..8543b35569 100644
--- a/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.fieldcaps;
+import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
@@ -33,10 +34,52 @@ public class FieldCapabilitiesRequestTests extends ESTestCase {
for (int i = 0; i < size; i++) {
randomFields[i] = randomAlphaOfLengthBetween(5, 10);
}
+
+ size = randomIntBetween(0, 20);
+ String[] randomIndices = new String[size];
+ for (int i = 0; i < size; i++) {
+ randomIndices[i] = randomAlphaOfLengthBetween(5, 10);
+ }
request.fields(randomFields);
+ request.indices(randomIndices);
+ if (randomBoolean()) {
+ request.indicesOptions(randomBoolean() ? IndicesOptions.strictExpand() : IndicesOptions.lenientExpandOpen());
+ }
return request;
}
+ public void testEqualsAndHashcode() {
+ FieldCapabilitiesRequest request = new FieldCapabilitiesRequest();
+ request.indices("foo");
+ request.indicesOptions(IndicesOptions.lenientExpandOpen());
+ request.fields("bar");
+
+ FieldCapabilitiesRequest other = new FieldCapabilitiesRequest();
+ other.indices("foo");
+ other.indicesOptions(IndicesOptions.lenientExpandOpen());
+ other.fields("bar");
+ assertEquals(request, request);
+ assertEquals(request, other);
+ assertEquals(request.hashCode(), other.hashCode());
+
+ // change indices
+ other.indices("foo", "bar");
+ assertNotEquals(request, other);
+ other.indices("foo");
+ assertEquals(request, other);
+
+ // change fields
+ other.fields("foo", "bar");
+ assertNotEquals(request, other);
+ other.fields("bar");
+ assertEquals(request, request);
+
+ // change indices options
+ other.indicesOptions(IndicesOptions.strictExpand());
+ assertNotEquals(request, other);
+
+ }
+
public void testFieldCapsRequestSerialization() throws IOException {
for (int i = 0; i < 20; i++) {
FieldCapabilitiesRequest request = randomRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java
index f1cbaa9df3..309e844505 100644
--- a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTests.java
@@ -113,7 +113,7 @@ public class FieldStatsRequestTests extends ESTestCase {
FieldStatsShardResponse deserialized = new FieldStatsShardResponse();
deserialized.readFrom(input);
final Map<String, FieldStats<?>> expected;
- if (version.before(Version.V_5_2_0_UNRELEASED)) {
+ if (version.before(Version.V_5_2_0)) {
expected = deserialized.filterNullMinMax();
} else {
expected = deserialized.getFieldStats();
diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
index 4fb1d0c648..73a44ff145 100644
--- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java
@@ -178,7 +178,7 @@ public class IndexRequestTests extends ESTestCase {
public void testIndexRequestXContentSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AAD////+AgQDZm9vAAAAAQNiYXIBATEAAAAAAnt9AP/////////9AAAA//////////8AAAAAAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
IndexRequest serialized = new IndexRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java b/core/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java
index 01aed87947..37a15c5b35 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java
@@ -49,7 +49,7 @@ public class PutPipelineRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDATECe30=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutPipelineRequest request = new PutPipelineRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java
index e3ca936bb8..ecd0256b11 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java
@@ -74,7 +74,7 @@ public class SimulatePipelineRequestTests extends ESTestCase {
public void testSerializationWithXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AAAAAnt9AAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
SimulatePipelineRequest request = new SimulatePipelineRequest();
diff --git a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java
index 3407007d64..92e093350a 100644
--- a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java
@@ -108,13 +108,13 @@ public class MainActionTests extends ESTestCase {
} else {
blocks = ClusterBlocks.builder()
.addGlobalBlock(new ClusterBlock(randomIntBetween(1, 16), "test global block 400", randomBoolean(), randomBoolean(),
- RestStatus.BAD_REQUEST, ClusterBlockLevel.ALL))
+ false, RestStatus.BAD_REQUEST, ClusterBlockLevel.ALL))
.build();
}
} else {
blocks = ClusterBlocks.builder()
.addGlobalBlock(new ClusterBlock(randomIntBetween(1, 16), "test global block 503", randomBoolean(), randomBoolean(),
- RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL))
+ false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL))
.build();
}
ClusterState state = ClusterState.builder(clusterName).blocks(blocks).build();
diff --git a/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
index b7f0e0785f..255025302c 100644
--- a/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
@@ -196,4 +196,35 @@ public class ExpandSearchPhaseTests extends ESTestCase {
assertNotNull(reference.get());
assertEquals(1, mockSearchPhaseContext.phasesExecuted.get());
}
+
+ public void testSkipExpandCollapseNoHits() throws IOException {
+ MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1);
+ mockSearchPhaseContext.searchTransport = new SearchTransportService(
+ Settings.builder().put("search.remote.connect", false).build(), null) {
+
+ @Override
+ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) {
+ fail("expand should not try to send empty multi search request");
+ }
+ };
+ mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder()
+ .collapse(new CollapseBuilder("someField").setInnerHits(new InnerHitBuilder().setName("foobarbaz"))));
+
+ SearchHits hits = new SearchHits(new SearchHit[0], 1, 1.0f);
+ InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
+ SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null);
+ AtomicReference<SearchResponse> reference = new AtomicReference<>();
+ ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, response, r ->
+ new SearchPhase("test") {
+ @Override
+ public void run() throws IOException {
+ reference.set(r);
+ }
+ }
+ );
+ phase.run();
+ mockSearchPhaseContext.assertNoFailure();
+ assertNotNull(reference.get());
+ assertEquals(1, mockSearchPhaseContext.phasesExecuted.get());
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
index 93d8be990d..7e04e99b17 100644
--- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
@@ -262,7 +262,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ClusterBlocks.Builder block = ClusterBlocks.builder()
- .addGlobalBlock(new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
+ .addGlobalBlock(new ClusterBlock(1, "test-block", false, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
try {
action.new AsyncAction(null, request, listener).start();
@@ -277,7 +277,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ClusterBlocks.Builder block = ClusterBlocks.builder()
- .addIndexBlock(TEST_INDEX, new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
+ .addIndexBlock(TEST_INDEX, new ClusterBlock(1, "test-block", false, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
try {
action.new AsyncAction(null, request, listener).start();
diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java
index ae543aa14c..f8975a5369 100644
--- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java
@@ -205,7 +205,7 @@ public class TransportMasterNodeActionTests extends ESTestCase {
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ClusterBlock block = new ClusterBlock(1, "", retryableBlock, true,
- randomFrom(RestStatus.values()), ClusterBlockLevel.ALL);
+ false, randomFrom(RestStatus.values()), ClusterBlockLevel.ALL);
ClusterState stateWithBlock = ClusterState.builder(ClusterStateCreationUtils.state(localNode, localNode, allNodes))
.blocks(ClusterBlocks.builder().addGlobalBlock(block)).build();
setState(clusterService, stateWithBlock);
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
index db8855aadd..b402feb6d8 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
@@ -194,8 +194,8 @@ public class TransportReplicationActionTests extends ESTestCase {
}
};
- ClusterBlocks.Builder block = ClusterBlocks.builder()
- .addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
+ ClusterBlocks.Builder block = ClusterBlocks.builder().addGlobalBlock(new ClusterBlock(1, "non retryable", false, true,
+ false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
reroutePhase.run();
@@ -203,7 +203,7 @@ public class TransportReplicationActionTests extends ESTestCase {
assertPhase(task, "failed");
block = ClusterBlocks.builder()
- .addGlobalBlock(new ClusterBlock(1, "retryable", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
+ .addGlobalBlock(new ClusterBlock(1, "retryable", true, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
listener = new PlainActionFuture<>();
reroutePhase = action.new ReroutePhase(task, new Request().timeout("5ms"), listener);
@@ -219,8 +219,8 @@ public class TransportReplicationActionTests extends ESTestCase {
assertPhase(task, "waiting_for_retry");
assertTrue(request.isRetrySet.get());
- block = ClusterBlocks.builder()
- .addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
+ block = ClusterBlocks.builder().addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, false,
+ RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
assertListenerThrows("primary phase should fail operation when moving from a retryable block to a non-retryable one", listener,
ClusterBlockException.class);
diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java
index c83a76ddc1..ba488cecb3 100644
--- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java
@@ -176,7 +176,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
Request request = new Request();
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ClusterBlocks.Builder block = ClusterBlocks.builder()
- .addGlobalBlock(new ClusterBlock(1, "", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
+ .addGlobalBlock(new ClusterBlock(1, "", false, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
try {
action.new AsyncSingleAction(request, listener).start();
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
index 05e30d7e2d..15a2f9e74a 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
@@ -66,7 +66,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
import static org.hamcrest.Matchers.equalTo;
public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
-
protected static class TestFieldSetting {
public final String name;
public final boolean storedOffset;
@@ -211,7 +210,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
Settings.Builder settings = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "standard")
- .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase");
+ .putArray("index.analysis.analyzer.tv_test.filter", "lowercase");
assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias)));
}
@@ -395,11 +394,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
assertThat("Missing offset test failed" + failDesc, esDocsPosEnum.startOffset(), equalTo(-1));
assertThat("Missing offset test failed" + failDesc, esDocsPosEnum.endOffset(), equalTo(-1));
}
- if (field.storedPayloads && testConfig.requestPayloads) {
- assertThat("Payload test failed" + failDesc, luceneDocsPosEnum.getPayload(), equalTo(esDocsPosEnum.getPayload()));
- } else {
- assertThat("Missing payload test failed" + failDesc, esDocsPosEnum.getPayload(), equalTo(null));
- }
+ assertNull("Missing payload test failed" + failDesc, esDocsPosEnum.getPayload());
}
}
assertNull("Es returned terms are done but lucene isn't", luceneTermEnum.next());
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
deleted file mode 100644
index 294a0ffde8..0000000000
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.action.termvectors;
-
-import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.hamcrest.Matchers;
-
-import java.io.IOException;
-
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.equalTo;
-
-public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
-
- @Override
- protected int numberOfShards() {
- return 1;
- }
-
- @Override
- protected int numberOfReplicas() {
- return 0;
- }
-
- @Override
- public Settings indexSettings() {
- return Settings.builder()
- .put(super.indexSettings())
- .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")
- .build();
- }
-
- public void testSimpleTermVectors() throws IOException {
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("term_vector", "with_positions_offsets_payloads")
- .field("analyzer", "tv_test")
- .endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(prepareCreate("test").addMapping("type1", mapping));
- ensureGreen();
- int numDocs = 15;
- for (int i = 0; i < numDocs; i++) {
- client().prepareIndex("test", "type1", Integer.toString(i))
- .setSource(XContentFactory.jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
- // 0the3 4quick9 10brown15 16fox19 20jumps25 26over30
- // 31the34 35lazy39 40dog43
- .endObject()).execute().actionGet();
- refresh();
- }
- String[] values = { "brown", "dog", "fox", "jumps", "lazy", "over", "quick", "the" };
- int[] freq = { 1, 1, 1, 1, 1, 1, 1, 2 };
- int[][] pos = { { 2 }, { 8 }, { 3 }, { 4 }, { 7 }, { 5 }, { 1 }, { 0, 6 } };
- int[][] startOffset = { { 10 }, { 40 }, { 16 }, { 20 }, { 35 }, { 26 }, { 4 }, { 0, 31 } };
- int[][] endOffset = { { 15 }, { 43 }, { 19 }, { 25 }, { 39 }, { 30 }, { 9 }, { 3, 34 } };
- for (int i = 0; i < numDocs; i++) {
- checkAllInfo(numDocs, values, freq, pos, startOffset, endOffset, i);
- checkWithoutTermStatistics(numDocs, values, freq, pos, startOffset, endOffset, i);
- checkWithoutFieldStatistics(numDocs, values, freq, pos, startOffset, endOffset, i);
- }
- }
-
- private void checkWithoutFieldStatistics(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset,
- int i) throws IOException {
- TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
- .setPositions(true).setTermStatistics(true).setFieldStatistics(false).setSelectedFields();
- TermVectorsResponse response = resp.execute().actionGet();
- assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
- Fields fields = response.getFields();
- assertThat(fields.size(), equalTo(1));
- Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8L));
- assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) -1));
- assertThat(terms.getDocCount(), Matchers.equalTo(-1));
- assertThat(terms.getSumDocFreq(), equalTo((long) -1));
- TermsEnum iterator = terms.iterator();
- for (int j = 0; j < values.length; j++) {
- String string = values[j];
- BytesRef next = iterator.next();
- assertThat(next, Matchers.notNullValue());
- assertThat("expected " + string, string, equalTo(next.utf8ToString()));
- assertThat(next, Matchers.notNullValue());
- if (string.equals("the")) {
- assertThat("expected ttf of " + string, numDocs * 2, equalTo((int) iterator.totalTermFreq()));
- } else {
- assertThat("expected ttf of " + string, numDocs, equalTo((int) iterator.totalTermFreq()));
- }
-
- PostingsEnum docsAndPositions = iterator.postings(null, PostingsEnum.ALL);
- assertThat(docsAndPositions.nextDoc(), equalTo(0));
- assertThat(freq[j], equalTo(docsAndPositions.freq()));
- assertThat(iterator.docFreq(), equalTo(numDocs));
- int[] termPos = pos[j];
- int[] termStartOffset = startOffset[j];
- int[] termEndOffset = endOffset[j];
- assertThat(termPos.length, equalTo(freq[j]));
- assertThat(termStartOffset.length, equalTo(freq[j]));
- assertThat(termEndOffset.length, equalTo(freq[j]));
- for (int k = 0; k < freq[j]; k++) {
- int nextPosition = docsAndPositions.nextPosition();
- assertThat("term: " + string, nextPosition, equalTo(termPos[k]));
- assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k]));
- assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
- assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word")));
- }
- }
- assertThat(iterator.next(), Matchers.nullValue());
-
- XContentBuilder xBuilder = XContentFactory.jsonBuilder();
- response.toXContent(xBuilder, null);
- String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");;
- String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\""
- + i
- + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}";
- assertThat(utf8, equalTo(expectedString));
-
- }
-
- private void checkWithoutTermStatistics(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset,
- int i) throws IOException {
- TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
- .setPositions(true).setTermStatistics(false).setFieldStatistics(true).setSelectedFields();
- assertThat(resp.request().termStatistics(), equalTo(false));
- TermVectorsResponse response = resp.execute().actionGet();
- assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
- Fields fields = response.getFields();
- assertThat(fields.size(), equalTo(1));
- Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8L));
- assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) (9 * numDocs)));
- assertThat(terms.getDocCount(), Matchers.equalTo(numDocs));
- assertThat(terms.getSumDocFreq(), equalTo((long) numDocs * values.length));
- TermsEnum iterator = terms.iterator();
- for (int j = 0; j < values.length; j++) {
- String string = values[j];
- BytesRef next = iterator.next();
- assertThat(next, Matchers.notNullValue());
- assertThat("expected " + string, string, equalTo(next.utf8ToString()));
- assertThat(next, Matchers.notNullValue());
-
- assertThat("expected ttf of " + string, -1, equalTo((int) iterator.totalTermFreq()));
-
- PostingsEnum docsAndPositions = iterator.postings(null, PostingsEnum.ALL);
- assertThat(docsAndPositions.nextDoc(), equalTo(0));
- assertThat(freq[j], equalTo(docsAndPositions.freq()));
- assertThat(iterator.docFreq(), equalTo(-1));
- int[] termPos = pos[j];
- int[] termStartOffset = startOffset[j];
- int[] termEndOffset = endOffset[j];
- assertThat(termPos.length, equalTo(freq[j]));
- assertThat(termStartOffset.length, equalTo(freq[j]));
- assertThat(termEndOffset.length, equalTo(freq[j]));
- for (int k = 0; k < freq[j]; k++) {
- int nextPosition = docsAndPositions.nextPosition();
- assertThat("term: " + string, nextPosition, equalTo(termPos[k]));
- assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k]));
- assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
- assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word")));
- }
- }
- assertThat(iterator.next(), Matchers.nullValue());
-
- XContentBuilder xBuilder = XContentFactory.jsonBuilder();
- response.toXContent(xBuilder, null);
- String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");;
- String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\""
- + i
- + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}";
- assertThat(utf8, equalTo(expectedString));
-
- }
-
- private void checkAllInfo(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset, int i)
- throws IOException {
- TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
- .setPositions(true).setFieldStatistics(true).setTermStatistics(true).setSelectedFields();
- assertThat(resp.request().fieldStatistics(), equalTo(true));
- TermVectorsResponse response = resp.execute().actionGet();
- assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
- Fields fields = response.getFields();
- assertThat(fields.size(), equalTo(1));
- Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8L));
- assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) (9 * numDocs)));
- assertThat(terms.getDocCount(), Matchers.equalTo(numDocs));
- assertThat(terms.getSumDocFreq(), equalTo((long) numDocs * values.length));
- TermsEnum iterator = terms.iterator();
- for (int j = 0; j < values.length; j++) {
- String string = values[j];
- BytesRef next = iterator.next();
- assertThat(next, Matchers.notNullValue());
- assertThat("expected " + string, string, equalTo(next.utf8ToString()));
- assertThat(next, Matchers.notNullValue());
- if (string.equals("the")) {
- assertThat("expected ttf of " + string, numDocs * 2, equalTo((int) iterator.totalTermFreq()));
- } else {
- assertThat("expected ttf of " + string, numDocs, equalTo((int) iterator.totalTermFreq()));
- }
-
- PostingsEnum docsAndPositions = iterator.postings(null, PostingsEnum.ALL);
- assertThat(docsAndPositions.nextDoc(), equalTo(0));
- assertThat(freq[j], equalTo(docsAndPositions.freq()));
- assertThat(iterator.docFreq(), equalTo(numDocs));
- int[] termPos = pos[j];
- int[] termStartOffset = startOffset[j];
- int[] termEndOffset = endOffset[j];
- assertThat(termPos.length, equalTo(freq[j]));
- assertThat(termStartOffset.length, equalTo(freq[j]));
- assertThat(termEndOffset.length, equalTo(freq[j]));
- for (int k = 0; k < freq[j]; k++) {
- int nextPosition = docsAndPositions.nextPosition();
- assertThat("term: " + string, nextPosition, equalTo(termPos[k]));
- assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k]));
- assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
- assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word")));
- }
- }
- assertThat(iterator.next(), Matchers.nullValue());
-
- XContentBuilder xBuilder = XContentFactory.jsonBuilder();
- response.toXContent(xBuilder, ToXContent.EMPTY_PARAMS);
- String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");;
- String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\""
- + i
- + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}";
- assertThat(utf8, equalTo(expectedString));
- }
-
-}
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
index ba2f5de24b..cc7a73278e 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
@@ -193,7 +193,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
+ .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
@@ -216,10 +216,9 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
public void testRandomSingleTermVectors() throws IOException {
FieldType ft = new FieldType();
- int config = randomInt(6);
+ int config = randomInt(4);
boolean storePositions = false;
boolean storeOffsets = false;
- boolean storePayloads = false;
boolean storeTermVectors = false;
switch (config) {
case 0: {
@@ -246,23 +245,11 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
storeOffsets = true;
break;
}
- case 5: {
- storeTermVectors = true;
- storePositions = true;
- storePayloads = true;
- break;
- }
- case 6: {
- storeTermVectors = true;
- storePositions = true;
- storeOffsets = true;
- storePayloads = true;
- break;
- }
+ default:
+ throw new IllegalArgumentException("Unsupported option: " + config);
}
ft.setStoreTermVectors(storeTermVectors);
ft.setStoreTermVectorOffsets(storeOffsets);
- ft.setStoreTermVectorPayloads(storePayloads);
ft.setStoreTermVectorPositions(storePositions);
String optionString = FieldMapper.termVectorOptionsToString(ft);
@@ -278,7 +265,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertAcked(prepareCreate("test").addMapping("type1", mapping)
.setSettings(Settings.builder()
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
+ .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
@@ -293,13 +280,12 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
int[][] startOffset = {{10}, {40}, {16}, {20}, {35}, {26}, {4}, {0, 31}};
int[][] endOffset = {{15}, {43}, {19}, {25}, {39}, {30}, {9}, {3, 34}};
- boolean isPayloadRequested = randomBoolean();
boolean isOffsetRequested = randomBoolean();
boolean isPositionsRequested = randomBoolean();
- String infoString = createInfoString(isPositionsRequested, isOffsetRequested, isPayloadRequested, optionString);
+ String infoString = createInfoString(isPositionsRequested, isOffsetRequested, optionString);
for (int i = 0; i < 10; i++) {
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i))
- .setPayloads(isPayloadRequested).setOffsets(isOffsetRequested).setPositions(isPositionsRequested).setSelectedFields();
+ .setOffsets(isOffsetRequested).setPositions(isPositionsRequested).setSelectedFields();
TermVectorsResponse response = resp.execute().actionGet();
assertThat(infoString + "doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
@@ -340,13 +326,8 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
} else {
assertThat(infoString + "positions for term: ", nextPosition, equalTo(-1));
}
- // only return something useful if requested and stored
- if (isPayloadRequested && storePayloads) {
- assertThat(infoString + "payloads for term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef(
- "word")));
- } else {
- assertThat(infoString + "payloads for term: " + string, docsAndPositions.getPayload(), equalTo(null));
- }
+ // payloads are never made by the mapping in this test
+ assertNull(infoString + "payloads for term: " + string, docsAndPositions.getPayload());
// only return something useful if requested and stored
if (isOffsetRequested && storeOffsets) {
@@ -365,11 +346,9 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
}
}
- private String createInfoString(boolean isPositionsRequested, boolean isOffsetRequested, boolean isPayloadRequested,
- String optionString) {
+ private String createInfoString(boolean isPositionsRequested, boolean isOffsetRequested, String optionString) {
String ret = "Store config: " + optionString + "\n" + "Requested: pos-"
- + (isPositionsRequested ? "yes" : "no") + ", offsets-" + (isOffsetRequested ? "yes" : "no") + ", payload- "
- + (isPayloadRequested ? "yes" : "no") + "\n";
+ + (isPositionsRequested ? "yes" : "no") + ", offsets-" + (isOffsetRequested ? "yes" : "no") + "\n";
return ret;
}
@@ -585,7 +564,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
+ .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
ensureGreen();
@@ -645,9 +624,8 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat("term: " + string, nextPosition, equalTo(termPos[k]));
assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k]));
assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
- if (withPayloads) {
- assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word")));
- }
+ // We never configure an analyzer with payloads for this test so this is never returned
+ assertNull("term: " + string, docsAndPositions.getPayload());
}
}
assertThat(iterator.next(), nullValue());
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java
index e034cff3f1..2018218cc5 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java
@@ -269,7 +269,7 @@ public class TermVectorsUnitTests extends ESTestCase {
public void testStreamRequestWithXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AAABBWluZGV4BHR5cGUCaWQBAnt9AAABDnNvbWVQcmVmZXJlbmNlFgAAAAEA//////////0AAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
TermVectorsRequest request = new TermVectorsRequest();
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
index f9f4a136e1..b6b6b3024b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
@@ -59,6 +59,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
@@ -327,7 +328,8 @@ public class ClusterRerouteIT extends ESIntegTestCase {
int toggle = nodesIds.indexOf(node.getName());
// Rerouting shards is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_BLOCKS_METADATA,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test-blocks", blockSetting);
assertAcked(client().admin().cluster().prepareReroute()
diff --git a/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java b/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java
index a7fe1b918c..a84d160cf0 100644
--- a/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java
@@ -49,7 +49,7 @@ public class ClusterBlockTests extends ESTestCase {
}
ClusterBlock clusterBlock = new ClusterBlock(randomInt(), "cluster block #" + randomInt(), randomBoolean(),
- randomBoolean(), randomFrom(RestStatus.values()), levels);
+ randomBoolean(), false, randomFrom(RestStatus.values()), levels);
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(version);
@@ -75,7 +75,7 @@ public class ClusterBlockTests extends ESTestCase {
levels.add(randomFrom(ClusterBlockLevel.values()));
}
ClusterBlock clusterBlock = new ClusterBlock(randomInt(), "cluster block #" + randomInt(), randomBoolean(),
- randomBoolean(), randomFrom(RestStatus.values()), levels);
+ randomBoolean(), false, randomFrom(RestStatus.values()), levels);
assertThat(clusterBlock.toString(), not(endsWith(",")));
}
@@ -86,7 +86,7 @@ public class ClusterBlockTests extends ESTestCase {
levels.add(randomFrom(ClusterBlockLevel.values()));
}
ClusterBlock globalBlock = new ClusterBlock(randomInt(), "cluster block #" + randomInt(), randomBoolean(),
- randomBoolean(), randomFrom(RestStatus.values()), levels);
+ randomBoolean(), false, randomFrom(RestStatus.values()), levels);
ClusterBlocks clusterBlocks = new ClusterBlocks(Collections.singleton(globalBlock), ImmutableOpenMap.of());
ClusterBlockException exception = clusterBlocks.indicesBlockedException(randomFrom(globalBlock.levels()), new String[0]);
assertNotNull(exception);
diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java
index 548f9d407c..91b7a18236 100644
--- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java
@@ -76,8 +76,10 @@ public class DiscoveryNodeTests extends ESTestCase {
assertEquals(transportAddress.getAddress(), serialized.getHostAddress());
assertEquals(transportAddress.getAddress(), serialized.getAddress().getAddress());
assertEquals(transportAddress.getPort(), serialized.getAddress().getPort());
- assertFalse("if the minimum compatibility version moves past 5.0.3, remove the special casing in DiscoverNode(StreamInput) and " +
- "the TransportAddress(StreamInput, String) constructor",
- Version.CURRENT.minimumCompatibilityVersion().onOrAfter(Version.V_5_0_3_UNRELEASED));
+ assertFalse("if the minimum index compatibility version moves past 5.0.3, remove the special casing in DiscoverNode(StreamInput)" +
+ " and the TransportAddress(StreamInput, String) constructor",
+ Version.CURRENT.minimumIndexCompatibilityVersion().after(Version.V_5_0_2));
+ // serialization can happen from an old cluster-state in a full cluster restart
+ // hence we need to maintain this until we drop index bwc
}
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java
index 4e77741694..3b2fb365ca 100644
--- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java
@@ -139,7 +139,7 @@ public class ClusterSerializationTests extends ESAllocationTestCase {
// serialize with old version
outStream = new BytesStreamOutput();
- outStream.setVersion(Version.CURRENT.minimumCompatibilityVersion());
+ outStream.setVersion(Version.CURRENT.minimumIndexCompatibilityVersion());
diffs.writeTo(outStream);
inStream = outStream.bytes().streamInput();
inStream = new NamedWriteableAwareStreamInput(inStream, new NamedWriteableRegistry(ClusterModule.getNamedWriteables()));
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index bab53b8f35..61e31666f3 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResp
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.discovery.Discovery;
@@ -33,6 +34,7 @@ import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.junit.After;
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -42,8 +44,15 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
-@ClusterScope(scope = TEST)
public class ClusterSettingsIT extends ESIntegTestCase {
+
+ @After
+ public void cleanup() throws Exception {
+ assertAcked(client().admin().cluster().prepareUpdateSettings()
+ .setPersistentSettings(Settings.builder().putNull("*"))
+ .setTransientSettings(Settings.builder().putNull("*")));
+ }
+
public void testClusterNonExistingSettingsUpdate() {
String key1 = "no_idea_what_you_are_talking_about";
int value1 = 10;
@@ -302,13 +311,25 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertBlocked(request, MetaData.CLUSTER_READ_ONLY_BLOCK);
// But it's possible to update the settings to update the "cluster.blocks.read_only" setting
- Settings settings = Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build();
+ Settings settings = Settings.builder().putNull(MetaData.SETTING_READ_ONLY_SETTING.getKey()).build();
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
} finally {
setClusterReadOnly(false);
}
+ // Cluster settings updates are blocked when the cluster is read only
+ try {
+ // But it's possible to update the settings to update the "cluster.blocks.read_only" setting
+ Settings settings = Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build();
+ assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
+ assertBlocked(request, MetaData.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK);
+ } finally {
+ // But it's possible to update the settings to update the "cluster.blocks.read_only" setting
+ Settings s = Settings.builder().putNull(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey()).build();
+ assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(s).get());
+ }
+
// It should work now
ClusterUpdateSettingsResponse response = request.execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java b/core/src/test/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java
index e648dce6a6..6e1475d0ae 100644
--- a/core/src/test/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java
@@ -33,6 +33,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.equalTo;
@@ -136,7 +137,8 @@ public class ClusterSearchShardsIT extends ESIntegTestCase {
ensureGreen("test-blocks");
// Request is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test-blocks", blockSetting);
ClusterSearchShardsResponse response = client().admin().cluster().prepareSearchShards("test-blocks").execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
index b67000e2b2..34a48862e1 100644
--- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
@@ -805,7 +805,7 @@ public class BytesStreamsTests extends ESTestCase {
}
assertTrue("If we're not compatible with 5.1.1 we can drop the assertion below",
- Version.CURRENT.minimumCompatibilityVersion().onOrBefore(Version.V_5_1_1_UNRELEASED));
+ Version.CURRENT.minimumIndexCompatibilityVersion().onOrBefore(Version.V_5_1_1));
/* Read -1 as serialized by a version of Elasticsearch that supported writing negative numbers with writeVLong. Note that this
* should be the same test as the first case (when value is negative) but we've kept some bytes so no matter what we do to
* writeVLong in the future we can be sure we can read bytes as written by Elasticsearch before 5.1.2 */
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 01ace21ad1..dd96acdd6c 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -103,6 +103,30 @@ public class ScopedSettingsTests extends ESTestCase {
assertNull(target.build().getAsInt("archived.foo.bar", null));
}
+ public void testResetSettingWithIPValidator() {
+ Settings currentSettings = Settings.builder().put("index.routing.allocation.require._ip", "192.168.0.1,127.0.0.1")
+ .put("index.some.dyn.setting", 1)
+ .build();
+ Setting<Integer> dynamicSetting = Setting.intSetting("index.some.dyn.setting", 1, Property.Dynamic, Property.IndexScope);
+
+ IndexScopedSettings settings = new IndexScopedSettings(currentSettings,
+ new HashSet<>(Arrays.asList(dynamicSetting, IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING)));
+ Settings s = IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.get(currentSettings);
+ assertEquals(1, s.size());
+ assertEquals("192.168.0.1,127.0.0.1", s.get("_ip"));
+ Settings.Builder builder = Settings.builder();
+ Settings updates = Settings.builder().putNull("index.routing.allocation.require._ip")
+ .put("index.some.dyn.setting", 1).build();
+ settings.validate(updates);
+ settings.updateDynamicSettings(updates,
+ Settings.builder().put(currentSettings), builder, "node");
+ currentSettings = builder.build();
+ s = IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.get(currentSettings);
+ assertEquals(0, s.size());
+ assertEquals(1, dynamicSetting.get(currentSettings).intValue());
+ assertEquals(1, currentSettings.size());
+ }
+
public void testAddConsumer() {
Setting<Integer> testSetting = Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope);
Setting<Integer> testSetting2 = Setting.intSetting("foo.bar.baz", 1, Property.Dynamic, Property.NodeScope);
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java
new file mode 100644
index 0000000000..732ec94ae1
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java
@@ -0,0 +1,226 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util.concurrent;
+
+import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.junit.annotations.TestLogging;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.lessThan;
+
+/**
+ * Tests for the automatic queue resizing of the {@code QueueResizingEsThreadPoolExecutor}
+ * based on the time taken for each event.
+ */
+public class QueueResizingEsThreadPoolExecutorTests extends ESTestCase {
+
+ public void testExactWindowSizeAdjustment() throws Exception {
+ ThreadContext context = new ThreadContext(Settings.EMPTY);
+ ResizableBlockingQueue<Runnable> queue =
+ new ResizableBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(), 100);
+
+ int threads = randomIntBetween(1, 3);
+ int measureWindow = 3;
+ logger.info("--> auto-queue with a measurement window of {} tasks", measureWindow);
+ QueueResizingEsThreadPoolExecutor executor =
+ new QueueResizingEsThreadPoolExecutor(
+ "test-threadpool", threads, threads, 1000,
+ TimeUnit.MILLISECONDS, queue, 10, 1000, fastWrapper(),
+ measureWindow, TimeValue.timeValueMillis(1), EsExecutors.daemonThreadFactory("queuetest"),
+ new EsAbortPolicy(), context);
+ executor.prestartAllCoreThreads();
+ logger.info("--> executor: {}", executor);
+
+ // Execute exactly 3 (measureWindow) times
+ executor.execute(() -> {});
+ executor.execute(() -> {});
+ executor.execute(() -> {});
+
+ // The queue capacity should have increased by 50 since they were very fast tasks
+ assertBusy(() -> {
+ assertThat(queue.capacity(), equalTo(150));
+ });
+ executor.shutdown();
+ executor.awaitTermination(10, TimeUnit.SECONDS);
+ context.close();
+ }
+
+ public void testAutoQueueSizingUp() throws Exception {
+ ThreadContext context = new ThreadContext(Settings.EMPTY);
+ ResizableBlockingQueue<Runnable> queue =
+ new ResizableBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(),
+ 2000);
+
+ int threads = randomIntBetween(1, 10);
+ int measureWindow = randomIntBetween(100, 200);
+ logger.info("--> auto-queue with a measurement window of {} tasks", measureWindow);
+ QueueResizingEsThreadPoolExecutor executor =
+ new QueueResizingEsThreadPoolExecutor(
+ "test-threadpool", threads, threads, 1000,
+ TimeUnit.MILLISECONDS, queue, 10, 3000, fastWrapper(),
+ measureWindow, TimeValue.timeValueMillis(1), EsExecutors.daemonThreadFactory("queuetest"),
+ new EsAbortPolicy(), context);
+ executor.prestartAllCoreThreads();
+ logger.info("--> executor: {}", executor);
+
+ // Execute a task multiple times that takes 1ms
+ executeTask(executor, (measureWindow * 5) + 2);
+
+ assertBusy(() -> {
+ assertThat(queue.capacity(), greaterThan(2000));
+ });
+ executor.shutdown();
+ executor.awaitTermination(10, TimeUnit.SECONDS);
+ context.close();
+ }
+
+ public void testAutoQueueSizingDown() throws Exception {
+ ThreadContext context = new ThreadContext(Settings.EMPTY);
+ ResizableBlockingQueue<Runnable> queue =
+ new ResizableBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(),
+ 2000);
+
+ int threads = randomIntBetween(1, 10);
+ int measureWindow = randomIntBetween(100, 200);
+ logger.info("--> auto-queue with a measurement window of {} tasks", measureWindow);
+ QueueResizingEsThreadPoolExecutor executor =
+ new QueueResizingEsThreadPoolExecutor(
+ "test-threadpool", threads, threads, 1000,
+ TimeUnit.MILLISECONDS, queue, 10, 3000, slowWrapper(), measureWindow, TimeValue.timeValueMillis(1),
+ EsExecutors.daemonThreadFactory("queuetest"), new EsAbortPolicy(), context);
+ executor.prestartAllCoreThreads();
+ logger.info("--> executor: {}", executor);
+
+ // Execute a task multiple times that takes 2m (see slowWrapper)
+ executeTask(executor, (measureWindow * 5) + 2);
+
+ assertBusy(() -> {
+ assertThat(queue.capacity(), lessThan(2000));
+ });
+ executor.shutdown();
+ executor.awaitTermination(10, TimeUnit.SECONDS);
+ context.close();
+ }
+
+ public void testAutoQueueSizingWithMin() throws Exception {
+ ThreadContext context = new ThreadContext(Settings.EMPTY);
+ ResizableBlockingQueue<Runnable> queue =
+ new ResizableBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(),
+ 5000);
+
+ int threads = randomIntBetween(1, 5);
+ int measureWindow = randomIntBetween(10, 100);
+ int min = randomIntBetween(4981, 4999);
+ logger.info("--> auto-queue with a measurement window of {} tasks", measureWindow);
+ QueueResizingEsThreadPoolExecutor executor =
+ new QueueResizingEsThreadPoolExecutor(
+ "test-threadpool", threads, threads, 1000,
+ TimeUnit.MILLISECONDS, queue, min, 100000, slowWrapper(), measureWindow, TimeValue.timeValueMillis(1),
+ EsExecutors.daemonThreadFactory("queuetest"), new EsAbortPolicy(), context);
+ executor.prestartAllCoreThreads();
+ logger.info("--> executor: {}", executor);
+
+ // Execute a task multiple times that takes 2m (see slowWrapper)
+ executeTask(executor, (measureWindow * 5));
+
+ // The queue capacity should decrease, but no lower than the minimum
+ assertBusy(() -> {
+ assertThat(queue.capacity(), equalTo(min));
+ });
+ executor.shutdown();
+ executor.awaitTermination(10, TimeUnit.SECONDS);
+ context.close();
+ }
+
+ public void testAutoQueueSizingWithMax() throws Exception {
+ ThreadContext context = new ThreadContext(Settings.EMPTY);
+ ResizableBlockingQueue<Runnable> queue =
+ new ResizableBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(),
+ 5000);
+
+ int threads = randomIntBetween(1, 5);
+ int measureWindow = randomIntBetween(10, 100);
+ int max = randomIntBetween(5010, 5024);
+ logger.info("--> auto-queue with a measurement window of {} tasks", measureWindow);
+ QueueResizingEsThreadPoolExecutor executor =
+ new QueueResizingEsThreadPoolExecutor(
+ "test-threadpool", threads, threads, 1000,
+ TimeUnit.MILLISECONDS, queue, 10, max, fastWrapper(), measureWindow, TimeValue.timeValueMillis(1),
+ EsExecutors.daemonThreadFactory("queuetest"), new EsAbortPolicy(), context);
+ executor.prestartAllCoreThreads();
+ logger.info("--> executor: {}", executor);
+
+ // Execute a task multiple times that takes 1ms
+ executeTask(executor, measureWindow * 3);
+
+ // The queue capacity should increase, but no higher than the maximum
+ assertBusy(() -> {
+ assertThat(queue.capacity(), equalTo(max));
+ });
+ executor.shutdown();
+ executor.awaitTermination(10, TimeUnit.SECONDS);
+ context.close();
+ }
+
+ private Function<Runnable, Runnable> randomBetweenLimitsWrapper(final int minNs, final int maxNs) {
+ return (runnable) -> {
+ return new SettableTimedRunnable(randomIntBetween(minNs, maxNs));
+ };
+ }
+
+ private Function<Runnable, Runnable> fastWrapper() {
+ return (runnable) -> {
+ return new SettableTimedRunnable(TimeUnit.NANOSECONDS.toNanos(50));
+ };
+ }
+
+ private Function<Runnable, Runnable> slowWrapper() {
+ return (runnable) -> {
+ return new SettableTimedRunnable(TimeUnit.MINUTES.toNanos(2));
+ };
+ }
+
+ /** Execute a blank task {@code times} times for the executor */
+ private void executeTask(QueueResizingEsThreadPoolExecutor executor, int times) {
+ logger.info("--> executing a task [{}] times", times);
+ for (int i = 0; i < times; i++) {
+ executor.execute(() -> {});
+ }
+ }
+
+ public class SettableTimedRunnable extends TimedRunnable {
+ private final long timeTaken;
+
+ public SettableTimedRunnable(long timeTaken) {
+ super(() -> {});
+ this.timeTaken = timeTaken;
+ }
+
+ @Override
+ public long getTotalNanos() {
+ return timeTaken;
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ResizableBlockingQueueTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ResizableBlockingQueueTests.java
new file mode 100644
index 0000000000..b1d5b9bc1b
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ResizableBlockingQueueTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util.concurrent;
+
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class ResizableBlockingQueueTests extends ESTestCase {
+
+ public void testAdjustCapacity() throws Exception {
+ ResizableBlockingQueue<Runnable> queue =
+ new ResizableBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(),
+ 100);
+
+ assertThat(queue.capacity(), equalTo(100));
+ // Queue size already equal to desired capacity
+ queue.adjustCapacity(100, 25, 1, 1000);
+ assertThat(queue.capacity(), equalTo(100));
+ // Not worth adjusting
+ queue.adjustCapacity(99, 25, 1, 1000);
+ assertThat(queue.capacity(), equalTo(100));
+ // Not worth adjusting
+ queue.adjustCapacity(75, 25, 1, 1000);
+ assertThat(queue.capacity(), equalTo(100));
+ queue.adjustCapacity(74, 25, 1, 1000);
+ assertThat(queue.capacity(), equalTo(75));
+ queue.adjustCapacity(1000000, 25, 1, 1000);
+ assertThat(queue.capacity(), equalTo(100));
+ queue.adjustCapacity(1, 25, 80, 1000);
+ assertThat(queue.capacity(), equalTo(80));
+ queue.adjustCapacity(1000000, 25, 80, 100);
+ assertThat(queue.capacity(), equalTo(100));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java
index b961b6d6fb..65856add56 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java
@@ -361,8 +361,8 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
IllegalStateException ex = expectThrows(IllegalStateException.class, () ->
request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null));
assertEquals("index [test] version not supported: "
- + VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion())
- + " minimum compatible index version is: " + Version.CURRENT.minimumCompatibilityVersion(), ex.getMessage());
+ + VersionUtils.getPreviousVersion(Version.CURRENT.minimumIndexCompatibilityVersion())
+ + " minimum compatible index version is: " + Version.CURRENT.minimumIndexCompatibilityVersion(), ex.getMessage());
} else {
AtomicBoolean sendResponse = new AtomicBoolean(false);
request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), new TransportChannel() {
diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
index ee1f654642..2fa56fa34a 100644
--- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
@@ -606,7 +606,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
public void testSerialization() throws IOException {
for (Version version : new Version[] {Version.CURRENT, Version.V_5_0_1}){
for (int i = 0; i < 20; i++) {
- assertSerialization(randomFieldStats(version.onOrAfter(Version.V_5_2_0_UNRELEASED)), version);
+ assertSerialization(randomFieldStats(version.onOrAfter(Version.V_5_2_0)), version);
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
index af3fdf9adb..74ec1cc02d 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
@@ -152,7 +152,7 @@ public class IndexSortSettingsTests extends ESTestCase {
.put("index.sort.field", "field1")
.build();
IllegalArgumentException exc =
- expectThrows(IllegalArgumentException.class, () -> indexSettings(settings, Version.V_5_4_0_UNRELEASED));
+ expectThrows(IllegalArgumentException.class, () -> indexSettings(settings, Version.V_5_4_0));
assertThat(exc.getMessage(),
containsString("unsupported index.version.created:5.4.0, " +
"can't set index.sort on versions prior to 6.0.0-alpha1"));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
index 214515d170..b20972aded 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
@@ -63,8 +63,8 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT));
// same lucene version should be cached
- assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_2_UNRELEASED),
- PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_3_UNRELEASED));
+ assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_1),
+ PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_2));
assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_0),
PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_1));
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 7dc94d972c..e72f68e1d2 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -1486,7 +1486,7 @@ public class InternalEngineTests extends ESTestCase {
IndexSettings oldSettings = IndexSettingsModule.newIndexSettings("testOld", Settings.builder()
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "1h") // make sure this doesn't kick in on us
.put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName)
- .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_4_0_UNRELEASED)
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_4_0)
.put(MapperService.INDEX_MAPPING_SINGLE_TYPE_SETTING.getKey(), true)
.put(IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.getKey(),
between(10, 10 * IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.get(Settings.EMPTY)))
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperTests.java
index b42bda0a5a..72b1c95d8b 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/MapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperTests.java
@@ -56,7 +56,7 @@ public class MapperTests extends ESTestCase {
"As a replacement, you can use an [copy_to] on mapping fields to create your own catch all field.",
e.getMessage());
- settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_3_0_UNRELEASED).build();
+ settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_3_0).build();
// Create the mapping service with an older index creation version
final MapperService oldMapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings, "test");
diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java
index d19e8e32ff..ec34f6d87e 100644
--- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java
@@ -336,7 +336,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
public void testItemSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AQVpbmRleAEEdHlwZQEODXsiZm9vIjoiYmFyIn0A/wD//////////QAAAAAAAAAA");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
Item item = new Item(in);
diff --git a/core/src/test/java/org/elasticsearch/index/refresh/RefreshStatsTests.java b/core/src/test/java/org/elasticsearch/index/refresh/RefreshStatsTests.java
index 91ac42628e..27221b0af9 100644
--- a/core/src/test/java/org/elasticsearch/index/refresh/RefreshStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/refresh/RefreshStatsTests.java
@@ -34,15 +34,4 @@ public class RefreshStatsTests extends AbstractStreamableTestCase<RefreshStats>
protected RefreshStats createBlankInstance() {
return new RefreshStats();
}
-
- public void testPre5Dot2() throws IOException {
- // We can drop the compatibility once the assertion just below this list fails
- assertTrue(Version.CURRENT.minimumCompatibilityVersion().before(Version.V_5_2_0_UNRELEASED));
-
- RefreshStats instance = createTestInstance();
- RefreshStats copied = copyInstance(instance, Version.V_5_1_1_UNRELEASED);
- assertEquals(instance.getTotal(), copied.getTotal());
- assertEquals(instance.getTotalTimeInMillis(), copied.getTotalTimeInMillis());
- assertEquals(0, copied.getListeners());
- }
}
diff --git a/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java b/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java
index 982198c8fe..e2bf25ce1b 100644
--- a/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java
@@ -75,7 +75,7 @@ public class BulkByScrollTaskStatusTests extends ESTestCase {
assertEquals(expected.getRequestsPerSecond(), actual.getRequestsPerSecond(), 0f);
assertEquals(expected.getReasonCancelled(), actual.getReasonCancelled());
assertEquals(expected.getThrottledUntil(), actual.getThrottledUntil());
- if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (version.onOrAfter(Version.V_5_1_1)) {
assertThat(actual.getSliceStatuses(), Matchers.hasSize(expected.getSliceStatuses().size()));
for (int i = 0; i < expected.getSliceStatuses().size(); i++) {
BulkByScrollTask.StatusOrException sliceStatus = expected.getSliceStatuses().get(i);
diff --git a/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java b/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java
index c79a61a22b..c047235ada 100644
--- a/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java
@@ -19,6 +19,9 @@
package org.elasticsearch.index.termvectors;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.settings.Settings;
@@ -28,6 +31,7 @@ import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESSingleNodeTestCase;
+import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
@@ -71,4 +75,45 @@ public class TermVectorsServiceTests extends ESSingleNodeTestCase {
assertThat(response, notNullValue());
assertThat(response.getTookInMillis(), equalTo(TimeUnit.NANOSECONDS.toMillis(longs.get(1) - longs.get(0))));
}
+
+ public void testDocFreqs() throws IOException {
+ XContentBuilder mapping = jsonBuilder()
+ .startObject()
+ .startObject("doc")
+ .startObject("properties")
+ .startObject("text")
+ .field("type", "text")
+ .field("term_vector", "with_positions_offsets_payloads")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ Settings settings = Settings.builder()
+ .put("number_of_shards", 1)
+ .build();
+ createIndex("test", settings, "doc", mapping);
+ ensureGreen();
+
+ int max = between(3, 10);
+ BulkRequestBuilder bulk = client().prepareBulk();
+ for (int i = 0; i < max; i++) {
+ bulk.add(client().prepareIndex("test", "doc", Integer.toString(i))
+ .setSource("text", "the quick brown fox jumped over the lazy dog"));
+ }
+ bulk.get();
+
+ TermVectorsRequest request = new TermVectorsRequest("test", "doc", "0").termStatistics(true);
+
+ IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+ IndexService test = indicesService.indexService(resolveIndex("test"));
+ IndexShard shard = test.getShardOrNull(0);
+ assertThat(shard, notNullValue());
+ TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
+
+ Terms terms = response.getFields().terms("text");
+ TermsEnum iterator = terms.iterator();
+ while (iterator.next() != null) {
+ assertEquals(max, iterator.docFreq());
+ }
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
index 2572b7aeb0..298c8938dd 100644
--- a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
@@ -23,11 +23,8 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
-import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
import org.apache.lucene.analysis.hunspell.Dictionary;
-import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.store.Directory;
@@ -127,12 +124,6 @@ public class AnalysisModuleTests extends ESTestCase {
testSimpleConfiguration(settings);
}
- public void testDefaultFactoryTokenFilters() throws IOException {
- assertTokenFilter("keyword_repeat", KeywordRepeatFilter.class);
- assertTokenFilter("persian_normalization", PersianNormalizationFilter.class);
- assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class);
- }
-
public void testAnalyzerAliasNotAllowedPost5x() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.foobar.type", "standard")
diff --git a/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java b/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java
index 1f56e4cfc5..dbfa5fb2d0 100644
--- a/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java
@@ -30,6 +30,7 @@ import java.util.Arrays;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
@@ -66,7 +67,8 @@ public class IndicesExistsIT extends ESIntegTestCase {
createIndex("ro");
// Request is not blocked
- for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
+ for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY,
+ SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("ro", blockSetting);
assertThat(client().admin().indices().prepareExists("ro").execute().actionGet().isExists(), equalTo(true));
diff --git a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java
index d53230d3a9..cb45a639c0 100644
--- a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java
@@ -30,6 +30,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.equalTo;
@@ -43,7 +44,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase {
.put("index.merge.policy.expunge_deletes_allowed", "30")
.put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false)));
- for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
+ for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_READ_ONLY_ALLOW_DELETE)) {
try {
enableIndexBlock("test", block);
GetSettingsResponse response = client().admin().indices().prepareGetSettings("test").get();
diff --git a/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java
index 8dbaaf3e94..a867425f39 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java
@@ -44,6 +44,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
@@ -190,7 +191,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
assertThat(openIndexResponse.isAcknowledged(), equalTo(true));
assertIndexIsOpened("test1", "test2", "test3");
}
-
+
public void testCloseNoIndex() {
Client client = client();
Exception e = expectThrows(ActionRequestValidationException.class, () ->
@@ -380,7 +381,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
assertIndexIsClosed("test");
// Opening an index is blocked
- for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
+ for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_READ_ONLY_ALLOW_DELETE, SETTING_BLOCKS_METADATA)) {
try {
enableIndexBlock("test", blockSetting);
assertBlocked(client().admin().indices().prepareOpen("test"));
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java
index f41d01b32c..6ca6b0ea8c 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java
@@ -54,9 +54,9 @@ public class PipelineConfigurationTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ATECe30AAAA=");
- final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
try (StreamInput in = StreamInput.wrap(data)) {
+ final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
+ Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
in.setVersion(version);
PipelineConfiguration configuration = PipelineConfiguration.readFrom(in);
assertEquals(XContentType.JSON, configuration.getXContentType());
diff --git a/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java b/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java
deleted file mode 100644
index c455870779..0000000000
--- a/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java
+++ /dev/null
@@ -1,1029 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.script;
-
-import org.elasticsearch.action.search.SearchPhaseExecutionException;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.ShardSearchFailure;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.lookup.IndexField;
-import org.elasticsearch.search.lookup.IndexFieldTerm;
-import org.elasticsearch.search.lookup.IndexLookup;
-import org.elasticsearch.search.lookup.LeafIndexLookup;
-import org.elasticsearch.search.lookup.TermPosition;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.hamcrest.Matchers;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-import java.util.function.Function;
-
-import static java.util.Collections.emptyList;
-
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-
-public class IndexLookupIT extends ESIntegTestCase {
-
- private static final String INCLUDE_ALL = "_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS|_CACHE";
- private static final int ALL_FLAGS = IndexLookup.FLAG_FREQUENCIES
- | IndexLookup.FLAG_OFFSETS
- | IndexLookup.FLAG_PAYLOADS
- | IndexLookup.FLAG_POSITIONS
- | IndexLookup.FLAG_CACHE;
-
- private static final String INCLUDE_ALL_BUT_CACHE = "_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS";
- private static final int ALL_FLAGS_WITHOUT_CACHE = IndexLookup.FLAG_FREQUENCIES
- | IndexLookup.FLAG_OFFSETS
- | IndexLookup.FLAG_PAYLOADS
- | IndexLookup.FLAG_POSITIONS;
-
- private HashMap<String, List<Object>> expectedEndOffsetsArray;
- private HashMap<String, List<Object>> expectedPayloadsArray;
- private HashMap<String, List<Object>> expectedPositionsArray;
- private HashMap<String, List<Object>> expectedStartOffsetsArray;
-
- @Override
- protected Collection<Class<? extends Plugin>> nodePlugins() {
- return Collections.singleton(CustomScriptPlugin.class);
- }
-
- public static class CustomScriptPlugin extends MockScriptPlugin {
-
- @Override
- @SuppressWarnings("unchecked")
- protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
- Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
-
- scripts.put("term = _index['int_payload_field']['c']; term.tf()", vars -> tf(vars, "int_payload_field", "c"));
- scripts.put("term = _index['int_payload_field']['b']; term.tf()", vars -> tf(vars, "int_payload_field", "b"));
-
- scripts.put("Sum the payloads of [float_payload_field][b]", vars -> payloadSum(vars, "float_payload_field", "b"));
- scripts.put("Sum the payloads of [int_payload_field][b]", vars -> payloadSum(vars, "int_payload_field", "b"));
-
- scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",position]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.position));
- scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",startOffset]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.startOffset));
- scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",endOffset]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.endOffset));
- scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",payloadAsInt(-1)]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.payloadAsInt(-1)));
-
- scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,position]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.position));
- scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,startOffset]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.startOffset));
- scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,endOffset]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.endOffset));
- scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,payloadAsInt(-1)]",
- vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.payloadAsInt(-1)));
-
- scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,position]",
- vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.position));
- scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,startOffset]",
- vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.startOffset));
- scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,endOffset]",
- vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.endOffset));
- scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,payloadAsInt(-1)]",
- vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.payloadAsInt(-1)));
-
- scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS,position]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_POSITIONS, p -> p.position));
-
- scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,position]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.position));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,startOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.startOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,endOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.endOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,payloadAsInt(-1)]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.payloadAsInt(-1)));
-
- scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,position]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.position));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,startOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.startOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,endOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.endOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,payloadAsInt(-1)]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.payloadAsInt(-1)));
-
- int posoffpay = IndexLookup.FLAG_POSITIONS|IndexLookup.FLAG_OFFSETS|IndexLookup.FLAG_PAYLOADS;
- scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,position]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.position));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,startOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.startOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,endOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.endOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,payloadAsInt(-1)]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.payloadAsInt(-1)));
-
- scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,position]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.position));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,startOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.startOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,endOffset]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.endOffset));
- scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,payloadAsInt(-1)]",
- vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.payloadAsInt(-1)));
-
- scripts.put("createPositionsArrayScript" +
- "[float_payload_field,b," + INCLUDE_ALL + ",payloadAsFloat(-1)]",
- vars -> createPositionsArrayScript(vars,"float_payload_field", "b", ALL_FLAGS, p -> p.payloadAsFloat(-1)));
- scripts.put("createPositionsArrayScript" +
- "[string_payload_field,b," + INCLUDE_ALL + ",payloadAsString()]",
- vars -> createPositionsArrayScript(vars,"string_payload_field", "b", ALL_FLAGS, TermPosition::payloadAsString));
- scripts.put("createPositionsArrayScript" +
- "[int_payload_field,c," + INCLUDE_ALL + ",payloadAsInt(-1)]",
- vars -> createPositionsArrayScript(vars,"int_payload_field", "c", ALL_FLAGS, p -> p.payloadAsInt(-1)));
-
- // Call with different flags twice, equivalent to:
- // term = _index['int_payload_field']['b']; return _index['int_payload_field'].get('b', _POSITIONS).tf();
- scripts.put("Call with different flags twice", vars -> {
- LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index");
- IndexField indexField = leafIndexLookup.get("int_payload_field");
-
- // 1st call
- indexField.get("b");
- try {
- // 2nd call, must throws an exception
- return indexField.get("b", IndexLookup.FLAG_POSITIONS).tf();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "Call with different flags twice", CustomScriptPlugin.NAME);
- }
- });
-
- // Call with same flags twice: equivalent to:
- // term = _index['int_payload_field'].get('b', _POSITIONS | _FREQUENCIES);return _index['int_payload_field']['b'].tf();
- scripts.put("Call with same flags twice", vars -> {
- LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index");
- IndexField indexField = leafIndexLookup.get("int_payload_field");
-
- // 1st call
- indexField.get("b", IndexLookup.FLAG_POSITIONS | IndexLookup.FLAG_FREQUENCIES);
- try {
- // 2nd call, must throws an exception
- return indexField.get("b").tf();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "Call with same flags twice", CustomScriptPlugin.NAME);
- }
- });
-
- // get the number of all docs
- scripts.put("_index.numDocs()",
- vars -> ((LeafIndexLookup) vars.get("_index")).numDocs());
-
- // get the number of docs with field float_payload_field
- scripts.put("_index['float_payload_field'].docCount()",
- vars -> indexFieldScript(vars, "float_payload_field", indexField -> {
- try {
- return indexField.docCount();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "docCount()", CustomScriptPlugin.NAME);
- }
- }));
-
- // corner case: what if the field does not exist?
- scripts.put("_index['non_existent_field'].docCount()",
- vars -> indexFieldScript(vars, "non_existent_field", indexField -> {
- try {
- return indexField.docCount();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "docCount()", CustomScriptPlugin.NAME);
- }
- }));
-
- // get the number of all tokens in all docs
- scripts.put("_index['float_payload_field'].sumttf()",
- vars -> indexFieldScript(vars, "float_payload_field", indexField -> {
- try {
- return indexField.sumttf();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "sumttf()", CustomScriptPlugin.NAME);
- }
- }));
-
- // corner case get the number of all tokens in all docs for non existent
- // field
- scripts.put("_index['non_existent_field'].sumttf()",
- vars -> indexFieldScript(vars, "non_existent_field", indexField -> {
- try {
- return indexField.sumttf();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "sumttf()", CustomScriptPlugin.NAME);
- }
- }));
-
- // get the sum of doc freqs in all docs
- scripts.put("_index['float_payload_field'].sumdf()",
- vars -> indexFieldScript(vars, "float_payload_field", indexField -> {
- try {
- return indexField.sumdf();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "sumdf()", CustomScriptPlugin.NAME);
- }
- }));
-
- // get the sum of doc freqs in all docs for non existent field
- scripts.put("_index['non_existent_field'].sumdf()",
- vars -> indexFieldScript(vars, "non_existent_field", indexField -> {
- try {
- return indexField.sumdf();
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "sumdf()", CustomScriptPlugin.NAME);
- }
- }));
-
- // check term frequencies for 'a'
- scripts.put("term = _index['float_payload_field']['a']; if (term != null) {term.tf()}",
- vars -> indexFieldTermScript(vars, "float_payload_field", "a", indexFieldTerm -> {
- try {
- if (indexFieldTerm != null) {
- return indexFieldTerm.tf();
- }
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "term.tf()", CustomScriptPlugin.NAME);
- }
- return null;
- }));
-
- // check doc frequencies for 'c'
- scripts.put("term = _index['float_payload_field']['c']; if (term != null) {term.df()}",
- vars -> indexFieldTermScript(vars, "float_payload_field", "c", indexFieldTerm -> {
- try {
- if (indexFieldTerm != null) {
- return indexFieldTerm.df();
- }
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "term.df()", CustomScriptPlugin.NAME);
- }
- return null;
- }));
-
- // check doc frequencies for term that does not exist
- scripts.put("term = _index['float_payload_field']['non_existent_term']; if (term != null) {term.df()}",
- vars -> indexFieldTermScript(vars, "float_payload_field", "non_existent_term", indexFieldTerm -> {
- try {
- if (indexFieldTerm != null) {
- return indexFieldTerm.df();
- }
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "term.df()", CustomScriptPlugin.NAME);
- }
- return null;
- }));
-
- // check doc frequencies for term that does not exist
- scripts.put("term = _index['non_existent_field']['non_existent_term']; if (term != null) {term.tf()}",
- vars -> indexFieldTermScript(vars, "non_existent_field", "non_existent_term", indexFieldTerm -> {
- try {
- if (indexFieldTerm != null) {
- return indexFieldTerm.tf();
- }
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "term.tf()", CustomScriptPlugin.NAME);
- }
- return null;
- }));
-
- // check total term frequencies for 'a'
- scripts.put("term = _index['float_payload_field']['a']; if (term != null) {term.ttf()}",
- vars -> indexFieldTermScript(vars, "float_payload_field", "a", indexFieldTerm -> {
- try {
- if (indexFieldTerm != null) {
- return indexFieldTerm.ttf();
- }
- } catch (IOException e) {
- throw new ScriptException(e.getMessage(), e, emptyList(), "term.ttf()", CustomScriptPlugin.NAME);
- }
- return null;
- }));
-
- return scripts;
- }
-
- @SuppressWarnings("unchecked")
- static Object indexFieldScript(Map<String, Object> vars, String fieldName, Function<IndexField, Object> fn) {
- LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index");
- return fn.apply(leafIndexLookup.get(fieldName));
- }
-
- @SuppressWarnings("unchecked")
- static Object indexFieldTermScript(Map<String, Object> vars, String fieldName, String term, Function<IndexFieldTerm, Object> fn) {
- return indexFieldScript(vars, fieldName, indexField -> fn.apply(indexField.get(term)));
- }
-
- @SuppressWarnings("unchecked")
- static Object tf(Map<String, Object> vars, String fieldName, String term) {
- return indexFieldTermScript(vars, fieldName, term, indexFieldTerm -> {
- try {
- return indexFieldTerm.tf();
- } catch (IOException e) {
- throw new RuntimeException("Mocked script error when retrieving TF for [" + fieldName + "][" + term + "]");
- }
- });
- }
-
- // Sum the payloads for a given field term, equivalent to:
- // term = _index['float_payload_field'].get('b', _FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS|_CACHE);
- // payloadSum=0;
- // for (pos in term) {
- // payloadSum += pos.payloadAsInt(0)
- // };
- // return payloadSum;
- @SuppressWarnings("unchecked")
- static Object payloadSum(Map<String, Object> vars, String fieldName, String term) {
- return indexFieldScript(vars, fieldName, indexField -> {
- IndexFieldTerm indexFieldTerm = indexField.get(term, IndexLookup.FLAG_FREQUENCIES
- | IndexLookup.FLAG_OFFSETS
- | IndexLookup.FLAG_PAYLOADS
- | IndexLookup.FLAG_POSITIONS
- | IndexLookup.FLAG_CACHE);
- int payloadSum = 0;
- for (TermPosition position : indexFieldTerm) {
- payloadSum += position.payloadAsInt(0);
- }
- return payloadSum;
- });
- }
-
- @SuppressWarnings("unchecked")
- static List<Object> createPositionsArrayScriptGetInfoObjectTwice(Map<String, Object> vars, String term, int flags,
- Function<TermPosition, Object> fn) {
- LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index");
- IndexField indexField = leafIndexLookup.get("int_payload_field");
-
- // 1st call
- IndexFieldTerm indexFieldTerm = indexField.get(term, flags);
-
- List<Object> array = new ArrayList<>();
- for (TermPosition position : indexFieldTerm) {
- array.add(fn.apply(position));
- }
-
- // 2nd call
- indexField.get(term, flags);
-
- array = new ArrayList<>();
- for (TermPosition position : indexFieldTerm) {
- array.add(fn.apply(position));
- }
-
- return array;
- }
-
- @SuppressWarnings("unchecked")
- static List<Object> createPositionsArrayScriptIterateTwice(Map<String, Object> vars, String term, int flags,
- Function<TermPosition, Object> fn) {
- LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index");
- IndexField indexField = leafIndexLookup.get("int_payload_field");
-
- IndexFieldTerm indexFieldTerm = indexField.get(term, flags);
-
- // 1st iteration
- List<Object> array = new ArrayList<>();
- for (TermPosition position : indexFieldTerm) {
- array.add(fn.apply(position));
- }
-
- // 2nd iteration
- array = new ArrayList<>();
- for (TermPosition position : indexFieldTerm) {
- array.add(fn.apply(position));
- }
-
- return array;
- }
-
- @SuppressWarnings("unchecked")
- static List<Object> createPositionsArrayScript(Map<String, Object> vars, String field, String term, int flags,
- Function<TermPosition, Object> fn) {
-
- LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index");
- IndexField indexField = leafIndexLookup.get(field);
-
- IndexFieldTerm indexFieldTerm = indexField.get(term, flags);
- List<Object> array = new ArrayList<>();
- for (TermPosition position : indexFieldTerm) {
- array.add(fn.apply(position));
- }
- return array;
- }
- }
-
- void initTestData() throws InterruptedException, ExecutionException, IOException {
- HashMap<String, List<Object>> emptyArray = new HashMap<>();
- List<Object> empty1 = new ArrayList<>();
- empty1.add(-1);
- empty1.add(-1);
- emptyArray.put("1", empty1);
- List<Object> empty2 = new ArrayList<>();
- empty2.add(-1);
- empty2.add(-1);
- emptyArray.put("2", empty2);
- List<Object> empty3 = new ArrayList<>();
- empty3.add(-1);
- empty3.add(-1);
- emptyArray.put("3", empty3);
-
- expectedPositionsArray = new HashMap<>();
-
- List<Object> pos1 = new ArrayList<>();
- pos1.add(1);
- pos1.add(2);
- expectedPositionsArray.put("1", pos1);
- List<Object> pos2 = new ArrayList<>();
- pos2.add(0);
- pos2.add(1);
- expectedPositionsArray.put("2", pos2);
- List<Object> pos3 = new ArrayList<>();
- pos3.add(0);
- pos3.add(4);
- expectedPositionsArray.put("3", pos3);
-
- expectedPayloadsArray = new HashMap<>();
- List<Object> pay1 = new ArrayList<>();
- pay1.add(2);
- pay1.add(3);
- expectedPayloadsArray.put("1", pay1);
- List<Object> pay2 = new ArrayList<>();
- pay2.add(1);
- pay2.add(2);
- expectedPayloadsArray.put("2", pay2);
- List<Object> pay3 = new ArrayList<>();
- pay3.add(1);
- pay3.add(-1);
- expectedPayloadsArray.put("3", pay3);
- /*
- * "a|1 b|2 b|3 c|4 d " "b|1 b|2 c|3 d|4 a " "b|1 c|2 d|3 a|4 b "
- */
- expectedStartOffsetsArray = new HashMap<>();
- List<Object> starts1 = new ArrayList<>();
- starts1.add(4);
- starts1.add(8);
- expectedStartOffsetsArray.put("1", starts1);
- List<Object> starts2 = new ArrayList<>();
- starts2.add(0);
- starts2.add(4);
- expectedStartOffsetsArray.put("2", starts2);
- List<Object> starts3 = new ArrayList<>();
- starts3.add(0);
- starts3.add(16);
- expectedStartOffsetsArray.put("3", starts3);
-
- expectedEndOffsetsArray = new HashMap<>();
- List<Object> ends1 = new ArrayList<>();
- ends1.add(7);
- ends1.add(11);
- expectedEndOffsetsArray.put("1", ends1);
- List<Object> ends2 = new ArrayList<>();
- ends2.add(3);
- ends2.add(7);
- expectedEndOffsetsArray.put("2", ends2);
- List<Object> ends3 = new ArrayList<>();
- ends3.add(3);
- ends3.add(17);
- expectedEndOffsetsArray.put("3", ends3);
-
- XContentBuilder mapping = XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type1")
- .startObject("properties")
- .startObject("int_payload_field")
- .field("type", "text")
- .field("index_options", "offsets")
- .field("analyzer", "payload_int")
- .endObject()
- .endObject()
- .endObject()
- .endObject();
-
- assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
- Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.payload_int.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.payload_int.filter", "delimited_int")
- .put("index.analysis.filter.delimited_int.delimiter", "|")
- .put("index.analysis.filter.delimited_int.encoding", "int")
- .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter")));
- indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("int_payload_field", "a|1 b|2 b|3 c|4 d "), client()
- .prepareIndex("test", "type1", "2").setSource("int_payload_field", "b|1 b|2 c|3 d|4 a "),
- client().prepareIndex("test", "type1", "3").setSource("int_payload_field", "b|1 c|2 d|3 a|4 b "));
- ensureGreen();
- }
-
- public void testTwoScripts() throws Exception {
- initTestData();
-
- Script scriptFieldScript = createScript("term = _index['int_payload_field']['c']; term.tf()");
- Script scoreScript = createScript("term = _index['int_payload_field']['b']; term.tf()");
- Map<String, Object> expectedResultsField = new HashMap<>();
- expectedResultsField.put("1", 1);
- expectedResultsField.put("2", 1);
- expectedResultsField.put("3", 1);
- Map<String, Object> expectedResultsScore = new HashMap<>();
- expectedResultsScore.put("1", 2f);
- expectedResultsScore.put("2", 2f);
- expectedResultsScore.put("3", 2f);
- checkOnlyFunctionScore(scoreScript, expectedResultsScore, 3);
- checkValueInEachDocWithFunctionScore(scriptFieldScript, expectedResultsField, scoreScript, expectedResultsScore, 3);
-
- }
-
- public void testCallWithDifferentFlagsFails() throws Exception {
- initTestData();
- final int numPrimaries = getNumShards("test").numPrimaries;
- final String expectedError = "You must call get with all required flags! " +
- "Instead of _index['int_payload_field'].get('b', _FREQUENCIES) and _index['int_payload_field'].get('b', _POSITIONS)" +
- " call _index['int_payload_field'].get('b', _FREQUENCIES | _POSITIONS) once]";
-
- // should throw an exception, we cannot call with different flags twice
- // if the flags of the second call were not included in the first call.
- Script script = createScript("Call with different flags twice");
- try {
- SearchResponse response = client().prepareSearch("test")
- .setQuery(QueryBuilders.matchAllQuery())
- .addScriptField("tvtest", script)
- .get();
-
- // (partial) success when at least one shard succeeds
- assertThat(numPrimaries, greaterThan(response.getShardFailures().length));
- assertThat(response.getFailedShards(), greaterThanOrEqualTo(1));
-
- for (ShardSearchFailure failure : response.getShardFailures()) {
- assertThat(failure.reason(), containsString(expectedError));
- }
- } catch (SearchPhaseExecutionException e) {
- // Exception thrown when *all* shards fail
- assertThat(numPrimaries, equalTo(e.shardFailures().length));
- for (ShardSearchFailure failure : e.shardFailures()) {
- assertThat(failure.reason(), containsString(expectedError));
- }
- }
-
- // Should not throw an exception this way round
- script = createScript("Call with same flags twice");
- assertThat(client().prepareSearch("test")
- .setQuery(QueryBuilders.matchAllQuery())
- .addScriptField("tvtest", script)
- .get().getHits().getTotalHits(), greaterThan(0L));
- }
-
- private void checkOnlyFunctionScore(Script scoreScript, Map<String, Object> expectedScore, int numExpectedDocs) {
- SearchResponse sr = client().prepareSearch("test")
- .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript))).execute()
- .actionGet();
- assertHitCount(sr, numExpectedDocs);
- for (SearchHit hit : sr.getHits().getHits()) {
- assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(),
- Matchers.closeTo(hit.getScore(), 1.e-4));
- }
- }
-
- public void testDocumentationExample() throws Exception {
- initTestData();
-
- Script script = createScript("Sum the payloads of [float_payload_field][b]");
-
- // non existing field: sum should be 0
- HashMap<String, Object> zeroArray = new HashMap<>();
- zeroArray.put("1", 0);
- zeroArray.put("2", 0);
- zeroArray.put("3", 0);
- checkValueInEachDoc(script, zeroArray, 3);
-
- script = createScript("Sum the payloads of [int_payload_field][b]");
-
- // existing field: sums should be as here:
- zeroArray.put("1", 5);
- zeroArray.put("2", 3);
- zeroArray.put("3", 1);
- checkValueInEachDoc(script, zeroArray, 3);
- }
-
- public void testIteratorAndRecording() throws Exception {
- initTestData();
-
- // call twice with record: should work as expected
- Script script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "position");
- checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "startOffset");
- checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "endOffset");
- checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "payloadAsInt(-1)");
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
-
- // no record and get iterator twice: should fail
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "position");
- checkExceptions(script);
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "startOffset");
- checkExceptions(script);
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "endOffset");
- checkExceptions(script);
- script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "payloadAsInt(-1)");
- checkExceptions(script);
-
- // no record and get termObject twice and iterate: should fail
- script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "position");
- checkExceptions(script);
- script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "startOffset");
- checkExceptions(script);
- script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "endOffset");
- checkExceptions(script);
- script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "payloadAsInt(-1)");
- checkExceptions(script);
-
- }
-
- private Script createPositionsArrayScriptGetInfoObjectTwice(String term, String flags, String what) {
- return createScript("createPositionsArrayScriptGetInfoObjectTwice[" + term + "," + flags + "," + what + "]");
- }
-
- private Script createPositionsArrayScriptIterateTwice(String term, String flags, String what) {
- return createScript("createPositionsArrayScriptIterateTwice[" + term + "," + flags + "," + what + "]");
- }
-
- private Script createPositionsArrayScript(String field, String term, String flags, String what) {
- return createScript("createPositionsArrayScript[" + field + "," + term + "," + flags + "," + what + "]");
- }
-
- private Script createPositionsArrayScriptDefaultGet(String field, String term, String what) {
- return createScript("createPositionsArrayScriptDefaultGet[" + field + "," + term + "," + what + "]");
- }
-
- private Script createScript(String script) {
- return new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, script, Collections.emptyMap());
- }
-
- public void testFlags() throws Exception {
- initTestData();
-
- // check default flag
- Script script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "position");
- // there should be no positions
- /* TODO: the following tests fail with the new postings enum apis because of a bogus assert in BlockDocsEnum
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "startOffset");
- // there should be no offsets
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "endOffset");
- // there should be no offsets
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "payloadAsInt(-1)");
- // there should be no payload
- checkArrayValsInEachDoc(script, emptyArray, 3);
-
- // check FLAG_FREQUENCIES flag
- script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "position");
- // there should be no positions
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "startOffset");
- // there should be no offsets
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "endOffset");
- // there should be no offsets
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "payloadAsInt(-1)");
- // there should be no payloads
- checkArrayValsInEachDoc(script, emptyArray, 3);*/
-
- // check FLAG_POSITIONS flag
- script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "position");
- // there should be positions
- checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
- /* TODO: these tests make a bogus assumption that asking for positions will return only positions
- script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "startOffset");
- // there should be no offsets
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "endOffset");
- // there should be no offsets
- checkArrayValsInEachDoc(script, emptyArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "payloadAsInt(-1)");
- // there should be no payloads
- checkArrayValsInEachDoc(script, emptyArray, 3);*/
-
- // check FLAG_OFFSETS flag
- script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "position");
- // there should be positions and so forth ...
- checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "startOffset");
- checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "endOffset");
- checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "payloadAsInt(-1)");
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
-
- // check FLAG_PAYLOADS flag
- script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "position");
- checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "startOffset");
- checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "endOffset");
- checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "payloadAsInt(-1)");
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
-
- // check all flags
- String allFlags = "_POSITIONS|_OFFSETS|_PAYLOADS";
- script = createPositionsArrayScript("int_payload_field", "b", allFlags, "position");
- checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", allFlags, "startOffset");
- checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", allFlags, "endOffset");
- checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", allFlags, "payloadAsInt(-1)");
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
-
- // check all flags without record
- script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "position");
- checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "startOffset");
- checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "endOffset");
- checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
- script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "payloadAsInt(-1)");
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
-
- }
-
- private void checkArrayValsInEachDoc(Script script, HashMap<String, List<Object>> expectedArray, int expectedHitSize) {
- SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
- .execute().actionGet();
- assertHitCount(sr, expectedHitSize);
- int nullCounter = 0;
- for (SearchHit hit : sr.getHits().getHits()) {
- Object result = hit.getFields().get("tvtest").getValues();
- Object expectedResult = expectedArray.get(hit.getId());
- assertThat("for doc " + hit.getId(), result, equalTo(expectedResult));
- if (expectedResult != null) {
- nullCounter++;
- }
- }
- assertThat(nullCounter, equalTo(expectedArray.size()));
- }
-
- public void testAllExceptPosAndOffset() throws Exception {
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("float_payload_field").field("type", "text").field("index_options", "offsets").field("term_vector", "no")
- .field("analyzer", "payload_float").endObject().startObject("string_payload_field").field("type", "text")
- .field("index_options", "offsets").field("term_vector", "no").field("analyzer", "payload_string").endObject()
- .startObject("int_payload_field").field("type", "text").field("index_options", "offsets")
- .field("analyzer", "payload_int").endObject().endObject().endObject().endObject();
- assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
- Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.payload_float.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.payload_float.filter", "delimited_float")
- .put("index.analysis.filter.delimited_float.delimiter", "|")
- .put("index.analysis.filter.delimited_float.encoding", "float")
- .put("index.analysis.filter.delimited_float.type", "delimited_payload_filter")
- .put("index.analysis.analyzer.payload_string.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.payload_string.filter", "delimited_string")
- .put("index.analysis.filter.delimited_string.delimiter", "|")
- .put("index.analysis.filter.delimited_string.encoding", "identity")
- .put("index.analysis.filter.delimited_string.type", "delimited_payload_filter")
- .put("index.analysis.analyzer.payload_int.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.payload_int.filter", "delimited_int")
- .put("index.analysis.filter.delimited_int.delimiter", "|")
- .put("index.analysis.filter.delimited_int.encoding", "int")
- .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter")
- .put("index.number_of_shards", 1)));
- indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("float_payload_field", "a|1 b|2 a|3 b "), client()
- .prepareIndex("test", "type1", "2").setSource("string_payload_field", "a|a b|b a|a b "),
- client().prepareIndex("test", "type1", "3").setSource("float_payload_field", "a|4 b|5 a|6 b "),
- client().prepareIndex("test", "type1", "4").setSource("string_payload_field", "a|b b|a a|b b "),
- client().prepareIndex("test", "type1", "5").setSource("float_payload_field", "c "),
- client().prepareIndex("test", "type1", "6").setSource("int_payload_field", "c|1"));
-
- // get the number of all docs
- Script script = createScript("_index.numDocs()");
- checkValueInEachDoc(6, script, 6);
-
- // get the number of docs with field float_payload_field
- script = createScript("_index['float_payload_field'].docCount()");
- checkValueInEachDoc(3, script, 6);
-
- // corner case: what if the field does not exist?
- script = createScript("_index['non_existent_field'].docCount()");
- checkValueInEachDoc(0, script, 6);
-
- // get the number of all tokens in all docs
- script = createScript("_index['float_payload_field'].sumttf()");
- checkValueInEachDoc(9, script, 6);
-
- // corner case get the number of all tokens in all docs for non existent
- // field
- script = createScript("_index['non_existent_field'].sumttf()");
- checkValueInEachDoc(0, script, 6);
-
- // get the sum of doc freqs in all docs
- script = createScript("_index['float_payload_field'].sumdf()");
- checkValueInEachDoc(5, script, 6);
-
- // get the sum of doc freqs in all docs for non existent field
- script = createScript("_index['non_existent_field'].sumdf()");
- checkValueInEachDoc(0, script, 6);
-
- // check term frequencies for 'a'
- script = createScript("term = _index['float_payload_field']['a']; if (term != null) {term.tf()}");
- Map<String, Object> expectedResults = new HashMap<>();
- expectedResults.put("1", 2);
- expectedResults.put("2", 0);
- expectedResults.put("3", 2);
- expectedResults.put("4", 0);
- expectedResults.put("5", 0);
- expectedResults.put("6", 0);
- checkValueInEachDoc(script, expectedResults, 6);
- expectedResults.clear();
-
- // check doc frequencies for 'c'
- script = createScript("term = _index['float_payload_field']['c']; if (term != null) {term.df()}");
- expectedResults.put("1", 1L);
- expectedResults.put("2", 1L);
- expectedResults.put("3", 1L);
- expectedResults.put("4", 1L);
- expectedResults.put("5", 1L);
- expectedResults.put("6", 1L);
- checkValueInEachDoc(script, expectedResults, 6);
- expectedResults.clear();
-
- // check doc frequencies for term that does not exist
- script = createScript("term = _index['float_payload_field']['non_existent_term']; if (term != null) {term.df()}");
- expectedResults.put("1", 0L);
- expectedResults.put("2", 0L);
- expectedResults.put("3", 0L);
- expectedResults.put("4", 0L);
- expectedResults.put("5", 0L);
- expectedResults.put("6", 0L);
- checkValueInEachDoc(script, expectedResults, 6);
- expectedResults.clear();
-
- // check doc frequencies for term that does not exist
- script = createScript("term = _index['non_existent_field']['non_existent_term']; if (term != null) {term.tf()}");
- expectedResults.put("1", 0);
- expectedResults.put("2", 0);
- expectedResults.put("3", 0);
- expectedResults.put("4", 0);
- expectedResults.put("5", 0);
- expectedResults.put("6", 0);
- checkValueInEachDoc(script, expectedResults, 6);
- expectedResults.clear();
-
- // check total term frequencies for 'a'
- script = createScript("term = _index['float_payload_field']['a']; if (term != null) {term.ttf()}");
- expectedResults.put("1", 4L);
- expectedResults.put("2", 4L);
- expectedResults.put("3", 4L);
- expectedResults.put("4", 4L);
- expectedResults.put("5", 4L);
- expectedResults.put("6", 4L);
- checkValueInEachDoc(script, expectedResults, 6);
- expectedResults.clear();
-
- // check float payload for 'b'
- HashMap<String, List<Object>> expectedPayloadsArray = new HashMap<>();
- script = createPositionsArrayScript("float_payload_field", "b", INCLUDE_ALL, "payloadAsFloat(-1)");
- float missingValue = -1;
- List<Object> payloadsFor1 = new ArrayList<>();
- payloadsFor1.add(2f);
- payloadsFor1.add(missingValue);
- expectedPayloadsArray.put("1", payloadsFor1);
- List<Object> payloadsFor2 = new ArrayList<>();
- payloadsFor2.add(5f);
- payloadsFor2.add(missingValue);
- expectedPayloadsArray.put("3", payloadsFor2);
- expectedPayloadsArray.put("6", new ArrayList<>());
- expectedPayloadsArray.put("5", new ArrayList<>());
- expectedPayloadsArray.put("4", new ArrayList<>());
- expectedPayloadsArray.put("2", new ArrayList<>());
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 6);
-
- // check string payload for 'b'
- expectedPayloadsArray.clear();
- payloadsFor1.clear();
- payloadsFor2.clear();
- script = createPositionsArrayScript("string_payload_field", "b", INCLUDE_ALL, "payloadAsString()");
- payloadsFor1.add("b");
- payloadsFor1.add(null);
- expectedPayloadsArray.put("2", payloadsFor1);
- payloadsFor2.add("a");
- payloadsFor2.add(null);
- expectedPayloadsArray.put("4", payloadsFor2);
- expectedPayloadsArray.put("6", new ArrayList<>());
- expectedPayloadsArray.put("5", new ArrayList<>());
- expectedPayloadsArray.put("3", new ArrayList<>());
- expectedPayloadsArray.put("1", new ArrayList<>());
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 6);
-
- // check int payload for 'c'
- expectedPayloadsArray.clear();
- payloadsFor1.clear();
- payloadsFor2.clear();
- script = createPositionsArrayScript("int_payload_field", "c", INCLUDE_ALL, "payloadAsInt(-1)");
- payloadsFor1 = new ArrayList<>();
- payloadsFor1.add(1);
- expectedPayloadsArray.put("6", payloadsFor1);
- expectedPayloadsArray.put("5", new ArrayList<>());
- expectedPayloadsArray.put("4", new ArrayList<>());
- expectedPayloadsArray.put("3", new ArrayList<>());
- expectedPayloadsArray.put("2", new ArrayList<>());
- expectedPayloadsArray.put("1", new ArrayList<>());
- checkArrayValsInEachDoc(script, expectedPayloadsArray, 6);
-
- }
-
- private void checkExceptions(Script script) {
- try {
- SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
- .execute().actionGet();
- assertThat(sr.getHits().getHits().length, equalTo(0));
- ShardSearchFailure[] shardFails = sr.getShardFailures();
- for (ShardSearchFailure fail : shardFails) {
- assertThat(fail.reason().indexOf("Cannot iterate twice! If you want to iterate more that once, add _CACHE explicitly."),
- Matchers.greaterThan(-1));
- }
- } catch (SearchPhaseExecutionException ex) {
- assertThat(
- "got " + ex.toString(),
- ex.toString().indexOf("Cannot iterate twice! If you want to iterate more that once, add _CACHE explicitly."),
- Matchers.greaterThan(-1));
- }
- }
-
- private void checkValueInEachDocWithFunctionScore(Script fieldScript, Map<String, Object> expectedFieldVals, Script scoreScript,
- Map<String, Object> expectedScore, int numExpectedDocs) {
- SearchResponse sr = client().prepareSearch("test")
- .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript)))
- .addScriptField("tvtest", fieldScript).execute().actionGet();
- assertHitCount(sr, numExpectedDocs);
- for (SearchHit hit : sr.getHits().getHits()) {
- Object result = hit.getFields().get("tvtest").getValues().get(0);
- Object expectedResult = expectedFieldVals.get(hit.getId());
- assertThat("for doc " + hit.getId(), result, equalTo(expectedResult));
- assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(),
- Matchers.closeTo(hit.getScore(), 1.e-4));
- }
- }
-
- private void checkValueInEachDoc(Script script, Map<String, Object> expectedResults, int numExpectedDocs) {
- SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
- .execute().actionGet();
- assertHitCount(sr, numExpectedDocs);
- for (SearchHit hit : sr.getHits().getHits()) {
- Object result = hit.getFields().get("tvtest").getValues().get(0);
- Object expectedResult = expectedResults.get(hit.getId());
- assertThat("for doc " + hit.getId(), result, equalTo(expectedResult));
- }
- }
-
- private void checkValueInEachDoc(int value, Script script, int numExpectedDocs) {
- SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
- .execute().actionGet();
- assertHitCount(sr, numExpectedDocs);
- for (SearchHit hit : sr.getHits().getHits()) {
- Object result = hit.getFields().get("tvtest").getValues().get(0);
- if (result instanceof Integer) {
- assertThat(result, equalTo(value));
- } else if (result instanceof Long) {
- assertThat(((Long) result).intValue(), equalTo(value));
- } else {
- fail();
- }
- }
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java
index 7f56f3de4b..0960bc71be 100644
--- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java
+++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java
@@ -55,6 +55,7 @@ public class NativeScriptTests extends ESTestCase {
CompiledScript compiledScript = scriptModule.getScriptService().compile(script, ScriptContext.Standard.SEARCH);
ExecutableScript executable = scriptModule.getScriptService().executable(compiledScript, script.getParams());
assertThat(executable.run().toString(), equalTo("test"));
+ assertWarnings("Native scripts are deprecated. Use a custom ScriptEngine to write scripts in java.");
}
public void testFineGrainedSettingsDontAffectNativeScripts() throws IOException {
@@ -82,6 +83,7 @@ public class NativeScriptTests extends ESTestCase {
assertThat(scriptService.compile(new Script(ScriptType.INLINE, NativeScriptEngine.NAME, "my", Collections.emptyMap()),
scriptContext), notNullValue());
}
+ assertWarnings("Native scripts are deprecated. Use a custom ScriptEngine to write scripts in java.");
}
public static class MyNativeScriptFactory implements NativeScriptFactory {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index 8a167c0daf..ce57596497 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
@@ -51,6 +52,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
@@ -866,6 +868,19 @@ public class DateRangeIT extends ESIntegTestCase {
assertThat(buckets.get(0).getAggregations().asList().isEmpty(), is(true));
}
+ public void testNoRangesInQuery() {
+ try {
+ client().prepareSearch("idx")
+ .addAggregation(dateRange("my_date_range_agg").field("value"))
+ .execute().actionGet();
+ fail();
+ } catch (SearchPhaseExecutionException spee){
+ Throwable rootCause = spee.getCause().getCause();
+ assertThat(rootCause, instanceOf(IllegalArgumentException.class));
+ assertEquals(rootCause.getMessage(), "No [ranges] specified for the [my_date_range_agg] aggregation");
+ }
+ }
+
/**
* Make sure that a request using a script does not get cached and a request
* not using a script does get cached.
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
index d8aab691d2..032bb8d591 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoPoint;
@@ -52,6 +53,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogra
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.hamcrest.core.IsNull.notNullValue;
@@ -441,6 +443,19 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(buckets.get(0).getDocCount(), equalTo(0L));
}
+ public void testNoRangesInQuery() {
+ try {
+ client().prepareSearch("idx")
+ .addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)))
+ .execute().actionGet();
+ fail();
+ } catch (SearchPhaseExecutionException spee){
+ Throwable rootCause = spee.getCause().getCause();
+ assertThat(rootCause, instanceOf(IllegalArgumentException.class));
+ assertEquals(rootCause.getMessage(), "No [ranges] specified for the [geo_dist] aggregation");
+ }
+ }
+
public void testMultiValues() throws Exception {
SearchResponse response = client().prepareSearch("idx-multi")
.addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java
index 571a32b87b..67bec2acf7 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java
@@ -78,17 +78,14 @@ public class GlobalAggregatorTests extends AggregatorTestCase {
aggregationBuilder.subAggregation(new MinAggregationBuilder("in_global").field("number"));
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- try {
- aggregator.preCollection();
- indexSearcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- InternalGlobal result = (InternalGlobal) aggregator.buildAggregation(0L);
- verify.accept(result, (InternalMin) result.getAggregations().asMap().get("in_global"));
- } finally {
- IOUtils.close(aggregator.subAggregators());
- }
- }
+
+ GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ InternalGlobal result = (InternalGlobal) aggregator.buildAggregation(0L);
+ verify.accept(result, (InternalMin) result.getAggregations().asMap().get("in_global"));
+
indexReader.close();
directory.close();
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
index cc4818963a..b9bb46501d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
@@ -18,18 +18,9 @@
*/
package org.elasticsearch.search.aggregations.bucket;
-import org.elasticsearch.cluster.health.ClusterHealthStatus;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.containsString;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
@@ -42,6 +33,17 @@ import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.test.ESIntegTestCase;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
+
@ESIntegTestCase.SuiteScopeTestCase
public class IpRangeIT extends ESIntegTestCase {
@@ -221,6 +223,20 @@ public class IpRangeIT extends ESIntegTestCase {
assertThat(e.getMessage(), containsString("[ip_range] does not support scripts"));
}
+ public void testNoRangesInQuery() {
+ try {
+ client().prepareSearch("idx").addAggregation(
+ AggregationBuilders.ipRange("my_range")
+ .field("ip"))
+ .execute().actionGet();
+ fail();
+ } catch (SearchPhaseExecutionException spee){
+ Throwable rootCause = spee.getCause().getCause();
+ assertThat(rootCause, instanceOf(IllegalArgumentException.class));
+ assertEquals(rootCause.getMessage(), "No [ranges] specified for the [my_range] aggregation");
+ }
+ }
+
public static class DummyScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java
index b4bb3c819d..c2a2405098 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.ScriptDocValues;
@@ -53,6 +54,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
@@ -661,6 +663,20 @@ public class RangeIT extends ESIntegTestCase {
assertThat(bucket.getDocCount(), equalTo(0L));
}
+ public void testNoRangesInQuery() {
+ try {
+ client().prepareSearch("idx")
+ .addAggregation(range("foobar")
+ .field(SINGLE_VALUED_FIELD_NAME))
+ .execute().actionGet();
+ fail();
+ } catch (SearchPhaseExecutionException spee){
+ Throwable rootCause = spee.getCause().getCause();
+ assertThat(rootCause, instanceOf(IllegalArgumentException.class));
+ assertEquals(rootCause.getMessage(), "No [ranges] specified for the [foobar] aggregation");
+ }
+ }
+
public void testScriptMultiValued() throws Exception {
Script script =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "'].values", Collections.emptyMap());
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java
index 04147b245c..45b6b64cdd 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java
@@ -112,12 +112,13 @@ public class GeoHashGridAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType();
fieldType.setHasDocValues(true);
fieldType.setName(FIELD_NAME);
- try (Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
- verify.accept((InternalGeoHashGrid) aggregator.buildAggregation(0L));
- }
+
+ Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((InternalGeoHashGrid) aggregator.buildAggregation(0L));
+
indexReader.close();
directory.close();
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
index 7b93653fff..f54cb902d9 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
@@ -75,21 +75,22 @@ public class TermsAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
fieldType.setName("string");
fieldType.setHasDocValues(true );
- try (TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- Terms result = (Terms) aggregator.buildAggregation(0L);
- assertEquals(4, result.getBuckets().size());
- assertEquals("a", result.getBuckets().get(0).getKeyAsString());
- assertEquals(2L, result.getBuckets().get(0).getDocCount());
- assertEquals("b", result.getBuckets().get(1).getKeyAsString());
- assertEquals(2L, result.getBuckets().get(1).getDocCount());
- assertEquals("c", result.getBuckets().get(2).getKeyAsString());
- assertEquals(1L, result.getBuckets().get(2).getDocCount());
- assertEquals("d", result.getBuckets().get(3).getKeyAsString());
- assertEquals(1L, result.getBuckets().get(3).getDocCount());
- }
+
+ TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ Terms result = (Terms) aggregator.buildAggregation(0L);
+ assertEquals(4, result.getBuckets().size());
+ assertEquals("a", result.getBuckets().get(0).getKeyAsString());
+ assertEquals(2L, result.getBuckets().get(0).getDocCount());
+ assertEquals("b", result.getBuckets().get(1).getKeyAsString());
+ assertEquals(2L, result.getBuckets().get(1).getDocCount());
+ assertEquals("c", result.getBuckets().get(2).getKeyAsString());
+ assertEquals(1L, result.getBuckets().get(2).getDocCount());
+ assertEquals("d", result.getBuckets().get(3).getKeyAsString());
+ assertEquals(1L, result.getBuckets().get(3).getDocCount());
+
}
indexReader.close();
directory.close();
@@ -191,12 +192,11 @@ public class TermsAggregatorTests extends AggregatorTestCase {
private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType,
IndexSearcher searcher) throws IOException {
- try (TermsAggregator aggregator = createAggregator(builder, searcher, fieldType)) {
- aggregator.preCollection();
- searcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- return aggregator.buildAggregation(0L);
- }
+ TermsAggregator aggregator = createAggregator(builder, searcher, fieldType);
+ aggregator.preCollection();
+ searcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ return aggregator.buildAggregation(0L);
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java
index b80dd163fc..90afe09529 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java
@@ -118,13 +118,13 @@ public class CardinalityAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (CardinalityAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher,
- fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
- verify.accept((InternalCardinality) aggregator.buildAggregation(0L));
- }
+ CardinalityAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher,
+ fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((InternalCardinality) aggregator.buildAggregation(0L));
+
indexReader.close();
directory.close();
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
index c53927a55b..8a1bc036fb 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
@@ -112,12 +112,13 @@ public class MaxAggregatorTests extends AggregatorTestCase {
MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
- verify.accept((InternalMax) aggregator.buildAggregation(0L));
- }
+
+ MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((InternalMax) aggregator.buildAggregation(0L));
+
indexReader.close();
directory.close();
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java
index 9ba7ecb71b..4e90a9083c 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java
@@ -113,12 +113,13 @@ public class AvgAggregatorTests extends AggregatorTestCase {
AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (AvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
- verify.accept((InternalAvg) aggregator.buildAggregation(0L));
- }
+
+ AvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((InternalAvg) aggregator.buildAggregation(0L));
+
indexReader.close();
directory.close();
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java
index 48b0b115e8..3408742160 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java
@@ -62,13 +62,14 @@ public class MinAggregatorTests extends AggregatorTestCase {
MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
- assertEquals(-1.0, result.getValue(), 0);
- }
+
+ MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
+ assertEquals(-1.0, result.getValue(), 0);
+
indexReader.close();
directory.close();
}
@@ -96,13 +97,14 @@ public class MinAggregatorTests extends AggregatorTestCase {
MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
- assertEquals(-1.0, result.getValue(), 0);
- }
+
+ MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
+ assertEquals(-1.0, result.getValue(), 0);
+
indexReader.close();
directory.close();
}
@@ -127,13 +129,14 @@ public class MinAggregatorTests extends AggregatorTestCase {
MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number2");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number2");
- try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
- assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
- }
+
+ MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
+ assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
+
indexReader.close();
directory.close();
}
@@ -149,13 +152,14 @@ public class MinAggregatorTests extends AggregatorTestCase {
MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(new MatchAllDocsQuery(), aggregator);
- aggregator.postCollection();
- InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
- assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
- }
+
+ MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
+ assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
+
indexReader.close();
directory.close();
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java
index f264243044..8aa160c8a8 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java
@@ -127,12 +127,12 @@ public class HDRPercentilesAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (HDRPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
- verify.accept((InternalHDRPercentiles) aggregator.buildAggregation(0L));
- }
+ HDRPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((InternalHDRPercentiles) aggregator.buildAggregation(0L));
+
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java
index 90cc2464a1..7f95b06b5a 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java
@@ -148,12 +148,11 @@ public class TDigestPercentilesAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
- try (TDigestPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
- verify.accept((InternalTDigestPercentiles) aggregator.buildAggregation(0L));
- }
+ TDigestPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((InternalTDigestPercentiles) aggregator.buildAggregation(0L));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorTests.java
index 53731c5853..20f7512761 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorTests.java
@@ -132,13 +132,12 @@ public class SumAggregatorTests extends AggregatorTestCase {
SumAggregationBuilder aggregationBuilder = new SumAggregationBuilder("_name");
aggregationBuilder.field(FIELD_NAME);
- try (SumAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
+ SumAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
- verify.accept((Sum) aggregator.buildAggregation(0L));
- }
+ verify.accept((Sum) aggregator.buildAggregation(0L));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java
index b5aacbfcca..1da1807bfe 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java
@@ -120,13 +120,11 @@ public class ValueCountAggregatorTests extends AggregatorTestCase {
ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("_name", valueType);
aggregationBuilder.field(FIELD_NAME);
- try (ValueCountAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
- aggregator.preCollection();
- indexSearcher.search(query, aggregator);
- aggregator.postCollection();
-
- verify.accept((ValueCount) aggregator.buildAggregation(0L));
- }
+ ValueCountAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ aggregator.preCollection();
+ indexSearcher.search(query, aggregator);
+ aggregator.postCollection();
+ verify.accept((ValueCount) aggregator.buildAggregation(0L));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index d920c6a67b..316277973f 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -1550,30 +1550,6 @@ public class SearchQueryIT extends ESIntegTestCase {
assertHitCount(searchResponse, 2);
}
- public void testMatchQueryWithStackedStems() throws IOException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.index.type", "custom")
- .put("index.analysis.analyzer.index.tokenizer", "standard")
- .put("index.analysis.analyzer.index.filter", "lowercase")
- .put("index.analysis.analyzer.search.type", "custom")
- .put("index.analysis.analyzer.search.tokenizer", "standard")
- .putArray("index.analysis.analyzer.search.filter", "lowercase", "keyword_repeat", "porter_stem", "unique_stem")
- .put("index.analysis.filter.unique_stem.type", "unique")
- .put("index.analysis.filter.unique_stem.only_on_same_position", true));
- assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
-
- client().prepareIndex("test", "test", "1").setSource("text", "the fox runs across the street").get();
- refresh();
- SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fox runs").operator(Operator.AND)).get();
- assertHitCount(searchResponse, 1);
-
- client().prepareIndex("test", "test", "2").setSource("text", "run fox run").get();
- refresh();
- searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fox runs").operator(Operator.AND)).get();
- assertHitCount(searchResponse, 2);
- }
-
public void testQueryStringWithSynonyms() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
index 864f060be0..46a94e641c 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
@@ -694,107 +694,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
}
- public void testPhraseBoundaryCases() throws IOException, URISyntaxException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) // to get reliable statistics we should put this all into one shard
- .put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
- .put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
- .put("index.analysis.analyzer.ngram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.ngram.filter", "my_shingle2", "lowercase")
- .put("index.analysis.analyzer.myDefAnalyzer.tokenizer", "standard")
- .putArray("index.analysis.analyzer.myDefAnalyzer.filter", "shingle", "lowercase")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", false)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 2)
- .put("index.analysis.filter.my_shingle2.type", "shingle")
- .put("index.analysis.filter.my_shingle2.output_unigrams", true)
- .put("index.analysis.filter.my_shingle2.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle2.max_shingle_size", 2));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder()
- .startObject().startObject("type1")
- .startObject("properties")
- .startObject("body").field("type", "text").field("analyzer", "body").endObject()
- .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
- .startObject("ngram").field("type", "text").field("analyzer", "ngram").endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- String[] strings = new String[]{
- "Xorr the God-Jewel",
- "Grog the God-Crusher",
- "Xorn",
- "Walter Newell",
- "Wanda Maximoff",
- "Captain America",
- "American Ace",
- "Wundarr the Aquarian",
- "Will o' the Wisp",
- "Xemnu the Titan"
- };
- for (String line : strings) {
- index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
- }
- refresh();
-
- NumShards numShards = getNumShards("test");
-
- // Lets make sure some things throw exceptions
- PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
- .analyzer("body")
- .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always"))
- .realWordErrorLikelihood(0.95f)
- .maxErrors(0.5f)
- .size(1);
- phraseSuggestion.clearCandidateGenerators().analyzer(null);
- try {
- searchSuggest("xor the got-jewel", numShards.numPrimaries, Collections.singletonMap("simple_phrase", phraseSuggestion));
- fail("analyzer does only produce ngrams");
- } catch (SearchPhaseExecutionException e) {
- }
-
- phraseSuggestion.analyzer("bigram");
- try {
- searchSuggest("xor the got-jewel", numShards.numPrimaries, Collections.singletonMap("simple_phrase", phraseSuggestion));
- fail("analyzer does only produce ngrams");
- } catch (SearchPhaseExecutionException e) {
- }
-
- // Now we'll make sure some things don't
- phraseSuggestion.forceUnigrams(false);
- searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
-
- // Field doesn't produce unigrams but the analyzer does
- phraseSuggestion.forceUnigrams(true).analyzer("ngram");
- searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
-
- phraseSuggestion = phraseSuggestion("ngram")
- .analyzer("myDefAnalyzer")
- .forceUnigrams(true)
- .realWordErrorLikelihood(0.95f)
- .maxErrors(0.5f)
- .size(1)
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"));
- Suggest suggest = searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
-
- // "xorr the god jewel" and and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by
- // earlier term (xorn):
- assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel");
-
- phraseSuggestion.analyzer(null);
- suggest = searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
-
- // In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the
- // probability that the term is not in the dictionary but is NOT a misspelling is relatively high in this case compared to the
- // others that have no n-gram with the other terms in the phrase :) you can set this realWorldErrorLikelyhood
- assertSuggestion(suggest, 0, "simple_phrase", "xorr the god jewel");
- }
-
public void testDifferentShardSize() throws Exception {
createIndex("test");
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java b/core/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java
new file mode 100644
index 0000000000..836193423f
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.threadpool;
+
+import org.elasticsearch.common.settings.Settings;
+
+import static org.hamcrest.CoreMatchers.containsString;
+
+public class AutoQueueAdjustingExecutorBuilderTests extends ESThreadPoolTestCase {
+
+ public void testValidatingMinMaxSettings() throws Exception {
+ Settings settings = Settings.builder()
+ .put("thread_pool.search.min_queue_size", randomIntBetween(30, 100))
+ .put("thread_pool.search.max_queue_size", randomIntBetween(1,25))
+ .build();
+ try {
+ new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 15, 1, 100, 10);
+ fail("should have thrown an exception");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("Failed to parse value"));
+ }
+ }
+
+}