-rw-r--r--  build.gradle | 1
-rw-r--r--  buildSrc/src/main/resources/checkstyle_suppressions.xml | 3
-rw-r--r--  client/rest-high-level/build.gradle | 2
-rw-r--r--  client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java | 4
-rw-r--r--  client/transport/build.gradle | 1
-rw-r--r--  client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java | 7
-rw-r--r--  client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java | 18
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/Discovery.java | 18
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/TribeDiscovery.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java | 55
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java | 69
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/env/Environment.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/Gateway.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/GatewayService.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/index/IndexWarmer.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java | 24
-rw-r--r--  core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java | 35
-rw-r--r--  core/src/main/java/org/elasticsearch/node/Node.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/SearchModule.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java | 17
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TcpTransport.java | 16
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/Transport.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportService.java | 39
-rw-r--r--  core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java | 149
-rw-r--r--  core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java | 23
-rw-r--r--  core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java | 186
-rw-r--r--  core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java | 27
-rw-r--r--  core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java | 25
-rw-r--r--  core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java | 18
-rw-r--r--  core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java | 30
-rw-r--r--  core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java | 60
-rw-r--r--  core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java | 25
-rw-r--r--  core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java | 3
-rw-r--r--  core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java | 74
-rw-r--r--  core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java | 44
-rw-r--r--  core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java | 30
-rw-r--r--  core/src/test/java/org/elasticsearch/search/SearchModuleTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java | 4
-rw-r--r--  core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java | 458
-rw-r--r--  core/src/test/java/org/elasticsearch/test/NoopDiscovery.java | 17
-rw-r--r--  core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java | 4
-rw-r--r--  core/src/test/resources/config/elasticsearch.properties | 2
-rw-r--r--  core/src/test/resources/config/elasticsearch.yml (renamed from core/src/test/resources/config/elasticsearch.yaml) | 0
-rw-r--r--  docs/java-api/aggregations/bucket/children-aggregation.asciidoc | 2
-rw-r--r--  docs/reference/setup/sysconfig/swap.asciidoc | 94
-rw-r--r--  modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java | 2
-rw-r--r--  modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java | 2
-rw-r--r--  modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java | 2
-rw-r--r--  modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java | 8
-rw-r--r--  modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java | 6
-rw-r--r--  modules/parent-join/build.gradle | 24
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/ParentJoinPlugin.java | 54
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/Children.java (renamed from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/Children.java) | 2
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java (renamed from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java) | 2
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregatorFactory.java (renamed from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorFactory.java) | 2
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/InternalChildren.java (renamed from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/InternalChildren.java) | 2
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationBuilders.java (renamed from core/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java) | 18
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java (renamed from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java) | 2
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedChildren.java (renamed from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParsedChildren.java) | 2
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java (renamed from core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java) | 38
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java (renamed from core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java) | 9
-rw-r--r--  modules/parent-join/src/main/java/org/elasticsearch/join/query/JoinQueryBuilders.java | 50
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java | 37
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java) | 73
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java) | 13
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/bucket/children/InternalChildrenTests.java) | 15
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregatorTests.java) | 2
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java (renamed from core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java) | 348
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java (renamed from core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java) | 81
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java (renamed from core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java) | 30
-rw-r--r--  modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java | 568
-rw-r--r--  modules/parent-join/src/test/resources/rest-api-spec/test/10_basic.yaml | 48
-rw-r--r--  modules/percolator/build.gradle | 4
-rw-r--r--  modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java | 7
-rw-r--r--  modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java | 8
-rw-r--r--  modules/reindex/build.gradle | 2
-rw-r--r--  modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java | 26
-rw-r--r--  modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml | 79
-rw-r--r--  modules/transport-netty4/build.gradle | 14
-rw-r--r--  modules/transport-netty4/licenses/netty-buffer-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-buffer-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-codec-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-codec-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-codec-http-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-codec-http-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-common-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-common-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-handler-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-handler-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-resolver-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-resolver-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-transport-4.1.10.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/licenses/netty-transport-4.1.11.Final.jar.sha1 | 1
-rw-r--r--  modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java | 6
-rw-r--r--  modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java | 2
-rw-r--r--  modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy | 4
-rw-r--r--  plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java | 4
-rw-r--r--  plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java | 2
-rw-r--r--  plugins/repository-hdfs/build.gradle | 2
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/build.gradle (renamed from qa/smoke-test-reindex-with-painless/build.gradle) | 4
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java (renamed from qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/10_script.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/20_broken.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/20_broken.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/50_reindex_with_parentchild.yaml | 79
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/20_broken.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/20_broken.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/30_timeout.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/30_timeout.yaml) | 0
-rw-r--r--  qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml (renamed from qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml) | 0
-rw-r--r--  qa/vagrant/build.gradle | 2
-rw-r--r--  rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yaml | 39
-rw-r--r--  settings.gradle | 3
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java | 2
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java) | 0
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java) | 12
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java (renamed from core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java) | 8
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java | 87
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java | 9
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java | 2
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java | 2
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java | 5
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java | 58
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java | 3
166 files changed, 2191 insertions, 1606 deletions
diff --git a/build.gradle b/build.gradle
index 09748ea1e8..31784a1798 100644
--- a/build.gradle
+++ b/build.gradle
@@ -138,6 +138,7 @@ subprojects {
"org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4',
"org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex',
"org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache',
+ "org.elasticsearch.plugin:parent-join-client:${version}": ':modules:parent-join',
"org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator',
]
project.afterEvaluate {
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index de14fe158e..77b6e0ee93 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -620,7 +620,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoolQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoostingQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]GeoDistanceQueryBuilderTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]HasChildQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MoreLikeThisQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MultiMatchQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanMultiTermQueryBuilderTests.java" checks="LineLength" />
@@ -689,7 +688,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]MultiValueModeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]SearchWithRejectionsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]MissingValueIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ChildrenIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]DateHistogramIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]DateHistogramOffsetIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]GeoDistanceIT.java" checks="LineLength" />
@@ -716,7 +714,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomExceptionsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomIOExceptionsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportTwoNodesSearchIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ChildQuerySearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ParentFieldLoadingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoBoundingBoxIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoFilterIT.java" checks="LineLength" />
diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index 44ac53d9d5..7c5df5760a 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -29,6 +29,8 @@ dependencies {
testCompile "org.elasticsearch.client:test:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
+ // for parent/child testing
+ testCompile "org.elasticsearch.plugin:parent-join-client:${version}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java
index f01e4824b3..269606c9cc 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java
@@ -51,8 +51,6 @@ import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery;
import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;
import static org.elasticsearch.index.query.QueryBuilders.geoPolygonQuery;
import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
@@ -79,6 +77,8 @@ import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
/**
* Examples of using the transport client that are imported into the transport client documentation.
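
As the import change above shows, hasChildQuery and hasParentQuery now come from the parent-join module's JoinQueryBuilders rather than core QueryBuilders. A minimal sketch of a caller updated for the move, assuming the 5.x-era signatures (type, query, score mode/flag); the class name and field values below are illustrative only:

    import org.apache.lucene.search.join.ScoreMode;
    import org.elasticsearch.index.query.QueryBuilder;

    import static org.elasticsearch.index.query.QueryBuilders.termQuery;
    import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
    import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;

    class JoinQueryExamples {
        // parents whose "comment" children match the term
        QueryBuilder parentsWithMatchingChild() {
            return hasChildQuery("comment", termQuery("message", "elasticsearch"), ScoreMode.None);
        }

        // children whose "blog" parent matches the term (parent score not propagated)
        QueryBuilder childrenWithMatchingParent() {
            return hasParentQuery("blog", termQuery("title", "release"), false);
        }
    }
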
diff --git a/client/transport/build.gradle b/client/transport/build.gradle
index 77833c1f26..b2edc9c8fc 100644
--- a/client/transport/build.gradle
+++ b/client/transport/build.gradle
@@ -31,6 +31,7 @@ dependencies {
compile "org.elasticsearch.plugin:reindex-client:${version}"
compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
compile "org.elasticsearch.plugin:percolator-client:${version}"
+ compile "org.elasticsearch.plugin:parent-join-client:${version}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
diff --git a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java
index 3233470a25..1669373829 100644
--- a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java
+++ b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java
@@ -26,6 +26,7 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.reindex.ReindexPlugin;
+import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.percolator.PercolatorPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.mustache.MustachePlugin;
@@ -41,7 +42,8 @@ import java.util.concurrent.TimeUnit;
* {@link Netty4Plugin},
* {@link ReindexPlugin},
* {@link PercolatorPlugin},
- * and {@link MustachePlugin}
+ * {@link MustachePlugin},
+ * {@link ParentJoinPlugin}
* plugins for the client. These plugins are all the required modules for Elasticsearch.
*/
@SuppressWarnings({"unchecked","varargs"})
@@ -83,7 +85,8 @@ public class PreBuiltTransportClient extends TransportClient {
Netty4Plugin.class,
ReindexPlugin.class,
PercolatorPlugin.class,
- MustachePlugin.class));
+ MustachePlugin.class,
+ ParentJoinPlugin.class));
/**
* Creates a new transport client with pre-installed plugins.
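
With ParentJoinPlugin added to the pre-installed plugin list above, a pre-built transport client supports parent/child queries without any extra plugin argument. A minimal construction sketch; the settings and usage are placeholders, not from this commit:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.transport.client.PreBuiltTransportClient;

    class ClientBootstrap {
        public static void main(String[] args) {
            // netty4, reindex, percolator, mustache and parent-join come pre-installed
            try (PreBuiltTransportClient client = new PreBuiltTransportClient(Settings.EMPTY)) {
                // add transport addresses and issue requests here
            }
        }
    }
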
diff --git a/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java b/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java
index 161b1d7e89..dbcf357112 100644
--- a/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java
+++ b/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java
@@ -25,6 +25,7 @@ import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.reindex.ReindexPlugin;
+import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.percolator.PercolatorPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.mustache.MustachePlugin;
@@ -50,7 +51,8 @@ public class PreBuiltTransportClientTests extends RandomizedTest {
@Test
public void testInstallPluginTwice() {
- for (Class<? extends Plugin> plugin : Arrays.asList(ReindexPlugin.class, PercolatorPlugin.class, MustachePlugin.class)) {
+ for (Class<? extends Plugin> plugin :
+ Arrays.asList(ParentJoinPlugin.class, ReindexPlugin.class, PercolatorPlugin.class, MustachePlugin.class)) {
try {
new PreBuiltTransportClient(Settings.EMPTY, plugin);
fail("exception expected");
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java
index c933156fcb..6d4cb83934 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java
@@ -60,7 +60,7 @@ public class GetRepositoriesResponse extends ActionResponse implements Iterable<
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
- List<RepositoryMetaData> repositoryListBuilder = new ArrayList<>();
+ List<RepositoryMetaData> repositoryListBuilder = new ArrayList<>(size);
for (int j = 0; j < size; j++) {
repositoryListBuilder.add(new RepositoryMetaData(
in.readString(),
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
index 308a846c90..0d1e5eda7f 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
@@ -59,7 +59,7 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContentOb
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
- List<SnapshotInfo> builder = new ArrayList<>();
+ List<SnapshotInfo> builder = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
builder.add(new SnapshotInfo(in));
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java
index 27276b27dd..41cacf2a85 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java
@@ -65,8 +65,8 @@ public class ClusterStatsNodes implements ToXContent {
this.plugins = new HashSet<>();
Set<InetAddress> seenAddresses = new HashSet<>(nodeResponses.size());
- List<NodeInfo> nodeInfos = new ArrayList<>();
- List<NodeStats> nodeStats = new ArrayList<>();
+ List<NodeInfo> nodeInfos = new ArrayList<>(nodeResponses.size());
+ List<NodeStats> nodeStats = new ArrayList<>(nodeResponses.size());
for (ClusterStatsNodeResponse nodeResponse : nodeResponses) {
nodeInfos.add(nodeResponse.nodeInfo());
nodeStats.add(nodeResponse.nodeStats());
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java
index 6c2e462752..36bfa81a33 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java
@@ -114,7 +114,7 @@ public class GetIndexResponse extends ActionResponse {
for (int i = 0; i < aliasesSize; i++) {
String key = in.readString();
int valueSize = in.readVInt();
- List<AliasMetaData> aliasEntryBuilder = new ArrayList<>();
+ List<AliasMetaData> aliasEntryBuilder = new ArrayList<>(valueSize);
for (int j = 0; j < valueSize; j++) {
aliasEntryBuilder.add(new AliasMetaData(in));
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java
index b62cfd714b..4ddbe54199 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java
@@ -81,13 +81,13 @@ public class BaseTasksResponse extends ActionResponse {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
- List<TaskOperationFailure> taskFailures = new ArrayList<>();
+ List<TaskOperationFailure> taskFailures = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
taskFailures.add(new TaskOperationFailure(in));
}
size = in.readVInt();
this.taskFailures = Collections.unmodifiableList(taskFailures);
- List<FailedNodeException> nodeFailures = new ArrayList<>();
+ List<FailedNodeException> nodeFailures = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
nodeFailures.add(new FailedNodeException(in));
}
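
The readFrom changes in this and the preceding response classes all apply the same micro-optimization: the element count has just been read from the stream, so the list can be allocated at its final size instead of growing from the default capacity. A generic sketch of the pattern, where the Supplier stands in for the per-element deserializer:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Supplier;

    class PreSizedListExample {
        static <T> List<T> readList(int size, Supplier<T> readOne) {
            List<T> items = new ArrayList<>(size); // no intermediate grow-and-copy steps
            for (int i = 0; i < size; i++) {
                items.add(readOne.get());
            }
            return items;
        }
    }
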
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index d0462efc39..74fc600d62 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -298,7 +298,6 @@ final class Bootstrap {
throw new BootstrapException(e);
}
checkForCustomConfFile();
- checkConfigExtension(environment.configExtension());
if (environment.pidFile() != null) {
try {
@@ -414,14 +413,6 @@ final class Bootstrap {
}
}
- // pkg private for tests
- static void checkConfigExtension(String extension) {
- if (".yml".equals(extension) || ".json".equals(extension)) {
- final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(Bootstrap.class));
- deprecationLogger.deprecated("elasticsearch{} is deprecated; rename your configuration file to elasticsearch.yaml", extension);
- }
- }
-
@SuppressForbidden(reason = "Allowed to exit explicitly in bootstrap phase")
private static void exit(int status) {
System.exit(status);
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
index 145208baf0..3e50b4d74c 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
@@ -469,14 +469,17 @@ final class TransportClientNodesService extends AbstractComponent implements Clo
*/
Transport.Connection connectionToClose = null;
- @Override
- public void onAfter() {
- IOUtils.closeWhileHandlingException(connectionToClose);
+ void onDone() {
+ try {
+ IOUtils.closeWhileHandlingException(connectionToClose);
+ } finally {
+ latch.countDown();
+ }
}
@Override
public void onFailure(Exception e) {
- latch.countDown();
+ onDone();
if (e instanceof ConnectTransportException) {
logger.debug((Supplier<?>)
() -> new ParameterizedMessage("failed to connect to node [{}], ignoring...", nodeToPing), e);
@@ -522,7 +525,7 @@ final class TransportClientNodesService extends AbstractComponent implements Clo
@Override
public void handleResponse(ClusterStateResponse response) {
clusterStateResponses.put(nodeToPing, response);
- latch.countDown();
+ onDone();
}
@Override
@@ -532,9 +535,8 @@ final class TransportClientNodesService extends AbstractComponent implements Clo
"failed to get local cluster state for {}, disconnecting...", nodeToPing), e);
try {
hostFailureListener.onNodeDisconnected(nodeToPing, e);
- }
- finally {
- latch.countDown();
+ } finally {
+ onDone();
}
}
});
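
The hunk above routes both the response and the failure path through a single onDone() so the probe connection is always closed and the latch is always counted down. A self-contained sketch of that shape, using illustrative names and plain java.io types instead of the Elasticsearch classes:

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.concurrent.CountDownLatch;

    class ProbeCallback {
        private final CountDownLatch latch;
        private volatile Closeable connectionToClose; // may remain null if no connection was opened

        ProbeCallback(CountDownLatch latch) {
            this.latch = latch;
        }

        // called from both the success and the failure callback
        void onDone() {
            try {
                if (connectionToClose != null) {
                    connectionToClose.close();
                }
            } catch (IOException e) {
                // best-effort close, mirroring closeWhileHandlingException
            } finally {
                latch.countDown(); // never leave the waiting caller blocked
            }
        }
    }
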
diff --git a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java
index 253206222b..f09e1dd9cd 100644
--- a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java
+++ b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java
@@ -125,7 +125,7 @@ public class ClusterBlock implements Streamable, ToXContent {
id = in.readVInt();
description = in.readString();
final int len = in.readVInt();
- ArrayList<ClusterBlockLevel> levels = new ArrayList<>();
+ ArrayList<ClusterBlockLevel> levels = new ArrayList<>(len);
for (int i = 0; i < len; i++) {
levels.add(ClusterBlockLevel.fromId(in.readVInt()));
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index d4c6ec587d..711d685c1d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -29,6 +29,8 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
@@ -55,6 +57,8 @@ public class IndexNameExpressionResolver extends AbstractComponent {
private final List<ExpressionResolver> expressionResolvers;
private final DateMathExpressionResolver dateMathExpressionResolver;
+ private static final DeprecationLogger DEPRECATION_LOGGER =
+ new DeprecationLogger(Loggers.getLogger(IndexNameExpressionResolver.class));
public IndexNameExpressionResolver(Settings settings) {
super(settings);
@@ -159,7 +163,6 @@ public class IndexNameExpressionResolver extends AbstractComponent {
if (indexExpressions.length == 1) {
failNoIndices = options.allowNoIndices() == false;
}
-
List<String> expressions = Arrays.asList(indexExpressions);
for (ExpressionResolver expressionResolver : expressionResolvers) {
expressions = expressionResolver.resolve(context, expressions);
@@ -588,6 +591,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
private Set<String> innerResolve(Context context, List<String> expressions, IndicesOptions options, MetaData metaData) {
Set<String> result = null;
boolean wildcardSeen = false;
+ boolean plusSeen = false;
for (int i = 0; i < expressions.size(); i++) {
String expression = expressions.get(i);
if (aliasOrIndexExists(metaData, expression)) {
@@ -602,6 +606,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
boolean add = true;
if (expression.charAt(0) == '+') {
// if its the first, add empty result set
+ plusSeen = true;
if (i == 0) {
result = new HashSet<>();
}
@@ -649,6 +654,9 @@ public class IndexNameExpressionResolver extends AbstractComponent {
wildcardSeen = true;
}
}
+ if (plusSeen) {
+ DEPRECATION_LOGGER.deprecated("support for '+' as part of index expressions is deprecated");
+ }
return result;
}
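
The resolver change records that a '+' prefix was seen while expressions are resolved and emits a single deprecation warning per call afterwards. From a caller's perspective, an index expression like the one below still resolves as before but now produces that warning; the index names are made up for illustration:

    import org.elasticsearch.action.search.SearchRequest;

    class PlusPrefixExample {
        static SearchRequest build() {
            // "+logs-2017" behaves like "logs-2017" but the '+' form is now deprecated;
            // the '-' exclusion syntax is unaffected
            return new SearchRequest("+logs-2017", "-logs-2016*");
        }
    }
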
diff --git a/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java
index ef3135af24..aa5c74e15e 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java
@@ -24,9 +24,14 @@ import org.elasticsearch.cluster.ClusterStateTaskListener;
import java.util.function.Supplier;
-@FunctionalInterface
public interface ClusterApplier {
/**
+ * Sets the initial state for this applier. Should only be called once.
+ * @param initialState the initial state to set
+ */
+ void setInitialState(ClusterState initialState);
+
+ /**
* Method to invoke when a new cluster state is available to be applied
*
* @param source information where the cluster state came from
diff --git a/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java
index 540881718f..b029f10f5f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java
@@ -116,6 +116,7 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements
this.nodeConnectionsService = nodeConnectionsService;
}
+ @Override
public void setInitialState(ClusterState initialState) {
if (lifecycle.started()) {
throw new IllegalStateException("can't set initial state when started");
diff --git a/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java b/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java
index 3321b75f4e..6928033c69 100644
--- a/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java
+++ b/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java
@@ -20,6 +20,7 @@
package org.elasticsearch.common.inject;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -28,9 +29,7 @@ public class ModulesBuilder implements Iterable<Module> {
private final List<Module> modules = new ArrayList<>();
public ModulesBuilder add(Module... newModules) {
- for (Module module : newModules) {
- modules.add(module);
- }
+ Collections.addAll(modules, newModules);
return this;
}
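
Collections.addAll(collection, array) is the idiomatic replacement for the hand-written loop removed above, and for a varargs source it avoids the wrapper list that addAll(Arrays.asList(...)) would create. A tiny standalone illustration:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    class AddAllExample {
        static List<String> collect(String... items) {
            List<String> all = new ArrayList<>();
            Collections.addAll(all, items); // same effect as looping over items and calling all.add(item)
            return all;
        }
    }
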
diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java
index edd45c290a..cb434a9036 100644
--- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java
+++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java
@@ -49,7 +49,7 @@ class AssistedConstructor<T> {
Annotation[][] annotations = constructor.getParameterAnnotations();
List<Type> typeList = new ArrayList<>();
- allParameters = new ArrayList<>();
+ allParameters = new ArrayList<>(parameterTypes.size());
// categorize params as @Assisted or @Injected
for (int i = 0; i < parameterTypes.size(); i++) {
diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java
index a76428e829..dbfc1f0af1 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java
@@ -124,9 +124,7 @@ public class MultiPhrasePrefixQuery extends Query {
Term[][] terms = new Term[termArrays.size()][];
for (int i = 0; i < termArrays.size(); i++) {
terms[i] = new Term[termArrays.get(i).length];
- for (int j = 0; j < termArrays.get(i).length; j++) {
- terms[i][j] = termArrays.get(i)[j];
- }
+ System.arraycopy(termArrays.get(i), 0, terms[i], 0, termArrays.get(i).length);
}
return terms;
}
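
The MultiPhrasePrefixQuery hunk replaces an element-by-element loop with System.arraycopy, which copies each row in one bulk operation. A minimal sketch of the same row-wise copy for a plain 2-D array:

    class ArrayCopyExample {
        // copy each row of src into a new 2-D array, mirroring getTermArrays() above
        static int[][] deepCopy(int[][] src) {
            int[][] dst = new int[src.length][];
            for (int i = 0; i < src.length; i++) {
                dst[i] = new int[src[i].length];
                System.arraycopy(src[i], 0, dst[i], 0, src[i].length);
            }
            return dst;
        }
    }
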
diff --git a/core/src/main/java/org/elasticsearch/discovery/Discovery.java b/core/src/main/java/org/elasticsearch/discovery/Discovery.java
index 7f68f417fc..3842e68d10 100644
--- a/core/src/main/java/org/elasticsearch/discovery/Discovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/Discovery.java
@@ -21,7 +21,6 @@ package org.elasticsearch.discovery;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterChangedEvent;
-import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.LifecycleComponent;
@@ -48,18 +47,6 @@ public interface Discovery extends LifecycleComponent {
*/
void publish(ClusterChangedEvent clusterChangedEvent, AckListener ackListener);
- /**
- * Returns the initial cluster state provided by the discovery module. Used by
- * {@link org.elasticsearch.cluster.service.ClusterApplierService} as initial applied state.
- */
- ClusterState getInitialClusterState();
-
- /**
- * Returns latest cluster state used by the discovery module. Used by {@link org.elasticsearch.cluster.service.MasterService} to
- * calculate the next prospective state to publish.
- */
- ClusterState clusterState();
-
interface AckListener {
void onNodeAck(DiscoveryNode node, @Nullable Exception e);
void onTimeout();
@@ -90,9 +77,4 @@ public interface Discovery extends LifecycleComponent {
*/
void startInitialJoin();
- /***
- * @return the current value of minimum master nodes, or -1 for not set
- */
- int getMinimumMasterNodes();
-
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java
index b2367c6e95..c410cb88d6 100644
--- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java
+++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java
@@ -86,8 +86,8 @@ public class DiscoveryModule {
discoveryTypes.put("zen",
() -> new ZenDiscovery(settings, threadPool, transportService, namedWriteableRegistry, masterService, clusterApplier,
clusterSettings, hostsProvider, allocationService));
- discoveryTypes.put("tribe", () -> new TribeDiscovery(settings, transportService, clusterApplier));
- discoveryTypes.put("single-node", () -> new SingleNodeDiscovery(settings, transportService, clusterApplier));
+ discoveryTypes.put("tribe", () -> new TribeDiscovery(settings, transportService, masterService, clusterApplier));
+ discoveryTypes.put("single-node", () -> new SingleNodeDiscovery(settings, transportService, masterService, clusterApplier));
for (DiscoveryPlugin plugin : plugins) {
plugin.getDiscoveryTypes(threadPool, transportService, namedWriteableRegistry,
masterService, clusterApplier, clusterSettings, hostsProvider, allocationService).entrySet().forEach(entry -> {
diff --git a/core/src/main/java/org/elasticsearch/discovery/TribeDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/TribeDiscovery.java
index 9f802cc270..f3200be456 100644
--- a/core/src/main/java/org/elasticsearch/discovery/TribeDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/TribeDiscovery.java
@@ -25,6 +25,7 @@ import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterApplier;
+import org.elasticsearch.cluster.service.MasterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.single.SingleNodeDiscovery;
@@ -44,26 +45,23 @@ import static org.elasticsearch.tribe.TribeService.TRIBE_WRITE_BLOCK;
public class TribeDiscovery extends SingleNodeDiscovery implements Discovery {
@Inject
- public TribeDiscovery(Settings settings, TransportService transportService, ClusterApplier clusterApplier) {
- super(settings, transportService, clusterApplier);
+ public TribeDiscovery(Settings settings, TransportService transportService,
+ MasterService masterService, ClusterApplier clusterApplier) {
+ super(settings, transportService, masterService, clusterApplier);
}
@Override
- public synchronized ClusterState getInitialClusterState() {
- if (initialState == null) {
- ClusterBlocks.Builder clusterBlocks = ClusterBlocks.builder(); // don't add no_master / state recovery block
- if (BLOCKS_WRITE_SETTING.get(settings)) {
- clusterBlocks.addGlobalBlock(TRIBE_WRITE_BLOCK);
- }
- if (BLOCKS_METADATA_SETTING.get(settings)) {
- clusterBlocks.addGlobalBlock(TRIBE_METADATA_BLOCK);
- }
- DiscoveryNode localNode = transportService.getLocalNode();
- initialState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings))
- .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).build())
- .blocks(clusterBlocks).build();
+ protected ClusterState createInitialState(DiscoveryNode localNode) {
+ ClusterBlocks.Builder clusterBlocks = ClusterBlocks.builder(); // don't add no_master / state recovery block
+ if (BLOCKS_WRITE_SETTING.get(settings)) {
+ clusterBlocks.addGlobalBlock(TRIBE_WRITE_BLOCK);
}
- return initialState;
+ if (BLOCKS_METADATA_SETTING.get(settings)) {
+ clusterBlocks.addGlobalBlock(TRIBE_METADATA_BLOCK);
+ }
+ return ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings))
+ .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).build())
+ .blocks(clusterBlocks).build();
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java
index 1152696179..a61253b7c2 100644
--- a/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java
@@ -28,6 +28,7 @@ import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterApplier;
+import org.elasticsearch.cluster.service.MasterService;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
@@ -48,13 +49,13 @@ public class SingleNodeDiscovery extends AbstractLifecycleComponent implements D
protected final TransportService transportService;
private final ClusterApplier clusterApplier;
- protected volatile ClusterState initialState;
private volatile ClusterState clusterState;
public SingleNodeDiscovery(final Settings settings, final TransportService transportService,
- ClusterApplier clusterApplier) {
+ final MasterService masterService, final ClusterApplier clusterApplier) {
super(Objects.requireNonNull(settings));
this.transportService = Objects.requireNonNull(transportService);
+ masterService.setClusterStateSupplier(() -> clusterState);
this.clusterApplier = clusterApplier;
}
@@ -82,7 +83,7 @@ public class SingleNodeDiscovery extends AbstractLifecycleComponent implements D
e);
}
};
- clusterApplier.onNewClusterState("apply-locally-on-node[" + event.source() + "]", this::clusterState, listener);
+ clusterApplier.onNewClusterState("apply-locally-on-node[" + event.source() + "]", () -> clusterState, listener);
try {
latch.await();
@@ -92,48 +93,38 @@ public class SingleNodeDiscovery extends AbstractLifecycleComponent implements D
}
@Override
- public synchronized ClusterState getInitialClusterState() {
- if (initialState == null) {
- DiscoveryNode localNode = transportService.getLocalNode();
- initialState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings))
- .nodes(DiscoveryNodes.builder().add(localNode)
- .localNodeId(localNode.getId())
- .masterNodeId(localNode.getId())
- .build())
- .blocks(ClusterBlocks.builder()
- .addGlobalBlock(STATE_NOT_RECOVERED_BLOCK))
- .build();
- }
- return initialState;
- }
-
- @Override
- public ClusterState clusterState() {
- return clusterState;
- }
-
- @Override
public DiscoveryStats stats() {
return new DiscoveryStats((PendingClusterStateStats) null);
}
@Override
public synchronized void startInitialJoin() {
+ if (lifecycle.started() == false) {
+ throw new IllegalStateException("can't start initial join when not started");
+ }
// apply a fresh cluster state just so that state recovery gets triggered by GatewayService
// TODO: give discovery module control over GatewayService
- clusterState = ClusterState.builder(getInitialClusterState()).build();
- clusterApplier.onNewClusterState("single-node-start-initial-join", this::clusterState, (source, e) -> {});
+ clusterState = ClusterState.builder(clusterState).build();
+ clusterApplier.onNewClusterState("single-node-start-initial-join", () -> clusterState, (source, e) -> {});
}
@Override
- public int getMinimumMasterNodes() {
- return 1;
+ protected synchronized void doStart() {
+ // set initial state
+ DiscoveryNode localNode = transportService.getLocalNode();
+ clusterState = createInitialState(localNode);
+ clusterApplier.setInitialState(clusterState);
}
- @Override
- protected synchronized void doStart() {
- initialState = getInitialClusterState();
- clusterState = initialState;
+ protected ClusterState createInitialState(DiscoveryNode localNode) {
+ return ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings))
+ .nodes(DiscoveryNodes.builder().add(localNode)
+ .localNodeId(localNode.getId())
+ .masterNodeId(localNode.getId())
+ .build())
+ .blocks(ClusterBlocks.builder()
+ .addGlobalBlock(STATE_NOT_RECOVERED_BLOCK))
+ .build();
}
@Override
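
Taken together, the SingleNodeDiscovery and TribeDiscovery hunks move initial-state creation into doStart(): the base class builds the state once through an overridable createInitialState(...) hook and seeds the cluster applier with it, while subclasses override only the hook. A stripped-down sketch of that template-method shape; every name below is illustrative, not the Elasticsearch API:

    class InitialStateTemplateExample {
        interface Applier {
            void setInitialState(String state);
        }

        abstract static class BaseDiscovery {
            private final Applier applier;
            protected volatile String clusterState;

            BaseDiscovery(Applier applier) {
                this.applier = applier;
            }

            // runs once at startup: build the state via the hook, keep it, seed the applier
            final void doStart(String localNode) {
                clusterState = createInitialState(localNode);
                applier.setInitialState(clusterState);
            }

            // subclasses customize only what the initial state contains
            protected abstract String createInitialState(String localNode);
        }

        static class SingleNode extends BaseDiscovery {
            SingleNode(Applier applier) {
                super(applier);
            }

            @Override
            protected String createInitialState(String localNode) {
                return "local-master[" + localNode + "]";
            }
        }
    }
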
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java
index 3a68b2b4cd..d0a9a212bd 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java
@@ -116,7 +116,7 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing {
private final int limitPortCounts;
- private volatile PingContextProvider contextProvider;
+ private final PingContextProvider contextProvider;
private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger();
@@ -137,12 +137,13 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing {
private volatile boolean closed = false;
public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService transportService,
- UnicastHostsProvider unicastHostsProvider) {
+ UnicastHostsProvider unicastHostsProvider, PingContextProvider contextProvider) {
super(settings);
this.threadPool = threadPool;
this.transportService = transportService;
this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
this.hostsProvider = unicastHostsProvider;
+ this.contextProvider = contextProvider;
final int concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings);
if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) {
@@ -260,8 +261,7 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing {
}
@Override
- public void start(PingContextProvider contextProvider) {
- this.contextProvider = contextProvider;
+ public void start() {
}
/**
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
index d08b148554..09e6357ba5 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
@@ -143,9 +143,8 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
private final NodeRemovalClusterStateTaskExecutor nodeRemovalExecutor;
private final ClusterApplier clusterApplier;
- private final AtomicReference<ClusterState> state; // last committed cluster state
+ private final AtomicReference<ClusterState> committedState; // last committed cluster state
private final Object stateMutex = new Object();
- private volatile ClusterState initialState; // set lazily when discovery layer is started
public ZenDiscovery(Settings settings, ThreadPool threadPool, TransportService transportService,
NamedWriteableRegistry namedWriteableRegistry, MasterService masterService, ClusterApplier clusterApplier,
@@ -165,7 +164,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
this.sendLeaveRequest = SEND_LEAVE_REQUEST_SETTING.get(settings);
this.threadPool = threadPool;
this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
- this.state = new AtomicReference<>();
+ this.committedState = new AtomicReference<>();
this.masterElectionIgnoreNonMasters = MASTER_ELECTION_IGNORE_NON_MASTER_PINGS_SETTING.get(settings);
this.masterElectionWaitForJoinsTimeout = MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.get(settings);
@@ -214,6 +213,8 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
this.nodeJoinController = new NodeJoinController(masterService, allocationService, electMaster, settings);
this.nodeRemovalExecutor = new NodeRemovalClusterStateTaskExecutor(allocationService, electMaster, this::submitRejoin, logger);
+ masterService.setClusterStateSupplier(this::clusterState);
+
transportService.registerRequestHandler(
DISCOVERY_REJOIN_ACTION_NAME, RejoinClusterRequest::new, ThreadPool.Names.SAME, new RejoinClusterRequestHandler());
}
@@ -221,7 +222,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
// protected to allow overriding in tests
protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService,
UnicastHostsProvider hostsProvider) {
- return new UnicastZenPing(settings, threadPool, transportService, hostsProvider);
+ return new UnicastZenPing(settings, threadPool, transportService, hostsProvider, this);
}
@Override
@@ -229,12 +230,21 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
DiscoveryNode localNode = transportService.getLocalNode();
assert localNode != null;
synchronized (stateMutex) {
- initialState = getInitialClusterState();
- state.set(initialState);
+ // set initial state
+ assert committedState.get() == null;
+ assert localNode != null;
+ ClusterState initialState = ClusterState.builder(clusterName)
+ .blocks(ClusterBlocks.builder()
+ .addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)
+ .addGlobalBlock(discoverySettings.getNoMasterBlock()))
+ .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()))
+ .build();
+ committedState.set(initialState);
+ clusterApplier.setInitialState(initialState);
nodesFD.setLocalNode(localNode);
joinThreadControl.start();
}
- zenPing.start(this);
+ zenPing.start();
}
@Override
@@ -286,7 +296,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public ClusterState clusterState() {
- ClusterState clusterState = state.get();
+ ClusterState clusterState = committedState.get();
assert clusterState != null : "accessing cluster state before it is set";
return clusterState;
}
@@ -297,7 +307,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
assert newState.getNodes().isLocalNodeElectedMaster() : "Shouldn't publish state when not master " + clusterChangedEvent.source();
// state got changed locally (maybe because another master published to us)
- if (clusterChangedEvent.previousState() != this.state.get()) {
+ if (clusterChangedEvent.previousState() != this.committedState.get()) {
throw new FailedToCommitClusterStateException("state was mutated while calculating new CS update");
}
@@ -345,7 +355,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
});
synchronized (stateMutex) {
- if (clusterChangedEvent.previousState() != this.state.get()) {
+ if (clusterChangedEvent.previousState() != this.committedState.get()) {
throw new FailedToCommitClusterStateException("local state was mutated while CS update was published to other nodes");
}
@@ -371,22 +381,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
}
}
- @Override
- public synchronized ClusterState getInitialClusterState() {
- if (initialState == null) {
- assert state.get() == null;
- DiscoveryNode localNode = transportService.getLocalNode();
- assert localNode != null;
- initialState = ClusterState.builder(clusterName)
- .blocks(ClusterBlocks.builder()
- .addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)
- .addGlobalBlock(discoverySettings.getNoMasterBlock()))
- .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()))
- .build();
- }
- return initialState;
- }
-
/**
* Gets the current set of nodes involved in the node fault detection.
* NB: for testing purposes
@@ -405,11 +399,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
return discoverySettings;
}
- @Override
- public int getMinimumMasterNodes() {
- return electMaster.minimumMasterNodes();
- }
-
/**
* returns true if zen discovery is started and there is a currently a background thread active for (re)joining
* the cluster used for testing.
@@ -548,9 +537,9 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
}
// visible for testing
- void setState(ClusterState clusterState) {
+ void setCommittedState(ClusterState clusterState) {
synchronized (stateMutex) {
- state.set(clusterState);
+ committedState.set(clusterState);
}
}
@@ -693,7 +682,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
}
synchronized (stateMutex) {
// check if we have enough master nodes, if not, we need to move into joining the cluster again
- if (!electMaster.hasEnoughMasterNodes(state.get().nodes())) {
+ if (!electMaster.hasEnoughMasterNodes(committedState.get().nodes())) {
rejoin("not enough master nodes on change of minimum_master_nodes from [" + prevMinimumMasterNode + "] to [" + minimumMasterNodes + "]");
}
}
@@ -712,7 +701,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
logger.info((Supplier<?>) () -> new ParameterizedMessage("master_left [{}], reason [{}]", masterNode, reason), cause);
synchronized (stateMutex) {
- if (localNodeMaster() == false && masterNode.equals(state.get().nodes().getMasterNode())) {
+ if (localNodeMaster() == false && masterNode.equals(committedState.get().nodes().getMasterNode())) {
// flush any pending cluster states from old master, so it will not be set as master again
publishClusterState.pendingStatesQueue().failAllStatesAndClear(new ElasticsearchException("master left [{}]", reason));
rejoin("master left (reason = " + reason + ")");
@@ -725,7 +714,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
assert Thread.holdsLock(stateMutex);
final ClusterState newClusterState = publishClusterState.pendingStatesQueue().getNextClusterStateToProcess();
- final ClusterState currentState = state.get();
+ final ClusterState currentState = committedState.get();
final ClusterState adaptedNewClusterState;
// all pending states have been processed
if (newClusterState == null) {
@@ -801,7 +790,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
return false;
}
- state.set(adaptedNewClusterState);
+ committedState.set(adaptedNewClusterState);
// update failure detection only after the state has been updated to prevent race condition with handleLeaveRequest
// and handleNodeFailure as those check the current state to determine whether the failure is to be handled by this node
@@ -997,7 +986,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
protected void rejoin(String reason) {
assert Thread.holdsLock(stateMutex);
- ClusterState clusterState = state.get();
+ ClusterState clusterState = committedState.get();
logger.warn("{}, current nodes: {}", reason, clusterState.nodes());
nodesFD.stop();
@@ -1021,7 +1010,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
.nodes(discoveryNodes)
.build();
- state.set(clusterState);
+ committedState.set(clusterState);
clusterApplier.onNewClusterState(reason, this::clusterState, (source, e) -> {}); // don't wait for state to be applied
}
}
@@ -1123,7 +1112,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
}
logger.debug("got a ping from another master {}. resolving who should rejoin. current ping count: [{}]", pingRequest.masterNode(), pingsWhileMaster.get());
synchronized (stateMutex) {
- ClusterState currentState = state.get();
+ ClusterState currentState = committedState.get();
if (currentState.nodes().isLocalNodeElectedMaster()) {
pingsWhileMaster.set(0);
handleAnotherMaster(currentState, pingRequest.masterNode(), pingRequest.clusterStateVersion(), "node fd ping");
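Taken together, the ZenDiscovery hunks above make the renamed committedState field the single holder of the last committed cluster state, let ZenDiscovery install the cluster-state supplier on the MasterService itself (replacing the wiring removed from Node.java further below), and have doStart() seed the ClusterApplierService with an initial state carrying the not-recovered and no-master blocks. A minimal sketch of how a consumer reads the committed state through that supplier (illustrative only; names taken from the hunks):

    java.util.function.Supplier<ClusterState> stateSupplier = zenDiscovery::clusterState;
    ClusterState committed = stateSupplier.get();   // asserts that doStart() already set the initial state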
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java
index 016d2a5423..d91dad8aee 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java
@@ -40,7 +40,7 @@ import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK
public interface ZenPing extends Releasable {
- void start(PingContextProvider contextProvider);
+ void start();
void ping(Consumer<PingCollection> resultsConsumer, TimeValue timeout);
diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java
index df859ab7de..7ac442d716 100644
--- a/core/src/main/java/org/elasticsearch/env/Environment.java
+++ b/core/src/main/java/org/elasticsearch/env/Environment.java
@@ -71,8 +71,6 @@ public class Environment {
private final Settings settings;
- private final String configExtension;
-
private final Path[] dataFiles;
private final Path[] dataWithClusterFiles;
@@ -104,12 +102,6 @@ public class Environment {
private final Path tmpFile = PathUtils.get(System.getProperty("java.io.tmpdir"));
public Environment(Settings settings) {
- this(settings, null);
- }
-
- // Note: Do not use this ctor, it is for correct deprecation logging in 5.5 and will be removed
- public Environment(Settings settings, String configExtension) {
- this.configExtension = configExtension;
final Path homeFile;
if (PATH_HOME_SETTING.exists(settings)) {
homeFile = PathUtils.get(cleanPath(PATH_HOME_SETTING.get(settings)));
@@ -281,11 +273,6 @@ public class Environment {
}
}
- /** Return then extension of the config file that was loaded, or*/
- public String configExtension() {
- return configExtension;
- }
-
// TODO: rename all these "file" methods to "dir"
/**
* The config directory.
diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java
index 0d562fa4b2..4407e97d5a 100644
--- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java
+++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java
@@ -32,13 +32,12 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.discovery.zen.ElectMasterService;
import org.elasticsearch.index.Index;
import org.elasticsearch.indices.IndicesService;
import java.util.Arrays;
import java.util.Map;
-import java.util.function.Supplier;
public class Gateway extends AbstractComponent implements ClusterStateApplier {
@@ -48,18 +47,18 @@ public class Gateway extends AbstractComponent implements ClusterStateApplier {
private final TransportNodesListGatewayMetaState listGatewayMetaState;
- private final Supplier<Integer> minimumMasterNodesProvider;
+ private final int minimumMasterNodes;
private final IndicesService indicesService;
public Gateway(Settings settings, ClusterService clusterService, GatewayMetaState metaState,
- TransportNodesListGatewayMetaState listGatewayMetaState, Discovery discovery,
+ TransportNodesListGatewayMetaState listGatewayMetaState,
IndicesService indicesService) {
super(settings);
this.indicesService = indicesService;
this.clusterService = clusterService;
this.metaState = metaState;
this.listGatewayMetaState = listGatewayMetaState;
- this.minimumMasterNodesProvider = discovery::getMinimumMasterNodes;
+ this.minimumMasterNodes = ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(settings);
clusterService.addLowPriorityApplier(this);
}
@@ -69,7 +68,7 @@ public class Gateway extends AbstractComponent implements ClusterStateApplier {
TransportNodesListGatewayMetaState.NodesGatewayMetaState nodesState = listGatewayMetaState.list(nodesIds, null).actionGet();
- int requiredAllocation = Math.max(1, minimumMasterNodesProvider.get());
+ int requiredAllocation = Math.max(1, minimumMasterNodes);
if (nodesState.hasFailures()) {
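With the Discovery dependency gone from Gateway, the number of nodes that must report gateway metadata is derived once, at construction time, from the discovery.zen.minimum_master_nodes setting rather than being re-read from the discovery layer on each recovery. A small sketch of the resulting computation, assuming (as in Zen discovery) that the setting defaults to -1 when unset:

    int minimumMasterNodes = ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(settings);
    int requiredAllocation = Math.max(1, minimumMasterNodes);   // unset (-1) still requires at least one copy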
diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
index 0353deab6e..6e884af3b8 100644
--- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
+++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
@@ -42,7 +42,6 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
-import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
@@ -93,10 +92,10 @@ public class GatewayService extends AbstractLifecycleComponent implements Cluste
@Inject
public GatewayService(Settings settings, AllocationService allocationService, ClusterService clusterService,
ThreadPool threadPool, GatewayMetaState metaState,
- TransportNodesListGatewayMetaState listGatewayMetaState, Discovery discovery,
+ TransportNodesListGatewayMetaState listGatewayMetaState,
IndicesService indicesService) {
super(settings);
- this.gateway = new Gateway(settings, clusterService, metaState, listGatewayMetaState, discovery,
+ this.gateway = new Gateway(settings, clusterService, metaState, listGatewayMetaState,
indicesService);
this.allocationService = allocationService;
this.clusterService = clusterService;
@@ -227,10 +226,6 @@ public class GatewayService extends AbstractLifecycleComponent implements Cluste
}
}
- public Gateway getGateway() {
- return gateway;
- }
-
class GatewayRecoveryListener implements Gateway.GatewayStateRecoveredListener {
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java
index fdaad19e52..e177ca668f 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java
@@ -54,9 +54,8 @@ public final class IndexWarmer extends AbstractComponent {
ArrayList<Listener> list = new ArrayList<>();
final Executor executor = threadPool.executor(ThreadPool.Names.WARMER);
list.add(new FieldDataWarmer(executor));
- for (Listener listener : listeners) {
- list.add(listener);
- }
+
+ Collections.addAll(list, listeners);
this.listeners = Collections.unmodifiableList(list);
}
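The manual copy loop in IndexWarmer becomes Collections.addAll, which appends every array element to the target collection in one call; the same substitution appears in the IndicesFieldDataCache and DirectCandidateGenerator hunks further below. A trivial, self-contained sketch (java.util imports assumed):

    String[] extra = {"a", "b", "c"};
    List<String> all = new ArrayList<>();
    Collections.addAll(all, extra);   // same effect as: for (String s : extra) all.add(s);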
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
index 282edaeaf7..f112304562 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
@@ -55,8 +55,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]");
}
- List<CharFilterFactory> charFiltersList = new ArrayList<>();
String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
+ List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -65,8 +65,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
charFiltersList.add(charFilter);
}
- List<TokenFilterFactory> tokenFilterList = new ArrayList<>();
String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
+ List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
index 4f50a34dd9..2fcc987df6 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
@@ -50,8 +50,8 @@ public final class CustomNormalizerProvider extends AbstractIndexAnalyzerProvide
throw new IllegalArgumentException("Custom normalizer [" + name() + "] cannot configure a tokenizer");
}
- List<CharFilterFactory> charFiltersList = new ArrayList<>();
String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
+ List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -66,8 +66,8 @@ public final class CustomNormalizerProvider extends AbstractIndexAnalyzerProvide
charFiltersList.add(charFilter);
}
- List<TokenFilterFactory> tokenFilterList = new ArrayList<>();
String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
+ List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java
index b07492740a..ca8fcd1ffd 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java
@@ -318,7 +318,7 @@ public class DynamicTemplate implements ToXContent {
}
private List processList(List list, String name, String dynamicType) {
- List processedList = new ArrayList();
+ List processedList = new ArrayList(list.size());
for (Object value : list) {
if (value instanceof Map) {
value = processMap((Map<String, Object>) value, name, dynamicType);
diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java
index 6a3cd2f1ed..9d18e42138 100644
--- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java
@@ -250,8 +250,8 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>>
}
protected static final List<QueryBuilder> readQueries(StreamInput in) throws IOException {
- List<QueryBuilder> queries = new ArrayList<>();
int size = in.readVInt();
+ List<QueryBuilder> queries = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
queries.add(in.readNamedWriteable(QueryBuilder.class));
}
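This readQueries change, like the list-construction tweaks in CustomAnalyzerProvider, CustomNormalizerProvider, DynamicTemplate, GeoPolygonQueryBuilder and several aggregation classes below, follows one pattern: when the final element count is known before the loop, size the ArrayList up front so its backing array never has to grow while being filled. A generic sketch:

    int size = 1_000;                               // count known in advance (readVInt() above)
    List<Integer> values = new ArrayList<>(size);   // capacity allocated once
    for (int i = 0; i < size; i++) {
        values.add(i);                              // never triggers a grow-and-copy step
    }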
diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java
index cc25ac4772..1f9e81cecc 100644
--- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java
@@ -163,7 +163,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
- List<GeoPoint> shell = new ArrayList<GeoPoint>();
+ List<GeoPoint> shell = new ArrayList<>(this.shell.size());
for (GeoPoint geoPoint : this.shell) {
shell.add(new GeoPoint(geoPoint));
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
index dd23429dbd..b6eb84b03b 100644
--- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
@@ -195,7 +195,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
}
}
- InnerHitBuilder(InnerHitBuilder other, QueryBuilder query, String parentChildType, boolean ignoreUnmapped) {
+ // NORELEASE Do not use this ctor; it is public for the hasChild and hasParent queries, but only temporarily
+ public InnerHitBuilder(InnerHitBuilder other, QueryBuilder query, String parentChildType, boolean ignoreUnmapped) {
this(other);
this.query = query;
this.parentChildType = parentChildType;
@@ -751,7 +752,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
}
}
- static InnerHitBuilder rewrite(InnerHitBuilder original, QueryBuilder rewrittenQuery) {
+ // TODO: public for the hasParent and hasChild queries
+ public static InnerHitBuilder rewrite(InnerHitBuilder original, QueryBuilder rewrittenQuery) {
if (original == null) {
return null;
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java
index b0c49a3f4a..2d23f256f0 100644
--- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java
@@ -23,7 +23,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
-import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -36,6 +35,7 @@ import org.elasticsearch.index.search.ESToParentBlockJoinQuery;
import org.elasticsearch.index.search.NestedHelper;
import java.io.IOException;
+import java.util.Locale;
import java.util.Map;
import java.util.Objects;
@@ -144,7 +144,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
builder.field(PATH_FIELD.getPreferredName(), path);
builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
if (scoreMode != null) {
- builder.field(SCORE_MODE_FIELD.getPreferredName(), HasChildQueryBuilder.scoreModeAsString(scoreMode));
+ builder.field(SCORE_MODE_FIELD.getPreferredName(), scoreModeAsString(scoreMode));
}
printBoostAndQueryName(builder);
if (innerHitBuilder != null) {
@@ -183,7 +183,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
} else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName)) {
ignoreUnmapped = parser.booleanValue();
} else if (SCORE_MODE_FIELD.match(currentFieldName)) {
- scoreMode = HasChildQueryBuilder.parseScoreMode(parser.text());
+ scoreMode = parseScoreMode(parser.text());
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName)) {
queryName = parser.text();
} else {
@@ -201,6 +201,30 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
return queryBuilder;
}
+ public static ScoreMode parseScoreMode(String scoreModeString) {
+ if ("none".equals(scoreModeString)) {
+ return ScoreMode.None;
+ } else if ("min".equals(scoreModeString)) {
+ return ScoreMode.Min;
+ } else if ("max".equals(scoreModeString)) {
+ return ScoreMode.Max;
+ } else if ("avg".equals(scoreModeString)) {
+ return ScoreMode.Avg;
+ } else if ("sum".equals(scoreModeString)) {
+ return ScoreMode.Total;
+ }
+ throw new IllegalArgumentException("No score mode for child query [" + scoreModeString + "] found");
+ }
+
+ public static String scoreModeAsString(ScoreMode scoreMode) {
+ if (scoreMode == ScoreMode.Total) {
+ // Lucene uses 'total' but 'sum' is more consistent with other elasticsearch APIs
+ return "sum";
+ } else {
+ return scoreMode.name().toLowerCase(Locale.ROOT);
+ }
+ }
+
@Override
public final String getWriteableName() {
return NAME;
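parseScoreMode and scoreModeAsString were previously reached through HasChildQueryBuilder; with the parent/child queries leaving core (see the QueryBuilders and SearchModule hunks below), NestedQueryBuilder gets its own copies. Their round-trip behaviour, as a small usage sketch of the methods added above:

    ScoreMode mode = NestedQueryBuilder.parseScoreMode("sum");   // maps to ScoreMode.Total
    String name = NestedQueryBuilder.scoreModeAsString(mode);    // serialised back as "sum"
    // NestedQueryBuilder.parseScoreMode("bogus");               // would throw IllegalArgumentException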
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java
index 805ec5fa78..df0493d61c 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java
@@ -472,30 +472,6 @@ public abstract class QueryBuilders {
}
/**
- * Constructs a new has_child query, with the child type and the query to run on the child documents. The
- * results of this query are the parent docs that those child docs matched.
- *
- * @param type The child type.
- * @param query The query.
- * @param scoreMode How the scores from the children hits should be aggregated into the parent hit.
- */
- public static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder query, ScoreMode scoreMode) {
- return new HasChildQueryBuilder(type, query, scoreMode);
- }
-
- /**
- * Constructs a new parent query, with the parent type and the query to run on the parent documents. The
- * results of this query are the children docs that those parent docs matched.
- *
- * @param type The parent type.
- * @param query The query.
- * @param score Whether the score from the parent hit should propagate to the child hit
- */
- public static HasParentQueryBuilder hasParentQuery(String type, QueryBuilder query, boolean score) {
- return new HasParentQueryBuilder(type, query, score);
- }
-
- /**
* Constructs a new parent id query that returns all child documents of the specified type that
* point to the specified id.
*/
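The hasChildQuery and hasParentQuery convenience factories are gone from QueryBuilders; the builders themselves appear to be moving out of core (the SearchModule hunk below stops registering them), so callers construct them directly. A hedged sketch that mirrors the bodies of the removed helpers (the builders' new package/module location is not shown in this commit):

    HasChildQueryBuilder hasChild = new HasChildQueryBuilder("child", QueryBuilders.matchAllQuery(), ScoreMode.None);
    HasParentQueryBuilder hasParent = new HasParentQueryBuilder("parent", QueryBuilders.matchAllQuery(), false);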
diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java
index 359c3165f5..56c343a5ae 100644
--- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java
+++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java
@@ -284,7 +284,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
}
}
- List<SnapshotFiles> snapshots = new ArrayList<>();
+ List<SnapshotFiles> snapshots = new ArrayList<>(snapshotsMap.size());
for (Map.Entry<String, List<String>> entry : snapshotsMap.entrySet()) {
List<FileInfo> fileInfosBuilder = new ArrayList<>();
for (String file : entry.getValue()) {
diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
index 88cff7f559..b8ec92ba15 100644
--- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
+++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java
@@ -177,7 +177,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
@Override
public AllCircuitBreakerStats stats() {
long parentEstimated = 0;
- List<CircuitBreakerStats> allStats = new ArrayList<>();
+ List<CircuitBreakerStats> allStats = new ArrayList<>(this.breakers.size());
// Gather the "estimated" count for the parent breaker by adding the
// estimations for each individual breaker
for (CircuitBreaker breaker : this.breakers.values()) {
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
index fcd925c258..ff7c3009dc 100644
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.fielddata.cache;
+import java.util.Collections;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -134,9 +135,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
//noinspection unchecked
final Accountable accountable = cache.computeIfAbsent(key, k -> {
cacheHelper.addClosedListener(IndexFieldCache.this);
- for (Listener listener : this.listeners) {
- k.listeners.add(listener);
- }
+ Collections.addAll(k.listeners, this.listeners);
final AtomicFieldData fieldData = indexFieldData.loadDirect(context);
for (Listener listener : k.listeners) {
try {
@@ -162,9 +161,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
//noinspection unchecked
final Accountable accountable = cache.computeIfAbsent(key, k -> {
ElasticsearchDirectoryReader.addReaderCloseListener(indexReader, IndexFieldCache.this);
- for (Listener listener : this.listeners) {
- k.listeners.add(listener);
- }
+ Collections.addAll(k.listeners, this.listeners);
final Accountable ifd = (Accountable) indexFieldData.localGlobalDirect(indexReader);
for (Listener listener : k.listeners) {
try {
diff --git a/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java
index b8862fb20e..32ad3e9332 100644
--- a/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java
+++ b/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java
@@ -86,27 +86,23 @@ public class InternalSettingsPreparer {
initializeSettings(output, input, properties);
Environment environment = new Environment(output.build());
+ if (Files.exists(environment.configFile().resolve("elasticsearch.yaml"))) {
+ throw new SettingsException("elasticsearch.yaml was deprecated in 5.5.0 and must be renamed to elasticsearch.yml");
+ }
+
+ if (Files.exists(environment.configFile().resolve("elasticsearch.json"))) {
+ throw new SettingsException("elasticsearch.json was deprecated in 5.5.0 and must be converted to elasticsearch.yml");
+ }
+
output = Settings.builder(); // start with a fresh output
- boolean settingsFileFound = false;
- Set<String> foundSuffixes = new HashSet<>();
- for (String allowedSuffix : ALLOWED_SUFFIXES) {
- Path path = environment.configFile().resolve("elasticsearch" + allowedSuffix);
- if (Files.exists(path)) {
- if (!settingsFileFound) {
- try {
- output.loadFromPath(path);
- } catch (IOException e) {
- throw new SettingsException("Failed to load settings from " + path.toString(), e);
- }
- }
- settingsFileFound = true;
- foundSuffixes.add(allowedSuffix);
+ Path path = environment.configFile().resolve("elasticsearch.yml");
+ if (Files.exists(path)) {
+ try {
+ output.loadFromPath(path);
+ } catch (IOException e) {
+ throw new SettingsException("Failed to load settings from " + path.toString(), e);
}
}
- if (foundSuffixes.size() > 1) {
- throw new SettingsException("multiple settings files found with suffixes: "
- + Strings.collectionToDelimitedString(foundSuffixes, ","));
- }
// re-initialize settings now that the config file has been loaded
initializeSettings(output, input, properties);
@@ -116,8 +112,7 @@ public class InternalSettingsPreparer {
// we put back the path.logs so we can use it in the logging configuration file
output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString()));
- String configExtension = foundSuffixes.isEmpty() ? null : foundSuffixes.iterator().next();
- return new Environment(output.build(), configExtension);
+ return new Environment(output.build());
}
/**
diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java
index 0643c6b96a..08f9856c76 100644
--- a/core/src/main/java/org/elasticsearch/node/Node.java
+++ b/core/src/main/java/org/elasticsearch/node/Node.java
@@ -684,11 +684,9 @@ public class Node implements Closeable {
.flatMap(p -> p.getBootstrapChecks().stream()).collect(Collectors.toList()));
clusterService.addStateApplier(transportService.getTaskManager());
- clusterService.getMasterService().setClusterStateSupplier(discovery::clusterState);
- clusterService.getClusterApplierService().setInitialState(discovery.getInitialClusterState());
// start after transport service so the local disco is known
+ discovery.start(); // start before cluster service so that it can set initial state on ClusterApplierService
clusterService.start();
- discovery.start();
assert clusterService.localNode().equals(localNodeFactory.getNode())
: "clusterService has a different local node than the factory provided";
transportService.acceptIncomingRequests();
diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java
index 8707d851d3..85308f0c24 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -20,8 +20,6 @@
package org.elasticsearch.search;
import org.apache.lucene.search.BooleanQuery;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.NamedRegistry;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
@@ -45,8 +43,6 @@ import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
import org.elasticsearch.index.query.GeoPolygonQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
-import org.elasticsearch.index.query.HasChildQueryBuilder;
-import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
@@ -103,8 +99,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.adjacency.InternalAdjacencyMatrix;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.children.InternalChildren;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
@@ -256,7 +250,6 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -410,9 +403,6 @@ public class SearchModule {
GeoCentroidAggregationBuilder::parse).addResultReader(InternalGeoCentroid::new));
registerAggregation(new AggregationSpec(ScriptedMetricAggregationBuilder.NAME, ScriptedMetricAggregationBuilder::new,
ScriptedMetricAggregationBuilder::parse).addResultReader(InternalScriptedMetric::new));
- registerAggregation(new AggregationSpec(ChildrenAggregationBuilder.NAME, ChildrenAggregationBuilder::new,
- ChildrenAggregationBuilder::parse).addResultReader(InternalChildren::new));
-
registerFromPlugin(plugins, SearchPlugin::getAggregations, this::registerAggregation);
}
@@ -706,8 +696,6 @@ public class SearchModule {
MatchPhrasePrefixQueryBuilder::fromXContent));
registerQuery(new QuerySpec<>(MultiMatchQueryBuilder.NAME, MultiMatchQueryBuilder::new, MultiMatchQueryBuilder::fromXContent));
registerQuery(new QuerySpec<>(NestedQueryBuilder.NAME, NestedQueryBuilder::new, NestedQueryBuilder::fromXContent));
- registerQuery(new QuerySpec<>(HasChildQueryBuilder.NAME, HasChildQueryBuilder::new, HasChildQueryBuilder::fromXContent));
- registerQuery(new QuerySpec<>(HasParentQueryBuilder.NAME, HasParentQueryBuilder::new, HasParentQueryBuilder::fromXContent));
registerQuery(new QuerySpec<>(DisMaxQueryBuilder.NAME, DisMaxQueryBuilder::new, DisMaxQueryBuilder::fromXContent));
registerQuery(new QuerySpec<>(IdsQueryBuilder.NAME, IdsQueryBuilder::new, IdsQueryBuilder::fromXContent));
registerQuery(new QuerySpec<>(MatchAllQueryBuilder.NAME, MatchAllQueryBuilder::new, MatchAllQueryBuilder::fromXContent));
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java
index f43c2670ab..8b704ee8a6 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java
@@ -23,8 +23,6 @@ import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrix;
import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.children.Children;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.Filters;
@@ -163,20 +161,20 @@ public class AggregationBuilders {
public static FiltersAggregationBuilder filters(String name, QueryBuilder... filters) {
return new FiltersAggregationBuilder(name, filters);
}
-
+
/**
* Create a new {@link AdjacencyMatrix} aggregation with the given name.
*/
public static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, Map<String, QueryBuilder> filters) {
return new AdjacencyMatrixAggregationBuilder(name, filters);
- }
-
+ }
+
/**
* Create a new {@link AdjacencyMatrix} aggregation with the given name and separator
*/
public static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, String separator, Map<String, QueryBuilder> filters) {
return new AdjacencyMatrixAggregationBuilder(name, separator, filters);
- }
+ }
/**
* Create a new {@link Sampler} aggregation with the given name.
@@ -221,13 +219,6 @@ public class AggregationBuilders {
}
/**
- * Create a new {@link Children} aggregation with the given name.
- */
- public static ChildrenAggregationBuilder children(String name, String childType) {
- return new ChildrenAggregationBuilder(name, childType);
- }
-
- /**
* Create a new {@link GeoDistance} aggregation with the given name.
*/
public static GeoDistanceAggregationBuilder geoDistance(String name, GeoPoint origin) {
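The children(...) factory is removed from AggregationBuilders along with the parent/child query helpers; the aggregation builder is constructed directly instead. A one-line sketch mirroring the removed method body (the name and child type are illustrative, and the builder's new home is not shown here):

    ChildrenAggregationBuilder children = new ChildrenAggregationBuilder("to_answers", "answer");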
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java
index 106335380c..d635952ec1 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java
@@ -183,7 +183,7 @@ public class AggregatorFactories {
}
public List<PipelineAggregator> createPipelineAggregators() throws IOException {
- List<PipelineAggregator> pipelineAggregators = new ArrayList<>();
+ List<PipelineAggregator> pipelineAggregators = new ArrayList<>(this.pipelineAggregatorFactories.size());
for (PipelineAggregationBuilder factory : this.pipelineAggregatorFactories) {
pipelineAggregators.add(factory.create());
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java b/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java
index 2de6ae0cf9..40e66bd964 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java
@@ -70,7 +70,7 @@ public abstract class BucketCollector implements Collector {
@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException {
- List<LeafBucketCollector> leafCollectors = new ArrayList<>();
+ List<LeafBucketCollector> leafCollectors = new ArrayList<>(collectors.length);
for (BucketCollector c : collectors) {
leafCollectors.add(c.getLeafCollector(ctx));
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java
index 526db6a79b..eeb60d393e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java
@@ -197,7 +197,7 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
+ "] index level setting.");
}
- List<KeyedFilter> rewrittenFilters = new ArrayList<>();
+ List<KeyedFilter> rewrittenFilters = new ArrayList<>(filters.size());
for (KeyedFilter kf : filters) {
rewrittenFilters.add(new KeyedFilter(kf.key(), QueryBuilder.rewriteQuery(kf.filter(), context.getQueryShardContext())));
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java
index f34df7368a..b80ce74c3d 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java
@@ -169,7 +169,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
@Override
protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent, Builder subFactoriesBuilder)
throws IOException {
- List<KeyedFilter> rewrittenFilters = new ArrayList<>();
+ List<KeyedFilter> rewrittenFilters = new ArrayList<>(filters.size());
for(KeyedFilter kf : filters) {
rewrittenFilters.add(new KeyedFilter(kf.key(), QueryBuilder.rewriteQuery(kf.filter(),
context.getQueryShardContext())));
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
index e4885bc053..b87ce51c4b 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
@@ -164,7 +164,7 @@ public class DoubleTerms extends InternalMappedTerms<DoubleTerms, DoubleTerms.Bu
if (promoteToDouble == false) {
return super.doReduce(aggregations, reduceContext);
}
- List<InternalAggregation> newAggs = new ArrayList<>();
+ List<InternalAggregation> newAggs = new ArrayList<>(aggregations.size());
for (InternalAggregation agg : aggregations) {
if (agg instanceof LongTerms) {
DoubleTerms dTerms = LongTerms.convertLongTermsToDouble((LongTerms) agg, format);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java
index e79ba1047e..8a1b70fdd1 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java
@@ -75,7 +75,7 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
HistogramFactory factory = (HistogramFactory) histo;
- List<Bucket> newBuckets = new ArrayList<>();
+ List<Bucket> newBuckets = new ArrayList<>(buckets.size());
double sum = 0;
for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], GapPolicy.INSERT_ZEROS);
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java
index e25e7b7494..2f728e21b6 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java
@@ -26,7 +26,6 @@ import org.apache.lucene.search.highlight.WeightedSpanTerm;
import org.apache.lucene.search.highlight.WeightedSpanTermExtractor;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
-import org.elasticsearch.index.query.HasChildQueryBuilder;
import java.io.IOException;
import java.util.Map;
@@ -83,7 +82,7 @@ public final class CustomQueryScorer extends QueryScorer {
}
protected void extract(Query query, float boost, Map<String, WeightedSpanTerm> terms) throws IOException {
- if (query instanceof HasChildQueryBuilder.LateParsingQuery) {
+ if (isChildOrParentQuery(query.getClass())) {
// skip has_child or has_parent queries, see: https://github.com/elastic/elasticsearch/issues/14999
return;
} else if (query instanceof FunctionScoreQuery) {
@@ -94,5 +93,13 @@ public final class CustomQueryScorer extends QueryScorer {
super.extract(query, boost, terms);
}
}
+
+ /**
+ * Workaround to detect parent/child query
+ */
+ private static final String PARENT_CHILD_QUERY_NAME = "HasChildQueryBuilder$LateParsingQuery";
+ private static boolean isChildOrParentQuery(Class<?> clazz) {
+ return clazz.getName().endsWith(PARENT_CHILD_QUERY_NAME);
+ }
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
index 24f2647167..b874c3aeca 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
@@ -229,9 +229,8 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
// Merge new candidates into existing ones,
// deduping:
final Set<Candidate> set = new HashSet<>(candidates);
- for (int i = 0; i < this.candidates.length; i++) {
- set.add(this.candidates[i]);
- }
+ Collections.addAll(set, this.candidates);
+
this.candidates = set.toArray(new Candidate[set.size()]);
// Sort strongest to weakest:
Arrays.sort(this.candidates, Collections.reverseOrder());
diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
index 30a18e0561..ba355e4147 100644
--- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
+++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
@@ -101,6 +101,7 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -357,8 +358,9 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
private final DiscoveryNode node;
private final AtomicBoolean closed = new AtomicBoolean(false);
private final Version version;
+ private final Consumer<Connection> onClose;
- public NodeChannels(DiscoveryNode node, Channel[] channels, ConnectionProfile connectionProfile) {
+ public NodeChannels(DiscoveryNode node, Channel[] channels, ConnectionProfile connectionProfile, Consumer<Connection> onClose) {
this.node = node;
this.channels = channels;
assert channels.length == connectionProfile.getNumConnections() : "expected channels size to be == "
@@ -369,6 +371,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
typeMapping.put(type, handle);
}
version = node.getVersion();
+ this.onClose = onClose;
}
NodeChannels(NodeChannels channels, Version handshakeVersion) {
@@ -376,6 +379,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
this.channels = channels.channels;
this.typeMapping = channels.typeMapping;
this.version = handshakeVersion;
+ this.onClose = channels.onClose;
}
@Override
@@ -407,7 +411,11 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
@Override
public synchronized void close() throws IOException {
if (closed.compareAndSet(false, true)) {
- closeChannels(Arrays.stream(channels).filter(Objects::nonNull).collect(Collectors.toList()));
+ try {
+ closeChannels(Arrays.stream(channels).filter(Objects::nonNull).collect(Collectors.toList()));
+ } finally {
+ onClose.accept(this);
+ }
}
}
@@ -519,8 +527,8 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
final TimeValue handshakeTimeout = connectionProfile.getHandshakeTimeout() == null ?
connectTimeout : connectionProfile.getHandshakeTimeout();
final Version version = executeHandshake(node, channel, handshakeTimeout);
- transportServiceAdapter.onConnectionOpened(node);
- nodeChannels = new NodeChannels(nodeChannels, version);// clone the channels - we now have the correct version
+ transportServiceAdapter.onConnectionOpened(nodeChannels);
+ nodeChannels = new NodeChannels(nodeChannels, version); // clone the channels - we now have the correct version
success = true;
return nodeChannels;
} catch (ConnectTransportException e) {
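NodeChannels now carries an onClose callback that fires exactly once, inside a finally block, when its channels are closed, and the handshake path reports the opened Connection instead of just the DiscoveryNode. A hedged sketch of how such a callback could be wired in the connect path; the actual construction site is not part of these hunks, and forwarding to transportServiceAdapter.onConnectionClosed is an assumption based on the listener changes below:

    // Hypothetical wiring: notify the transport service adapter when this connection's channels close.
    NodeChannels nodeChannels = new NodeChannels(node, channels, connectionProfile,
        closedConnection -> transportServiceAdapter.onConnectionClosed(closedConnection));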
diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java
index d3dcd8bb5c..a32289332e 100644
--- a/core/src/main/java/org/elasticsearch/transport/Transport.java
+++ b/core/src/main/java/org/elasticsearch/transport/Transport.java
@@ -132,5 +132,13 @@ public interface Transport extends LifecycleComponent {
default Version getVersion() {
return getNode().getVersion();
}
+
+ /**
+ * Returns a key that this connection can be cached on. Delegating subclasses must delegate this method call to
+ * the original connection.
+ */
+ default Object getCacheKey() {
+ return this;
+ }
}
}
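getCacheKey() is what lets the TransportService later decide whether a pending request was registered on the connection that just closed, even when one side holds a wrapping/delegating Connection: equality is checked on the cache key, never on the Connection instances themselves. A small fragment using the names from the TransportService hunk below (holder is a RequestHolder, connection is the closed Connection):

    boolean samePhysicalConnection =
        holder.connection().getCacheKey().equals(connection.getCacheKey());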
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java b/core/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java
index 3f277a0ee1..de767986b9 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java
@@ -34,7 +34,13 @@ public interface TransportConnectionListener {
default void onNodeDisconnected(DiscoveryNode node) {}
/**
+ * Called once a node connection is closed. The connection might not have been registered in the
+ * transport as a shared connection to a specific node.
+ */
+ default void onConnectionClosed(Transport.Connection connection) {}
+
+ /**
* Called once a node connection is opened.
*/
- default void onConnectionOpened(DiscoveryNode node) {}
+ default void onConnectionOpened(Transport.Connection connection) {}
}
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java
index 7de9606361..e5382e4e26 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -569,7 +569,7 @@ public class TransportService extends AbstractLifecycleComponent {
}
Supplier<ThreadContext.StoredContext> storedContextSupplier = threadPool.getThreadContext().newRestorableContext(true);
TransportResponseHandler<T> responseHandler = new ContextRestoreResponseHandler<>(storedContextSupplier, handler);
- clientHandlers.put(requestId, new RequestHolder<>(responseHandler, connection.getNode(), action, timeoutHandler));
+ clientHandlers.put(requestId, new RequestHolder<>(responseHandler, connection, action, timeoutHandler));
if (lifecycle.stoppedOrClosed()) {
// if we are not started the exception handling will remove the RequestHolder again and calls the handler to notify
// the caller. It will only notify if the toStop code hasn't done the work yet.
@@ -810,7 +810,7 @@ public class TransportService extends AbstractLifecycleComponent {
}
holder.cancelTimeout();
if (traceEnabled() && shouldTraceAction(holder.action())) {
- traceReceivedResponse(requestId, holder.node(), holder.action());
+ traceReceivedResponse(requestId, holder.connection().getNode(), holder.action());
}
return holder.handler();
}
@@ -855,12 +855,12 @@ public class TransportService extends AbstractLifecycleComponent {
}
@Override
- public void onConnectionOpened(DiscoveryNode node) {
+ public void onConnectionOpened(Transport.Connection connection) {
// capture listeners before spawning the background callback so the following pattern won't trigger a call
// connectToNode(); connection is completed successfully
// addConnectionListener(); this listener shouldn't be called
final Stream<TransportConnectionListener> listenersToNotify = TransportService.this.connectionListeners.stream();
- threadPool.generic().execute(() -> listenersToNotify.forEach(listener -> listener.onConnectionOpened(node)));
+ threadPool.generic().execute(() -> listenersToNotify.forEach(listener -> listener.onConnectionOpened(connection)));
}
@Override
@@ -871,20 +871,28 @@ public class TransportService extends AbstractLifecycleComponent {
connectionListener.onNodeDisconnected(node);
}
});
+ } catch (EsRejectedExecutionException ex) {
+ logger.debug("Rejected execution on NodeDisconnected", ex);
+ }
+ }
+
+ @Override
+ public void onConnectionClosed(Transport.Connection connection) {
+ try {
for (Map.Entry<Long, RequestHolder> entry : clientHandlers.entrySet()) {
RequestHolder holder = entry.getValue();
- if (holder.node().equals(node)) {
+ if (holder.connection().getCacheKey().equals(connection.getCacheKey())) {
final RequestHolder holderToNotify = clientHandlers.remove(entry.getKey());
if (holderToNotify != null) {
// callback that an exception happened, but on a different thread since we don't
// want handlers to worry about stack overflows
- threadPool.generic().execute(() -> holderToNotify.handler().handleException(new NodeDisconnectedException(node,
- holderToNotify.action())));
+ threadPool.generic().execute(() -> holderToNotify.handler().handleException(new NodeDisconnectedException(
+ connection.getNode(), holderToNotify.action())));
}
}
}
} catch (EsRejectedExecutionException ex) {
- logger.debug("Rejected execution on NodeDisconnected", ex);
+ logger.debug("Rejected execution on onConnectionClosed", ex);
}
}
@@ -929,13 +937,14 @@ public class TransportService extends AbstractLifecycleComponent {
if (holder != null) {
// add it to the timeout information holder, in case we are going to get a response later
long timeoutTime = System.currentTimeMillis();
- timeoutInfoHandlers.put(requestId, new TimeoutInfoHolder(holder.node(), holder.action(), sentTime, timeoutTime));
+ timeoutInfoHandlers.put(requestId, new TimeoutInfoHolder(holder.connection().getNode(), holder.action(), sentTime,
+ timeoutTime));
// now that we have the information visible via timeoutInfoHandlers, we try to remove the request id
final RequestHolder removedHolder = clientHandlers.remove(requestId);
if (removedHolder != null) {
assert removedHolder == holder : "two different holder instances for request [" + requestId + "]";
removedHolder.handler().handleException(
- new ReceiveTimeoutTransportException(holder.node(), holder.action(),
+ new ReceiveTimeoutTransportException(holder.connection().getNode(), holder.action(),
"request_id [" + requestId + "] timed out after [" + (timeoutTime - sentTime) + "ms]"));
} else {
// response was processed, remove timeout info.
@@ -990,15 +999,15 @@ public class TransportService extends AbstractLifecycleComponent {
private final TransportResponseHandler<T> handler;
- private final DiscoveryNode node;
+ private final Transport.Connection connection;
private final String action;
private final TimeoutHandler timeoutHandler;
- RequestHolder(TransportResponseHandler<T> handler, DiscoveryNode node, String action, TimeoutHandler timeoutHandler) {
+ RequestHolder(TransportResponseHandler<T> handler, Transport.Connection connection, String action, TimeoutHandler timeoutHandler) {
this.handler = handler;
- this.node = node;
+ this.connection = connection;
this.action = action;
this.timeoutHandler = timeoutHandler;
}
@@ -1007,8 +1016,8 @@ public class TransportService extends AbstractLifecycleComponent {
return handler;
}
- public DiscoveryNode node() {
- return this.node;
+ public Transport.Connection connection() {
+ return this.connection;
}
public String action() {
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java
index 21930b3763..cf41042ab8 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java
@@ -450,156 +450,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertHitCount(countResponse, numDocs);
}
- /*
- Test for https://github.com/elastic/elasticsearch/issues/3444
- */
- public void testBulkUpdateDocAsUpsertWithParent() throws Exception {
- client().admin().indices().prepareCreate("test")
- .setSettings("index.mapping.single_type", false)
- .addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
- .addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON)
- .execute().actionGet();
- ensureGreen();
-
- BulkRequestBuilder builder = client().prepareBulk();
-
- // It's important to use JSON parsing here and request objects: issue 3444 is related to incomplete option parsing
- byte[] addParent = new BytesArray(
- "{" +
- " \"index\" : {" +
- " \"_index\" : \"test\"," +
- " \"_type\" : \"parent\"," +
- " \"_id\" : \"parent1\"" +
- " }" +
- "}" +
- "\n" +
- "{" +
- " \"field1\" : \"value1\"" +
- "}" +
- "\n").array();
-
- byte[] addChild = new BytesArray(
- "{" +
- " \"update\" : {" +
- " \"_index\" : \"test\"," +
- " \"_type\" : \"child\"," +
- " \"_id\" : \"child1\"," +
- " \"parent\" : \"parent1\"" +
- " }" +
- "}" +
- "\n" +
- "{" +
- " \"doc\" : {" +
- " \"field1\" : \"value1\"" +
- " }," +
- " \"doc_as_upsert\" : \"true\"" +
- "}" +
- "\n").array();
-
- builder.add(addParent, 0, addParent.length, XContentType.JSON);
- builder.add(addChild, 0, addChild.length, XContentType.JSON);
-
- BulkResponse bulkResponse = builder.get();
- assertThat(bulkResponse.getItems().length, equalTo(2));
- assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
- assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
-
- client().admin().indices().prepareRefresh("test").get();
- //we check that the _parent field was set on the child document by using the has parent query
- SearchResponse searchResponse = client().prepareSearch("test")
- .setQuery(QueryBuilders.hasParentQuery("parent", QueryBuilders.matchAllQuery(), false))
- .get();
-
- assertNoFailures(searchResponse);
- assertSearchHits(searchResponse, "child1");
- }
-
- /*
- Test for https://github.com/elastic/elasticsearch/issues/3444
- */
- public void testBulkUpdateUpsertWithParent() throws Exception {
- assertAcked(prepareCreate("test")
- .setSettings("index.mapping.single_type", false)
- .addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
- .addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON));
- ensureGreen();
-
- BulkRequestBuilder builder = client().prepareBulk();
-
- byte[] addParent = new BytesArray(
- "{" +
- " \"index\" : {" +
- " \"_index\" : \"test\"," +
- " \"_type\" : \"parent\"," +
- " \"_id\" : \"parent1\"" +
- " }" +
- "}" +
- "\n" +
- "{" +
- " \"field1\" : \"value1\"" +
- "}" +
- "\n").array();
-
- byte[] addChild1 = new BytesArray(
- "{" +
- " \"update\" : {" +
- " \"_index\" : \"test\"," +
- " \"_type\" : \"child\"," +
- " \"_id\" : \"child1\"," +
- " \"parent\" : \"parent1\"" +
- " }" +
- "}" +
- "\n" +
- "{" +
- " \"script\" : {" +
- " \"inline\" : \"ctx._source.field2 = 'value2'\"" +
- " }," +
- " \"lang\" : \"" + CustomScriptPlugin.NAME + "\"," +
- " \"upsert\" : {" +
- " \"field1\" : \"value1'\"" +
- " }" +
- "}" +
- "\n").array();
-
- byte[] addChild2 = new BytesArray(
- "{" +
- " \"update\" : {" +
- " \"_index\" : \"test\"," +
- " \"_type\" : \"child\"," +
- " \"_id\" : \"child1\"," +
- " \"parent\" : \"parent1\"" +
- " }" +
- "}" +
- "\n" +
- "{" +
- " \"script\" : \"ctx._source.field2 = 'value2'\"," +
- " \"upsert\" : {" +
- " \"field1\" : \"value1'\"" +
- " }" +
- "}" +
- "\n").array();
-
- builder.add(addParent, 0, addParent.length, XContentType.JSON);
- builder.add(addChild1, 0, addChild1.length, XContentType.JSON);
- builder.add(addChild2, 0, addChild2.length, XContentType.JSON);
-
- BulkResponse bulkResponse = builder.get();
- assertThat(bulkResponse.getItems().length, equalTo(3));
- assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
- assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
- assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(true));
- assertThat(bulkResponse.getItems()[2].getFailure().getCause().getCause().getMessage(),
- equalTo("script_lang not supported [painless]"));
-
- client().admin().indices().prepareRefresh("test").get();
-
- SearchResponse searchResponse = client().prepareSearch("test")
- .setQuery(QueryBuilders.hasParentQuery("parent", QueryBuilders.matchAllQuery(), false))
- .get();
-
- assertSearchHits(searchResponse, "child1");
- }
/*
* Test for https://github.com/elastic/elasticsearch/issues/8365
diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java
index 711804153c..e711117fb6 100644
--- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java
+++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java
@@ -63,8 +63,6 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
@@ -824,27 +822,6 @@ public class IndexAliasesIT extends ESIntegTestCase {
}
}
- public void testAliasesFilterWithHasChildQuery() throws Exception {
- assertAcked(prepareCreate("my-index")
- .setSettings("index.mapping.single_type", false)
- .addMapping("parent")
- .addMapping("child", "_parent", "type=parent")
- );
- client().prepareIndex("my-index", "parent", "1").setSource("{}", XContentType.JSON).get();
- client().prepareIndex("my-index", "child", "2").setSource("{}", XContentType.JSON).setParent("1").get();
- refresh();
-
- assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", hasChildQuery("child", matchAllQuery(), ScoreMode.None)));
- assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery(), false)));
-
- SearchResponse response = client().prepareSearch("filter1").get();
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
- response = client().prepareSearch("filter2").get();
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).getId(), equalTo("2"));
- }
-
public void testAliasesWithBlocks() {
createIndex("test");
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
index a6e2431a15..ed15660c85 100644
--- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
@@ -19,21 +19,13 @@
package org.elasticsearch.client.transport;
-import java.io.Closeable;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
-
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse;
import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -41,19 +33,40 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.ConnectionProfile;
import org.elasticsearch.transport.Transport;
+import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportInterceptor;
import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.hamcrest.CustomMatcher;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.elasticsearch.test.transport.MockTransportService.createNewService;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.everyItem;
import static org.hamcrest.CoreMatchers.hasItem;
@@ -322,6 +335,157 @@ public class TransportClientNodesServiceTests extends ESTestCase {
}
}
+ public void testSniffNodesSamplerClosesConnections() throws Exception {
+ final TestThreadPool threadPool = new TestThreadPool("testSniffNodesSamplerClosesConnections");
+
+ Settings remoteSettings = Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "remote").build();
+ try (MockTransportService remoteService = createNewService(remoteSettings, Version.CURRENT, threadPool, null)) {
+ final MockHandler handler = new MockHandler(remoteService);
+ remoteService.registerRequestHandler(ClusterStateAction.NAME, ClusterStateRequest::new, ThreadPool.Names.SAME, handler);
+ remoteService.start();
+ remoteService.acceptIncomingRequests();
+
+ Settings clientSettings = Settings.builder()
+ .put(TransportClient.CLIENT_TRANSPORT_SNIFF.getKey(), true)
+ .put(TransportClient.CLIENT_TRANSPORT_PING_TIMEOUT.getKey(), TimeValue.timeValueSeconds(1))
+ .put(TransportClient.CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL.getKey(), TimeValue.timeValueSeconds(30))
+ .build();
+
+ try (MockTransportService clientService = createNewService(clientSettings, Version.CURRENT, threadPool, null)) {
+ final List<MockConnection> establishedConnections = new CopyOnWriteArrayList<>();
+ final List<MockConnection> reusedConnections = new CopyOnWriteArrayList<>();
+
+ clientService.addDelegate(remoteService, new MockTransportService.DelegateTransport(clientService.original()) {
+ @Override
+ public Connection openConnection(DiscoveryNode node, ConnectionProfile profile) throws IOException {
+ MockConnection connection = new MockConnection(super.openConnection(node, profile));
+ establishedConnections.add(connection);
+ return connection;
+ }
+
+ @Override
+ public Connection getConnection(DiscoveryNode node) {
+ MockConnection connection = new MockConnection(super.getConnection(node));
+ reusedConnections.add(connection);
+ return connection;
+ }
+ });
+
+ clientService.start();
+ clientService.acceptIncomingRequests();
+
+ try (TransportClientNodesService transportClientNodesService =
+ new TransportClientNodesService(clientSettings, clientService, threadPool, (a, b) -> {})) {
+ assertEquals(0, transportClientNodesService.connectedNodes().size());
+ assertEquals(0, establishedConnections.size());
+ assertEquals(0, reusedConnections.size());
+
+ transportClientNodesService.addTransportAddresses(remoteService.getLocalDiscoNode().getAddress());
+ assertEquals(1, transportClientNodesService.connectedNodes().size());
+ assertClosedConnections(establishedConnections, 1);
+
+ transportClientNodesService.doSample();
+ assertClosedConnections(establishedConnections, 2);
+ assertOpenConnections(reusedConnections, 1);
+
+ handler.blockRequest();
+ Thread thread = new Thread(transportClientNodesService::doSample);
+ thread.start();
+
+ assertBusy(() -> assertEquals(3, establishedConnections.size()));
+ assertFalse("Temporary ping connection must be open", establishedConnections.get(2).isClosed());
+
+ handler.releaseRequest();
+ thread.join();
+
+ assertClosedConnections(establishedConnections, 3);
+ }
+ }
+ } finally {
+ terminate(threadPool);
+ }
+ }
+
+ private void assertClosedConnections(final List<MockConnection> connections, final int size) {
+ assertEquals("Expecting " + size + " closed connections but got " + connections.size(), size, connections.size());
+ connections.forEach(c -> assertConnection(c, true));
+ }
+
+ private void assertOpenConnections(final List<MockConnection> connections, final int size) {
+ assertEquals("Expecting " + size + " open connections but got " + connections.size(), size, connections.size());
+ connections.forEach(c -> assertConnection(c, false));
+ }
+
+ private static void assertConnection(final MockConnection connection, final boolean closed) {
+ assertEquals("Connection [" + connection + "] must be " + (closed ? "closed" : "open"), closed, connection.isClosed());
+ }
+
+ class MockConnection implements Transport.Connection {
+ private final AtomicBoolean closed = new AtomicBoolean(false);
+ private final Transport.Connection connection;
+
+ private MockConnection(Transport.Connection connection) {
+ this.connection = connection;
+ }
+
+ @Override
+ public DiscoveryNode getNode() {
+ return connection.getNode();
+ }
+
+ @Override
+ public Version getVersion() {
+ return connection.getVersion();
+ }
+
+ @Override
+ public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
+ throws IOException, TransportException {
+ connection.sendRequest(requestId, action, request, options);
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (closed.compareAndSet(false, true)) {
+ connection.close();
+ }
+ }
+
+ boolean isClosed() {
+ return closed.get();
+ }
+ }
+
+ class MockHandler implements TransportRequestHandler<ClusterStateRequest> {
+ private final AtomicBoolean block = new AtomicBoolean(false);
+ private final CountDownLatch release = new CountDownLatch(1);
+ private final MockTransportService transportService;
+
+ MockHandler(MockTransportService transportService) {
+ this.transportService = transportService;
+ }
+
+ @Override
+ public void messageReceived(ClusterStateRequest request, TransportChannel channel) throws Exception {
+ if (block.get()) {
+ release.await();
+ return;
+ }
+ DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().add(transportService.getLocalDiscoNode()).build();
+ ClusterState build = ClusterState.builder(ClusterName.DEFAULT).nodes(discoveryNodes).build();
+ channel.sendResponse(new ClusterStateResponse(ClusterName.DEFAULT, build, 0L));
+ }
+
+ void blockRequest() {
+ if (block.compareAndSet(false, true) == false) {
+ throw new AssertionError("Request handler is already marked as blocking");
+ }
+ }
+ void releaseRequest() {
+ release.countDown();
+ }
+ }
+
public static class TestRequest extends TransportRequest {
}
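// --- Illustrative sketch, not part of the patch above ---
// A small standalone sketch of the close-tracking wrapper idea used by MockConnection in the
// test above: wrap a delegate, record whether close() was invoked (at most once), and let
// assertions inspect that state. TrackedCloseable is a hypothetical name; only the pattern
// mirrors the test code.
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

final class TrackedCloseable implements Closeable {

    private final Closeable delegate;
    private final AtomicBoolean closed = new AtomicBoolean(false);

    TrackedCloseable(Closeable delegate) {
        this.delegate = delegate;
    }

    @Override
    public void close() throws IOException {
        // compareAndSet guarantees the delegate is closed at most once, even under races
        if (closed.compareAndSet(false, true)) {
            delegate.close();
        }
    }

    boolean isClosed() {
        return closed.get();
    }
}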
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
index 7d3ca04e5a..2143e5e67d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
@@ -33,7 +33,6 @@ import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
-import java.util.function.Predicate;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
import static org.hamcrest.Matchers.arrayContaining;
@@ -621,7 +620,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "test*", "-testXXX")),
equalTo(newHashSet("testYYX", "testXYY", "testYYY", "testXXY")));
- assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "+testXXX", "+testXXY", "+testYYY", "-testYYY")),
+ assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testXXX", "testXXY", "testYYY", "-testYYY")),
equalTo(newHashSet("testXXX", "testXXY", "testYYY", "-testYYY")));
assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testYYY", "testYYX", "testX*", "-testXXX")),
@@ -637,7 +636,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
equalTo(newHashSet("-testXYZ", "-testXZZ", "-testYYY")));
assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(),
- "+testXXX", "+testXXY", "+testXYY", "-testXXY")),
+ "testXXX", "testXXY", "testXYY", "-testXXY")),
equalTo(newHashSet("testXXX", "testXYY", "testXXY")));
indexNames = indexNameExpressionResolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), "*", "-*");
@@ -817,7 +816,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
}
public void testIsPatternMatchingAllIndicesMatchingSingleExclusion() throws Exception {
- String[] indicesOrAliases = new String[]{"-index1", "+index1"};
+ String[] indicesOrAliases = new String[]{"-index1", "index1"};
String[] concreteIndices = new String[]{"index1", "index2", "index3"};
MetaData metaData = metaDataBuilder(concreteIndices);
assertThat(indexNameExpressionResolver.isPatternMatchingAllIndices(metaData, indicesOrAliases, concreteIndices), equalTo(true));
@@ -832,7 +831,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
}
public void testIsPatternMatchingAllIndicesMatchingTrailingWildcardAndExclusion() throws Exception {
- String[] indicesOrAliases = new String[]{"index*", "-index1", "+index1"};
+ String[] indicesOrAliases = new String[]{"index*", "-index1", "index1"};
String[] concreteIndices = new String[]{"index1", "index2", "index3"};
MetaData metaData = metaDataBuilder(concreteIndices);
assertThat(indexNameExpressionResolver.isPatternMatchingAllIndices(metaData, indicesOrAliases, concreteIndices), equalTo(true));
@@ -970,4 +969,22 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
Arrays.sort(strings);
assertArrayEquals(new String[] {"test-alias-0", "test-alias-1", "test-alias-non-filtering"}, strings);
}
+
+ public void testConcreteIndicesForDeprecatedPattern() {
+ MetaData.Builder mdBuilder = MetaData.builder()
+ .put(indexBuilder("testXXX").state(State.OPEN))
+ .put(indexBuilder("testXXY").state(State.OPEN))
+ .put(indexBuilder("testYYY").state(State.OPEN));
+ ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build();
+
+ IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state,
+ IndicesOptions.fromOptions(true, true, true, true));
+ assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "+testX*")),
+ equalTo(newHashSet("testXXX", "testXXY")));
+ assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "+testXXX", "+testXXY", "+testYYY", "-testYYY")),
+ equalTo(newHashSet("testXXX", "testXXY", "testYYY")));
+ assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "+testXX*", "+testY*")),
+ equalTo(newHashSet("testXXX", "testXXY", "testYYY")));
+ assertWarnings("support for '+' as part of index expressions is deprecated");
+ }
}
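// --- Illustrative sketch, not part of the patch above ---
// A hypothetical sketch of the behaviour the new deprecation tests expect: a leading '+' on an
// index expression is still accepted but is stripped, and a single deprecation warning
// ("support for '+' as part of index expressions is deprecated") is issued. The
// stripDeprecatedPlus() helper and the warning callback are assumptions for illustration only,
// not the Elasticsearch resolver API.
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

final class PlusPrefixResolver {

    static List<String> stripDeprecatedPlus(List<String> expressions, Consumer<String> deprecationWarning) {
        List<String> resolved = new ArrayList<>(expressions.size());
        boolean warned = false;
        for (String expression : expressions) {
            if (expression.length() > 1 && expression.charAt(0) == '+') {
                if (warned == false) {
                    deprecationWarning.accept("support for '+' as part of index expressions is deprecated");
                    warned = true;
                }
                resolved.add(expression.substring(1)); // "+testXXX" behaves exactly like "testXXX"
            } else {
                resolved.add(expression);
            }
        }
        return resolved;
    }
}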
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java
index bac9a68134..2778525f7d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java
@@ -49,9 +49,9 @@ public class WildcardExpressionResolverTests extends ESTestCase {
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testX*", "kuku"))), equalTo(newHashSet("testXXX", "testXYY", "kuku")));
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("*"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")));
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("*", "-kuku"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY")));
- assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testXXX", "+testYYY"))), equalTo(newHashSet("testXXX", "testYYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), equalTo(newHashSet("testXXX", "testYYY")));
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), equalTo(newHashSet("testXXX", "-testXXX")));
- assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testXXX", "+testY*"))), equalTo(newHashSet("testXXX", "testYYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testXXX", "testY*"))), equalTo(newHashSet("testXXX", "testYYY")));
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), equalTo(newHashSet("testXXX")));
}
@@ -67,9 +67,9 @@ public class WildcardExpressionResolverTests extends ESTestCase {
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen());
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testYY*", "alias*"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY")));
assertThat(newHashSet(resolver.resolve(context, Arrays.asList("-kuku"))), equalTo(newHashSet("-kuku")));
- assertThat(newHashSet(resolver.resolve(context, Arrays.asList("+test*", "-testYYY"))), equalTo(newHashSet("testXXX", "testXYY")));
- assertThat(newHashSet(resolver.resolve(context, Arrays.asList("+testX*", "+testYYY"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY")));
- assertThat(newHashSet(resolver.resolve(context, Arrays.asList("+testYYY", "+testX*"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("test*", "-testYYY"))), equalTo(newHashSet("testXXX", "testXYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testX*", "testYYY"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testYYY", "testX*"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY")));
}
public void testConvertWildcardsOpenClosedIndicesTests() {
@@ -129,4 +129,19 @@ public class WildcardExpressionResolverTests extends ESTestCase {
return IndexMetaData.builder(index).settings(settings(Version.CURRENT).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
}
+ public void testForDeprecatedPlusPattern() {
+ MetaData.Builder mdBuilder = MetaData.builder()
+ .put(indexBuilder("testXXX").state(IndexMetaData.State.OPEN))
+ .put(indexBuilder("testXYY").state(IndexMetaData.State.OPEN))
+ .put(indexBuilder("testYYY").state(IndexMetaData.State.OPEN));
+ ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build();
+ IndexNameExpressionResolver.WildcardExpressionResolver resolver = new IndexNameExpressionResolver.WildcardExpressionResolver();
+
+ IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, true, true, true));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("+testX*", "-testYYY"))), equalTo(newHashSet("testXXX", "testXYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("+testYYY", "+testXY*"))), equalTo(newHashSet("testYYY", "testXYY")));
+ assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testYYY", "+testXX*"))), equalTo(newHashSet("testXXX", "testYYY")));
+ assertWarnings("support for '+' as part of index expressions is deprecated");
+ }
+
}
diff --git a/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java
index d2a520c328..d1b86aeaa1 100644
--- a/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java
@@ -85,14 +85,6 @@ public class SingleNodeDiscoveryIT extends ESIntegTestCase {
final UnicastHostsProvider provider =
() -> Collections.singletonList(nodeTransport.getLocalNode());
final CountDownLatch latch = new CountDownLatch(1);
- final UnicastZenPing unicastZenPing =
- new UnicastZenPing(settings, threadPool, pingTransport, provider) {
- @Override
- protected void finishPingingRound(PingingRound pingingRound) {
- latch.countDown();
- super.finishPingingRound(pingingRound);
- }
- };
final DiscoveryNodes nodes = DiscoveryNodes.builder()
.add(nodeTransport.getLocalNode())
.add(pingTransport.getLocalNode())
@@ -100,7 +92,15 @@ public class SingleNodeDiscoveryIT extends ESIntegTestCase {
.build();
final ClusterName clusterName = new ClusterName(internalCluster().getClusterName());
final ClusterState state = ClusterState.builder(clusterName).nodes(nodes).build();
- unicastZenPing.start(() -> state);
+ final UnicastZenPing unicastZenPing =
+ new UnicastZenPing(settings, threadPool, pingTransport, provider, () -> state) {
+ @Override
+ protected void finishPingingRound(PingingRound pingingRound) {
+ latch.countDown();
+ super.finishPingingRound(pingingRound);
+ }
+ };
+ unicastZenPing.start();
closeables.push(unicastZenPing);
final CompletableFuture<ZenPing.PingCollection> responses = new CompletableFuture<>();
unicastZenPing.ping(responses::complete, TimeValue.timeValueSeconds(3));
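// --- Illustrative sketch, not part of the patch above ---
// A tiny sketch (hypothetical Pinger type) of the constructor change visible in this hunk: the
// cluster-state supplier is now passed at construction time instead of to start(), so the
// component can never be started without its context and start() becomes a no-argument call.
import java.util.function.Supplier;

final class Pinger<S> {

    private final Supplier<S> stateSupplier; // injected up front instead of via start(supplier)
    private volatile boolean started;

    Pinger(Supplier<S> stateSupplier) {
        this.stateSupplier = stateSupplier;
    }

    void start() {
        started = true;
    }

    S currentState() {
        if (started == false) {
            throw new IllegalStateException("start() must be called before pinging");
        }
        return stateSupplier.get();
    }
}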
diff --git a/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java
index 917894b60d..135692fd64 100644
--- a/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java
@@ -21,9 +21,12 @@ package org.elasticsearch.discovery.single;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.cluster.service.ClusterApplier;
+import org.elasticsearch.cluster.service.MasterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransportService;
@@ -32,8 +35,10 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.util.Stack;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
-import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
+import static org.elasticsearch.test.ClusterServiceUtils.createMasterService;
import static org.hamcrest.Matchers.equalTo;
public class SingleNodeDiscoveryTests extends ESTestCase {
@@ -49,13 +54,26 @@ public class SingleNodeDiscoveryTests extends ESTestCase {
stack.push(transportService);
transportService.start();
final DiscoveryNode node = transportService.getLocalNode();
- final ClusterService clusterService = createClusterService(threadPool, node);
- stack.push(clusterService);
+ final MasterService masterService = createMasterService(threadPool, node);
+ AtomicReference<ClusterState> clusterState = new AtomicReference<>();
final SingleNodeDiscovery discovery =
new SingleNodeDiscovery(Settings.EMPTY, transportService,
- clusterService.getClusterApplierService());
+ masterService, new ClusterApplier() {
+ @Override
+ public void setInitialState(ClusterState initialState) {
+ clusterState.set(initialState);
+ }
+
+ @Override
+ public void onNewClusterState(String source, Supplier<ClusterState> clusterStateSupplier,
+ ClusterStateTaskListener listener) {
+ clusterState.set(clusterStateSupplier.get());
+ listener.clusterStateProcessed(source, clusterState.get(), clusterState.get());
+ }
+ });
+ discovery.start();
discovery.startInitialJoin();
- final DiscoveryNodes nodes = discovery.getInitialClusterState().nodes();
+ final DiscoveryNodes nodes = clusterState.get().nodes();
assertThat(nodes.getSize(), equalTo(1));
assertThat(nodes.getMasterNode().getId(), equalTo(node.getId()));
} finally {
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
index fa53f94f42..6aa47d27bb 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
@@ -188,40 +188,40 @@ public class UnicastZenPingTests extends ESTestCase {
.build();
Settings hostsSettingsMismatch = Settings.builder().put(hostsSettings).put(settingsMismatch).build();
- TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER);
ClusterState stateA = ClusterState.builder(state)
.blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK))
.nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A"))
.build();
- zenPingA.start(() -> stateA);
+ TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA);
+ zenPingA.start();
closeables.push(zenPingA);
- TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER);
ClusterState stateB = ClusterState.builder(state)
.nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B"))
.build();
- zenPingB.start(() -> stateB);
+ TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB);
+ zenPingB.start();
closeables.push(zenPingB);
+ ClusterState stateC = ClusterState.builder(stateMismatch)
+ .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C"))
+ .build();
TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC,
- EMPTY_HOSTS_PROVIDER) {
+ EMPTY_HOSTS_PROVIDER, () -> stateC) {
@Override
protected Version getVersion() {
return versionD;
}
};
- ClusterState stateC = ClusterState.builder(stateMismatch)
- .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C"))
- .build();
- zenPingC.start(() -> stateC);
+ zenPingC.start();
closeables.push(zenPingC);
- TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD,
- EMPTY_HOSTS_PROVIDER);
ClusterState stateD = ClusterState.builder(stateMismatch)
.nodes(DiscoveryNodes.builder().add(handleD.node).localNodeId("UZP_D"))
.build();
- zenPingD.start(() -> stateD);
+ TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD,
+ EMPTY_HOSTS_PROVIDER, () -> stateD);
+ zenPingD.start();
closeables.push(zenPingD);
logger.info("ping from UZP_A");
@@ -311,26 +311,26 @@ public class UnicastZenPingTests extends ESTestCase {
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build();
- final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER);
ClusterState stateA = ClusterState.builder(state)
.blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK))
.nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A"))
.build();
- zenPingA.start(() -> stateA);
+ final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA);
+ zenPingA.start();
closeables.push(zenPingA);
- TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER);
ClusterState stateB = ClusterState.builder(state)
.nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B"))
.build();
- zenPingB.start(() -> stateB);
+ TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB);
+ zenPingB.start();
closeables.push(zenPingB);
- TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, EMPTY_HOSTS_PROVIDER);
ClusterState stateC = ClusterState.builder(state)
.nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C"))
.build();
- zenPingC.start(() -> stateC);
+ TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, EMPTY_HOSTS_PROVIDER, () -> stateC);
+ zenPingC.start();
closeables.push(zenPingC);
// the presence of an unresolvable host should not prevent resolvable hosts from being pinged
@@ -604,24 +604,24 @@ public class UnicastZenPingTests extends ESTestCase {
// install a listener to check that no new connections are made
handleA.transportService.addConnectionListener(new TransportConnectionListener() {
@Override
- public void onConnectionOpened(DiscoveryNode node) {
- fail("should not open any connections. got [" + node + "]");
+ public void onConnectionOpened(Transport.Connection connection) {
+ fail("should not open any connections. got [" + connection.getNode() + "]");
}
});
- final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER);
final ClusterState stateA = ClusterState.builder(state)
.blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK))
.nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A"))
.build();
- zenPingA.start(() -> stateA);
+ final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA);
+ zenPingA.start();
closeables.push(zenPingA);
- TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER);
final ClusterState stateB = ClusterState.builder(state)
.nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B"))
.build();
- zenPingB.start(() -> stateB);
+ TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB);
+ zenPingB.start();
closeables.push(zenPingB);
Collection<ZenPing.PingResponse> pingResponses = zenPingA.pingAndWait().toList();
@@ -660,15 +660,15 @@ public class UnicastZenPingTests extends ESTestCase {
.blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK))
.nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")).build();
- final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER);
- zenPingA.start(() -> stateA);
+ final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA);
+ zenPingA.start();
closeables.push(zenPingA);
// Node B doesn't know about A!
final ClusterState stateB = ClusterState.builder(state).nodes(
DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")).build();
- TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER);
- zenPingB.start(() -> stateB);
+ TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB);
+ zenPingB.start();
closeables.push(zenPingB);
{
@@ -796,9 +796,9 @@ public class UnicastZenPingTests extends ESTestCase {
private static class TestUnicastZenPing extends UnicastZenPing {
TestUnicastZenPing(Settings settings, ThreadPool threadPool, NetworkHandle networkHandle,
- UnicastHostsProvider unicastHostsProvider) {
+ UnicastHostsProvider unicastHostsProvider, PingContextProvider contextProvider) {
super(Settings.builder().put("node.name", networkHandle.node.getName()).put(settings).build(),
- threadPool, networkHandle.transportService, unicastHostsProvider);
+ threadPool, networkHandle.transportService, unicastHostsProvider, contextProvider);
}
volatile CountDownLatch allTasksCompleted;
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java
index d0c138954a..b961b6d6fb 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java
@@ -26,6 +26,8 @@ import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateTaskListener;
+import org.elasticsearch.cluster.ESAllocationTestCase;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -37,6 +39,7 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
+import org.elasticsearch.cluster.service.ClusterApplier;
import org.elasticsearch.cluster.service.MasterService;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings;
@@ -67,7 +70,7 @@ import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import static java.util.Collections.emptyMap;
@@ -195,7 +198,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
ZenDiscovery masterZen = buildZenDiscovery(
settingsWithClusterName,
masterTransport, masterMasterService, threadPool);
- masterZen.setState(state);
+ masterZen.setCommittedState(state);
toClose.addFirst(masterZen);
masterTransport.acceptIncomingRequests();
@@ -209,7 +212,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
MasterService otherMasterService = ClusterServiceUtils.createMasterService(threadPool, otherNode);
toClose.addFirst(otherMasterService);
ZenDiscovery otherZen = buildZenDiscovery(settingsWithClusterName, otherTransport, otherMasterService, threadPool);
- otherZen.setState(otherState);
+ otherZen.setCommittedState(otherState);
toClose.addFirst(otherZen);
otherTransport.acceptIncomingRequests();
@@ -262,7 +265,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
toClose.addFirst(masterMasterService);
state = ClusterState.builder(discoveryState(masterMasterService).getClusterName()).nodes(state.nodes()).build();
ZenDiscovery masterZen = buildZenDiscovery(settings, masterTransport, masterMasterService, threadPool);
- masterZen.setState(state);
+ masterZen.setCommittedState(state);
toClose.addFirst(masterZen);
masterTransport.acceptIncomingRequests();
@@ -297,9 +300,19 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
private ZenDiscovery buildZenDiscovery(Settings settings, TransportService service, MasterService masterService,
ThreadPool threadPool) {
ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+ ClusterApplier clusterApplier = new ClusterApplier() {
+ @Override
+ public void setInitialState(ClusterState initialState) {
+
+ }
+
+ @Override
+ public void onNewClusterState(String source, Supplier<ClusterState> clusterStateSupplier, ClusterStateTaskListener listener) {
+ listener.clusterStateProcessed(source, clusterStateSupplier.get(), clusterStateSupplier.get());
+ }
+ };
ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, new NamedWriteableRegistry(ClusterModule.getNamedWriteables()),
- masterService, (source, clusterStateSupplier, listener) -> listener.clusterStateProcessed(source, clusterStateSupplier.get(), clusterStateSupplier.get()),
- clusterSettings, Collections::emptyList, null);
+ masterService, clusterApplier, clusterSettings, Collections::emptyList, ESAllocationTestCase.createAllocationService());
zenDiscovery.start();
return zenDiscovery;
}
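// --- Illustrative sketch, not part of the patch above ---
// A minimal stand-in for the test-only ClusterApplier built in buildZenDiscovery() above: it
// applies the supplied state synchronously on the calling thread and immediately reports it as
// processed, which is all a unit test needs. SyncApplier, Listener, and the type parameter are
// illustrative names only, not Elasticsearch types.
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

final class SyncApplier<S> {

    interface Listener<T> { void processed(String source, T oldState, T newState); }

    private final AtomicReference<S> applied = new AtomicReference<>();

    void setInitialState(S initialState) {
        applied.set(initialState);
    }

    void onNewState(String source, Supplier<S> stateSupplier, Listener<S> listener) {
        S newState = stateSupplier.get();
        S oldState = applied.getAndSet(newState);
        listener.processed(source, oldState, newState);
    }

    S appliedState() {
        return applied.get();
    }
}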
diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java
index 2bec3d5ede..3e4a3dce09 100644
--- a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java
@@ -24,7 +24,6 @@ import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.NoopDiscovery;
import org.hamcrest.Matchers;
import java.io.IOException;
@@ -36,7 +35,7 @@ public class GatewayServiceTests extends ESTestCase {
new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
null);
return new GatewayService(settings.build(),
- null, clusterService, null, null, null, new NoopDiscovery(), null);
+ null, clusterService, null, null, null, null);
}
public void testDefaultRecoverAfterTime() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java
index cd6622368c..9e199f71ce 100644
--- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java
@@ -153,24 +153,6 @@ public class InnerHitBuilderTests extends ESTestCase {
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
}
- public void testInlineLeafInnerHitsHasChildQuery() throws Exception {
- InnerHitBuilder leafInnerHits = randomInnerHits();
- HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder("type", new MatchAllQueryBuilder(), ScoreMode.None)
- .innerHit(leafInnerHits, false);
- Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
- hasChildQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
- assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
- }
-
- public void testInlineLeafInnerHitsHasParentQuery() throws Exception {
- InnerHitBuilder leafInnerHits = randomInnerHits();
- HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder("type", new MatchAllQueryBuilder(), false)
- .innerHit(leafInnerHits, false);
- Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
- hasParentQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
- assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
- }
-
public void testInlineLeafInnerHitsNestedQueryViaBoolQuery() {
InnerHitBuilder leafInnerHits = randomInnerHits();
NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
@@ -181,25 +163,6 @@ public class InnerHitBuilderTests extends ESTestCase {
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
}
- public void testInlineLeafInnerHitsNestedQueryViaDisMaxQuery() {
- InnerHitBuilder leafInnerHits1 = randomInnerHits();
- NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
- .innerHit(leafInnerHits1, false);
-
- InnerHitBuilder leafInnerHits2 = randomInnerHits();
- HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder("type", new MatchAllQueryBuilder(), ScoreMode.None)
- .innerHit(leafInnerHits2, false);
-
- DisMaxQueryBuilder disMaxQueryBuilder = new DisMaxQueryBuilder();
- disMaxQueryBuilder.add(nestedQueryBuilder);
- disMaxQueryBuilder.add(hasChildQueryBuilder);
- Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
- disMaxQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
- assertThat(innerHitBuilders.size(), equalTo(2));
- assertThat(innerHitBuilders.get(leafInnerHits1.getName()), notNullValue());
- assertThat(innerHitBuilders.get(leafInnerHits2.getName()), notNullValue());
- }
-
public void testInlineLeafInnerHitsNestedQueryViaConstantScoreQuery() {
InnerHitBuilder leafInnerHits = randomInnerHits();
NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
@@ -252,43 +215,6 @@ public class InnerHitBuilderTests extends ESTestCase {
assertThat(innerHitsContext.getInnerHits().size(), equalTo(0));
}
- public void testBuild_ignoreUnmappedHasChildQuery() throws Exception {
- QueryShardContext queryShardContext = mock(QueryShardContext.class);
- when(queryShardContext.documentMapper("type")).thenReturn(null);
- SearchContext searchContext = mock(SearchContext.class);
- when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);
-
- InnerHitBuilder leafInnerHits = randomInnerHits();
- HasChildQueryBuilder query1 = new HasChildQueryBuilder("type", new MatchAllQueryBuilder(), ScoreMode.None)
- .innerHit(leafInnerHits, false);
- expectThrows(IllegalStateException.class, () -> query1.innerHit().build(searchContext, new InnerHitsContext()));
-
- HasChildQueryBuilder query2 = new HasChildQueryBuilder("type", new MatchAllQueryBuilder(), ScoreMode.None)
- .innerHit(leafInnerHits, true);
- InnerHitsContext innerHitsContext = new InnerHitsContext();
- query2.innerHit().build(searchContext, innerHitsContext);
- assertThat(innerHitsContext.getInnerHits().size(), equalTo(0));
- }
-
- public void testBuild_ingoreUnmappedHasParentQuery() throws Exception {
- QueryShardContext queryShardContext = mock(QueryShardContext.class);
- when(queryShardContext.documentMapper("type")).thenReturn(null);
- SearchContext searchContext = mock(SearchContext.class);
- when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);
-
- InnerHitBuilder leafInnerHits = randomInnerHits();
- HasParentQueryBuilder query1 = new HasParentQueryBuilder("type", new MatchAllQueryBuilder(), false)
- .innerHit(leafInnerHits, false);
- expectThrows(IllegalStateException.class, () -> query1.innerHit().build(searchContext, new InnerHitsContext()));
-
- HasParentQueryBuilder query2 = new HasParentQueryBuilder("type", new MatchAllQueryBuilder(), false)
- .innerHit(leafInnerHits, true);
- InnerHitsContext innerHitsContext = new InnerHitsContext();
- query2.innerHit().build(searchContext, innerHitsContext);
- assertThat(innerHitsContext.getInnerHits().size(), equalTo(0));
- }
-
-
public static InnerHitBuilder randomInnerHits() {
return randomInnerHits(true, true);
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
index 267963878e..ed4fbcd53c 100644
--- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
@@ -61,7 +61,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
}
/**
- * @return a {@link HasChildQueryBuilder} with random values all over the place
+ * @return a {@link NestedQueryBuilder} with random values all over the place
*/
@Override
protected NestedQueryBuilder doCreateTestQueryBuilder() {
@@ -203,4 +203,46 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
}
+
+ public void testMinFromString() {
+ assertThat("fromString(min) != MIN", ScoreMode.Min, equalTo(NestedQueryBuilder.parseScoreMode("min")));
+ assertThat("min", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Min)));
+ }
+
+ public void testMaxFromString() {
+ assertThat("fromString(max) != MAX", ScoreMode.Max, equalTo(NestedQueryBuilder.parseScoreMode("max")));
+ assertThat("max", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Max)));
+ }
+
+ public void testAvgFromString() {
+ assertThat("fromString(avg) != AVG", ScoreMode.Avg, equalTo(NestedQueryBuilder.parseScoreMode("avg")));
+ assertThat("avg", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Avg)));
+ }
+
+ public void testSumFromString() {
+ assertThat("fromString(total) != SUM", ScoreMode.Total, equalTo(NestedQueryBuilder.parseScoreMode("sum")));
+ assertThat("sum", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Total)));
+ }
+
+ public void testNoneFromString() {
+ assertThat("fromString(none) != NONE", ScoreMode.None, equalTo(NestedQueryBuilder.parseScoreMode("none")));
+ assertThat("none", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.None)));
+ }
+
+ /**
+ * Should throw {@link IllegalArgumentException} instead of NPE.
+ */
+ public void testThatNullFromStringThrowsException() {
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> NestedQueryBuilder.parseScoreMode(null));
+ assertEquals("No score mode for child query [null] found", e.getMessage());
+ }
+
+ /**
+ * The failure message should not change (and the value should never match any score mode).
+ */
+ public void testThatUnrecognizedFromStringThrowsException() {
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> NestedQueryBuilder.parseScoreMode("unrecognized value"));
+ assertEquals("No score mode for child query [unrecognized value] found", e.getMessage());
+ }
}
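// --- Illustrative sketch, not part of the patch above ---
// A self-contained sketch of the round trip the new score-mode tests exercise: a string is
// parsed into a score mode and formatted back, and unknown or null input throws an
// IllegalArgumentException (never an NPE). ScoreMode here is a local stand-in enum, not
// Lucene's ScoreMode, and the class is hypothetical.
final class ScoreModes {

    enum ScoreMode { MIN, MAX, AVG, TOTAL, NONE }

    static ScoreMode parseScoreMode(String value) {
        if ("none".equals(value)) {
            return ScoreMode.NONE;
        } else if ("min".equals(value)) {
            return ScoreMode.MIN;
        } else if ("max".equals(value)) {
            return ScoreMode.MAX;
        } else if ("avg".equals(value)) {
            return ScoreMode.AVG;
        } else if ("sum".equals(value)) { // note: "sum" maps to TOTAL
            return ScoreMode.TOTAL;
        }
        throw new IllegalArgumentException("No score mode for child query [" + value + "] found");
    }

    static String scoreModeAsString(ScoreMode scoreMode) {
        if (scoreMode == ScoreMode.TOTAL) {
            return "sum"; // TOTAL is rendered as "sum" on the way out
        }
        return scoreMode.name().toLowerCase(java.util.Locale.ROOT);
    }
}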
diff --git a/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java
index 5db397ab16..bf23da1868 100644
--- a/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java
+++ b/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java
@@ -150,38 +150,24 @@ public class InternalSettingsPreparerTests extends ESTestCase {
}
}
- public void testMultipleSettingsFileNotAllowed() throws IOException {
- InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.yaml");
- InputStream json = getClass().getResourceAsStream("/config/elasticsearch.json");
+ public void testYamlNotAllowed() throws IOException {
+ InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.yml");
Path config = homeDir.resolve("config");
Files.createDirectory(config);
Files.copy(yaml, config.resolve("elasticsearch.yaml"));
- Files.copy(json, config.resolve("elasticsearch.json"));
-
SettingsException e = expectThrows(SettingsException.class, () ->
- InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null)
- );
- assertTrue(e.getMessage(), e.getMessage().contains("multiple settings files found with suffixes"));
- assertTrue(e.getMessage(), e.getMessage().contains(".yaml"));
- assertTrue(e.getMessage(), e.getMessage().contains(".json"));
- }
-
- public void testYmlExtension() throws IOException {
- InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.yaml");
- Path config = homeDir.resolve("config");
- Files.createDirectory(config);
- Files.copy(yaml, config.resolve("elasticsearch.yml"));
- Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null);
- assertEquals(".yml", env.configExtension());
+ InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null));
+ assertEquals("elasticsearch.yaml was deprecated in 5.5.0 and must be renamed to elasticsearch.yml", e.getMessage());
}
- public void testJsonExtension() throws IOException {
+ public void testJsonNotAllowed() throws IOException {
InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.json");
Path config = homeDir.resolve("config");
Files.createDirectory(config);
Files.copy(yaml, config.resolve("elasticsearch.json"));
- Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null);
- assertEquals(".json", env.configExtension());
+ SettingsException e = expectThrows(SettingsException.class, () ->
+ InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null));
+ assertEquals("elasticsearch.json was deprecated in 5.5.0 and must be converted to elasticsearch.yml", e.getMessage());
}
public void testSecureSettings() {
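// --- Illustrative sketch, not part of the patch above ---
// A hypothetical sketch of the check the rewritten settings tests describe: only
// elasticsearch.yml is accepted, while a leftover elasticsearch.yaml or elasticsearch.json
// fails with a clear message. checkDeprecatedConfigFiles() and SettingsFileException are
// assumptions for illustration, not the InternalSettingsPreparer API.
import java.nio.file.Files;
import java.nio.file.Path;

final class ConfigFileCheck {

    static final class SettingsFileException extends RuntimeException {
        SettingsFileException(String message) {
            super(message);
        }
    }

    static void checkDeprecatedConfigFiles(Path configDir) {
        if (Files.exists(configDir.resolve("elasticsearch.yaml"))) {
            throw new SettingsFileException(
                "elasticsearch.yaml was deprecated in 5.5.0 and must be renamed to elasticsearch.yml");
        }
        if (Files.exists(configDir.resolve("elasticsearch.json"))) {
            throw new SettingsFileException(
                "elasticsearch.json was deprecated in 5.5.0 and must be converted to elasticsearch.yml");
        }
    }
}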
diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
index 8514096b83..96767c99b9 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
@@ -270,8 +270,6 @@ public class SearchModuleTests extends ModuleTestCase {
"geo_distance",
"geo_polygon",
"geo_shape",
- "has_child",
- "has_parent",
"ids",
"match",
"match_all",
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java
index 4cfc87c171..a70c3c2d1b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java
@@ -26,20 +26,18 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.search.RestSearchAction;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.children.InternalChildrenTests;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilterTests;
-import org.elasticsearch.search.aggregations.bucket.global.InternalGlobalTests;
import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGridTests;
+import org.elasticsearch.search.aggregations.bucket.global.InternalGlobalTests;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogramTests;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogramTests;
import org.elasticsearch.search.aggregations.bucket.missing.InternalMissingTests;
import org.elasticsearch.search.aggregations.bucket.nested.InternalNestedTests;
import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNestedTests;
-import org.elasticsearch.search.aggregations.bucket.sampler.InternalSamplerTests;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistanceTests;
+import org.elasticsearch.search.aggregations.bucket.sampler.InternalSamplerTests;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests;
@@ -117,7 +115,8 @@ public class AggregationsTests extends ESTestCase {
aggsTests.add(new InternalMissingTests());
aggsTests.add(new InternalNestedTests());
aggsTests.add(new InternalReverseNestedTests());
- aggsTests.add(new InternalChildrenTests());
+ // TODO can we find a way to include the children aggregation in this test?
+ //aggsTests.add(new InternalChildrenTests());
aggsTests.add(new InternalGlobalTests());
aggsTests.add(new InternalFilterTests());
aggsTests.add(new InternalSamplerTests());
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java
index 8f888e13af..d3c092ab65 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java
@@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java
index 392f88b4d4..cb11574f86 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java
@@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.global;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java
index 75a28e87ce..71df2af079 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java
@@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.missing;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java
index f6299ebf7b..a43e0dd519 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java
@@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.nested;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java
index 08940fcd3a..ddfe10b27e 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java
@@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.nested;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java
index 0631908092..f6288a9712 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java
@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.sampler;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -48,4 +48,4 @@ public class InternalSamplerTests extends InternalSingleBucketAggregationTestCas
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedSampler.class;
}
-} \ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
index 6fbda92ba2..8eca505564 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
@@ -55,8 +55,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
@@ -246,268 +244,6 @@ public class InnerHitsIT extends ESIntegTestCase {
}
}
- public void testSimpleParentChild() throws Exception {
- assertAcked(prepareCreate("articles")
- .setSettings("index.mapping.single_type", false)
- .addMapping("article", "title", "type=text")
- .addMapping("comment", "_parent", "type=article", "message", "type=text,fielddata=true")
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("articles", "article", "1").setSource("title", "quick brown fox"));
- requests.add(client().prepareIndex("articles", "comment", "1").setParent("1").setSource("message", "fox eat quick"));
- requests.add(client().prepareIndex("articles", "comment", "2").setParent("1").setSource("message", "fox ate rabbit x y z"));
- requests.add(client().prepareIndex("articles", "comment", "3").setParent("1").setSource("message", "rabbit got away"));
- requests.add(client().prepareIndex("articles", "article", "2").setSource("title", "big gray elephant"));
- requests.add(client().prepareIndex("articles", "comment", "4").setParent("2").setSource("message", "elephant captured"));
- requests.add(client().prepareIndex("articles", "comment", "5").setParent("2").setSource("message", "mice squashed by elephant x"));
- requests.add(client().prepareIndex("articles", "comment", "6").setParent("2").setSource("message", "elephant scared by mice x y"));
- indexRandom(true, requests);
-
- SearchResponse response = client().prepareSearch("articles")
- .setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None)
- .innerHit(new InnerHitBuilder(), false))
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertSearchHit(response, 1, hasId("1"));
- assertThat(response.getHits().getAt(0).getShard(), notNullValue());
-
- assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
- SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.getTotalHits(), equalTo(2L));
-
- assertThat(innerHits.getAt(0).getId(), equalTo("1"));
- assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
- assertThat(innerHits.getAt(1).getId(), equalTo("2"));
- assertThat(innerHits.getAt(1).getType(), equalTo("comment"));
-
- response = client().prepareSearch("articles")
- .setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None)
- .innerHit(new InnerHitBuilder(), false))
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertSearchHit(response, 1, hasId("2"));
-
- assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
- innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.getTotalHits(), equalTo(3L));
-
- assertThat(innerHits.getAt(0).getId(), equalTo("4"));
- assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
- assertThat(innerHits.getAt(1).getId(), equalTo("5"));
- assertThat(innerHits.getAt(1).getType(), equalTo("comment"));
- assertThat(innerHits.getAt(2).getId(), equalTo("6"));
- assertThat(innerHits.getAt(2).getType(), equalTo("comment"));
-
- response = client().prepareSearch("articles")
- .setQuery(
- hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
- new InnerHitBuilder()
- .addDocValueField("message")
- .setHighlightBuilder(new HighlightBuilder().field("message"))
- .setExplain(true).setSize(1)
- .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5",
- Collections.emptyMap())),
- false)
- ).get();
- assertNoFailures(response);
- innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.getHits().length, equalTo(1));
- assertThat(innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
- assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(message:fox"));
- assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
- assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
- }
-
- public void testRandomParentChild() throws Exception {
- assertAcked(prepareCreate("idx")
- .setSettings("index.mapping.single_type", false)
- .addMapping("parent")
- .addMapping("child1", "_parent", "type=parent")
- .addMapping("child2", "_parent", "type=parent")
- );
- int numDocs = scaledRandomIntBetween(5, 50);
- List<IndexRequestBuilder> requestBuilders = new ArrayList<>();
-
- int child1 = 0;
- int child2 = 0;
- int[] child1InnerObjects = new int[numDocs];
- int[] child2InnerObjects = new int[numDocs];
- for (int parent = 0; parent < numDocs; parent++) {
- String parentId = String.format(Locale.ENGLISH, "%03d", parent);
- requestBuilders.add(client().prepareIndex("idx", "parent", parentId).setSource("{}", XContentType.JSON));
-
- int numChildDocs = child1InnerObjects[parent] = scaledRandomIntBetween(1, numDocs);
- int limit = child1 + numChildDocs;
- for (; child1 < limit; child1++) {
- requestBuilders.add(client().prepareIndex("idx", "child1",
- String.format(Locale.ENGLISH, "%04d", child1)).setParent(parentId).setSource("{}", XContentType.JSON));
- }
- numChildDocs = child2InnerObjects[parent] = scaledRandomIntBetween(1, numDocs);
- limit = child2 + numChildDocs;
- for (; child2 < limit; child2++) {
- requestBuilders.add(client().prepareIndex("idx", "child2",
- String.format(Locale.ENGLISH, "%04d", child2)).setParent(parentId).setSource("{}", XContentType.JSON));
- }
- }
- indexRandom(true, requestBuilders);
-
- int size = randomIntBetween(0, numDocs);
- BoolQueryBuilder boolQuery = new BoolQueryBuilder();
- boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery(), ScoreMode.None)
- .innerHit(new InnerHitBuilder().setName("a")
- .addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size), false)));
- boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery(), ScoreMode.None)
- .innerHit(new InnerHitBuilder().setName("b")
- .addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size), false)));
- SearchResponse searchResponse = client().prepareSearch("idx")
- .setSize(numDocs)
- .setTypes("parent")
- .addSort("_uid", SortOrder.ASC)
- .setQuery(boolQuery)
- .get();
-
- assertNoFailures(searchResponse);
- assertHitCount(searchResponse, numDocs);
- assertThat(searchResponse.getHits().getHits().length, equalTo(numDocs));
-
- int offset1 = 0;
- int offset2 = 0;
- for (int parent = 0; parent < numDocs; parent++) {
- SearchHit searchHit = searchResponse.getHits().getAt(parent);
- assertThat(searchHit.getType(), equalTo("parent"));
- assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "%03d", parent)));
- assertThat(searchHit.getShard(), notNullValue());
-
- SearchHits inner = searchHit.getInnerHits().get("a");
- assertThat(inner.getTotalHits(), equalTo((long) child1InnerObjects[parent]));
- for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
- SearchHit innerHit = inner.getAt(child);
- assertThat(innerHit.getType(), equalTo("child1"));
- String childId = String.format(Locale.ENGLISH, "%04d", offset1 + child);
- assertThat(innerHit.getId(), equalTo(childId));
- assertThat(innerHit.getNestedIdentity(), nullValue());
- }
- offset1 += child1InnerObjects[parent];
-
- inner = searchHit.getInnerHits().get("b");
- assertThat(inner.getTotalHits(), equalTo((long) child2InnerObjects[parent]));
- for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
- SearchHit innerHit = inner.getAt(child);
- assertThat(innerHit.getType(), equalTo("child2"));
- String childId = String.format(Locale.ENGLISH, "%04d", offset2 + child);
- assertThat(innerHit.getId(), equalTo(childId));
- assertThat(innerHit.getNestedIdentity(), nullValue());
- }
- offset2 += child2InnerObjects[parent];
- }
- }
-
- public void testInnerHitsOnHasParent() throws Exception {
- assertAcked(prepareCreate("stack")
- .setSettings("index.mapping.single_type", false)
- .addMapping("question", "body", "type=text")
- .addMapping("answer", "_parent", "type=question", "body", "type=text")
- );
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication "
- + "to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?"));
- requests.add(client().prepareIndex("stack", "answer", "1").setParent("1").setSource("body",
- "install fail2ban and enable rules for apache"));
- requests.add(client().prepareIndex("stack", "question", "2").setSource("body",
- "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?"));
- requests.add(client().prepareIndex("stack", "answer", "2").setParent("2").setSource("body",
- "Denyhosts protects only ssh; Fail2Ban protects all daemons."));
- indexRandom(true, requests);
-
- SearchResponse response = client().prepareSearch("stack")
- .setTypes("answer")
- .addSort("_uid", SortOrder.ASC)
- .setQuery(
- boolQuery()
- .must(matchQuery("body", "fail2ban"))
- .must(hasParentQuery("question", matchAllQuery(), false).innerHit(new InnerHitBuilder(), false))
- ).get();
- assertNoFailures(response);
- assertHitCount(response, 2);
-
- SearchHit searchHit = response.getHits().getAt(0);
- assertThat(searchHit.getId(), equalTo("1"));
- assertThat(searchHit.getType(), equalTo("answer"));
- assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
- assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("question"));
- assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1"));
-
- searchHit = response.getHits().getAt(1);
- assertThat(searchHit.getId(), equalTo("2"));
- assertThat(searchHit.getType(), equalTo("answer"));
- assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
- assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("question"));
- assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2"));
- }
-
- public void testParentChildMultipleLayers() throws Exception {
- assertAcked(prepareCreate("articles")
- .setSettings("index.mapping.single_type", false)
- .addMapping("article", "title", "type=text")
- .addMapping("comment", "_parent", "type=article", "message", "type=text")
- .addMapping("remark", "_parent", "type=comment", "message", "type=text")
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("articles", "article", "1").setSource("title", "quick brown fox"));
- requests.add(client().prepareIndex("articles", "comment", "1").setParent("1").setSource("message", "fox eat quick"));
- requests.add(client().prepareIndex("articles", "remark", "1").setParent("1").setRouting("1").setSource("message", "good"));
- requests.add(client().prepareIndex("articles", "article", "2").setSource("title", "big gray elephant"));
- requests.add(client().prepareIndex("articles", "comment", "2").setParent("2").setSource("message", "elephant captured"));
- requests.add(client().prepareIndex("articles", "remark", "2").setParent("2").setRouting("2").setSource("message", "bad"));
- indexRandom(true, requests);
-
- SearchResponse response = client().prepareSearch("articles")
- .setQuery(hasChildQuery("comment",
- hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder(), false),
- ScoreMode.None).innerHit(new InnerHitBuilder(), false))
- .get();
-
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertSearchHit(response, 1, hasId("1"));
-
- assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
- SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.getTotalHits(), equalTo(1L));
- assertThat(innerHits.getAt(0).getId(), equalTo("1"));
- assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
-
- innerHits = innerHits.getAt(0).getInnerHits().get("remark");
- assertThat(innerHits.getTotalHits(), equalTo(1L));
- assertThat(innerHits.getAt(0).getId(), equalTo("1"));
- assertThat(innerHits.getAt(0).getType(), equalTo("remark"));
-
- response = client().prepareSearch("articles")
- .setQuery(hasChildQuery("comment",
- hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder(), false),
- ScoreMode.None).innerHit(new InnerHitBuilder(), false))
- .get();
-
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertSearchHit(response, 1, hasId("2"));
-
- assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
- innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.getTotalHits(), equalTo(1L));
- assertThat(innerHits.getAt(0).getId(), equalTo("2"));
- assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
-
- innerHits = innerHits.getAt(0).getInnerHits().get("remark");
- assertThat(innerHits.getTotalHits(), equalTo(1L));
- assertThat(innerHits.getAt(0).getId(), equalTo("2"));
- assertThat(innerHits.getAt(0).getType(), equalTo("remark"));
- }
-
public void testNestedMultipleLayers() throws Exception {
assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject()
.startObject("article").startObject("properties")
@@ -724,92 +460,6 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
}
- public void testRoyals() throws Exception {
- assertAcked(
- prepareCreate("royals")
- .setSettings("index.mapping.single_type", false)
- .addMapping("king")
- .addMapping("prince", "_parent", "type=king")
- .addMapping("duke", "_parent", "type=prince")
- .addMapping("earl", "_parent", "type=duke")
- .addMapping("baron", "_parent", "type=earl")
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("royals", "king", "king").setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "prince", "prince").setParent("king").setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "duke", "duke").setParent("prince").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "earl", "earl1").setParent("duke").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "earl", "earl2").setParent("duke").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "earl", "earl3").setParent("duke").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "earl", "earl4").setParent("duke").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "baron", "baron1").setParent("earl1").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "baron", "baron2").setParent("earl2").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "baron", "baron3").setParent("earl3").setRouting("king")
- .setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("royals", "baron", "baron4").setParent("earl4").setRouting("king")
- .setSource("{}", XContentType.JSON));
- indexRandom(true, requests);
-
- SearchResponse response = client().prepareSearch("royals")
- .setTypes("duke")
- .setQuery(boolQuery()
- .filter(hasParentQuery("prince",
- hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings"), false),
- false).innerHit(new InnerHitBuilder().setName("princes"), false)
- )
- .filter(hasChildQuery("earl",
- hasChildQuery("baron", matchAllQuery(), ScoreMode.None)
- .innerHit(new InnerHitBuilder().setName("barons"), false),
- ScoreMode.None).innerHit(new InnerHitBuilder()
- .addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC))
- .setName("earls")
- .setSize(4), false)
- )
- )
- .get();
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).getId(), equalTo("duke"));
-
- SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls");
- assertThat(innerHits.getTotalHits(), equalTo(4L));
- assertThat(innerHits.getAt(0).getId(), equalTo("earl1"));
- assertThat(innerHits.getAt(1).getId(), equalTo("earl2"));
- assertThat(innerHits.getAt(2).getId(), equalTo("earl3"));
- assertThat(innerHits.getAt(3).getId(), equalTo("earl4"));
-
- SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons");
- assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
- assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1"));
-
- innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons");
- assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
- assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2"));
-
- innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons");
- assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
- assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3"));
-
- innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons");
- assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
- assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4"));
-
- innerHits = response.getHits().getAt(0).getInnerHits().get("princes");
- assertThat(innerHits.getTotalHits(), equalTo(1L));
- assertThat(innerHits.getAt(0).getId(), equalTo("prince"));
-
- innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings");
- assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
- assertThat(innerInnerHits.getAt(0).getId(), equalTo("king"));
- }
-
public void testMatchesQueriesNestedInnerHits() throws Exception {
XContentBuilder builder = jsonBuilder().startObject()
.startObject("type1")
@@ -914,84 +564,6 @@ public class InnerHitsIT extends ESIntegTestCase {
}
}
- public void testMatchesQueriesParentChildInnerHits() throws Exception {
- assertAcked(prepareCreate("index")
- .setSettings("index.mapping.single_type", false)
- .addMapping("child", "_parent", "type=parent"));
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("index", "parent", "1").setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("index", "child", "1").setParent("1").setSource("field", "value1"));
- requests.add(client().prepareIndex("index", "child", "2").setParent("1").setSource("field", "value2"));
- requests.add(client().prepareIndex("index", "parent", "2").setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("index", "child", "3").setParent("2").setSource("field", "value1"));
- indexRandom(true, requests);
-
- SearchResponse response = client().prepareSearch("index")
- .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None)
- .innerHit(new InnerHitBuilder(), false))
- .addSort("_uid", SortOrder.ASC)
- .get();
- assertHitCount(response, 2);
- assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
-
- assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
- assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
- assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
-
- QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None)
- .innerHit(new InnerHitBuilder(), false);
- response = client().prepareSearch("index")
- .setQuery(query)
- .addSort("_uid", SortOrder.ASC)
- .get();
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2"));
- }
-
- public void testDontExplode() throws Exception {
- assertAcked(prepareCreate("index1")
- .setSettings("index.mapping.single_type", false)
- .addMapping("child", "_parent", "type=parent"));
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("index1", "parent", "1").setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("index1", "child", "1").setParent("1").setSource("field", "value1"));
- indexRandom(true, requests);
-
- QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None)
- .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1), false);
- SearchResponse response = client().prepareSearch("index1")
- .setQuery(query)
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
-
- assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested"));
- client().prepareIndex("index2", "type", "1").setSource(jsonBuilder().startObject()
- .startArray("nested")
- .startObject()
- .field("field", "value1")
- .endObject()
- .endArray()
- .endObject())
- .setRefreshPolicy(IMMEDIATE)
- .get();
-
- query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
- .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1), false);
- response = client().prepareSearch("index2")
- .setQuery(query)
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- }
-
public void testNestedSourceFiltering() throws Exception {
assertAcked(prepareCreate("index1").addMapping("message", "comments", "type=nested"));
client().prepareIndex("index1", "message", "1").setSource(jsonBuilder().startObject()
@@ -1021,25 +593,6 @@ public class InnerHitsIT extends ESIntegTestCase {
equalTo("fox ate rabbit x y z"));
}
- public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception {
- assertAcked(prepareCreate("test")
- .setSettings("index.mapping.single_type", false)
- .addMapping("child_type", "_parent", "type=parent_type", "nested_type", "type=nested"));
- client().prepareIndex("test", "parent_type", "1").setSource("key", "value").get();
- client().prepareIndex("test", "child_type", "2").setParent("1").setSource("nested_type", Collections.singletonMap("key", "value"))
- .get();
- refresh();
- SearchResponse response = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchQuery("key", "value"))
- .should(hasChildQuery("child_type", nestedQuery("nested_type", matchAllQuery(), ScoreMode.None)
- .innerHit(new InnerHitBuilder(), false), ScoreMode.None).innerHit(new InnerHitBuilder(), false)))
- .get();
- assertHitCount(response, 1);
- SearchHit hit = response.getHits().getAt(0);
- assertThat(hit.getInnerHits().get("child_type").getAt(0).field("_parent").getValue(), equalTo("1"));
- assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue());
- }
-
public void testInnerHitsWithIgnoreUnmapped() throws Exception {
assertAcked(prepareCreate("index1")
.setSettings("index.mapping.single_type", false)
@@ -1062,17 +615,6 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 2);
assertSearchHits(response, "1", "3");
-
- response = client().prepareSearch("index1", "index2")
- .setQuery(boolQuery()
- .should(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
- .innerHit(new InnerHitBuilder(), true))
- .should(termQuery("key", "value"))
- )
- .get();
- assertNoFailures(response);
- assertHitCount(response, 2);
- assertSearchHits(response, "1", "3");
}
}
diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
index 5b33fb1c8d..bcb9efd5b5 100644
--- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
+++ b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
@@ -19,8 +19,6 @@
package org.elasticsearch.test;
import org.elasticsearch.cluster.ClusterChangedEvent;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.discovery.Discovery;
@@ -34,16 +32,6 @@ public class NoopDiscovery implements Discovery {
}
@Override
- public ClusterState getInitialClusterState() {
- return null;
- }
-
- @Override
- public ClusterState clusterState() {
- return null;
- }
-
- @Override
public DiscoveryStats stats() {
return null;
}
@@ -54,11 +42,6 @@ public class NoopDiscovery implements Discovery {
}
@Override
- public int getMinimumMasterNodes() {
- return -1;
- }
-
- @Override
public Lifecycle.State lifecycleState() {
return null;
}
diff --git a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
index c14d6ec9e0..eb9e649652 100644
--- a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
@@ -204,7 +204,7 @@ public class TCPTransportTests extends ESTestCase {
@Override
protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) throws IOException {
- return new NodeChannels(node, new Object[profile.getNumConnections()], profile);
+ return new NodeChannels(node, new Object[profile.getNumConnections()], profile, c -> {});
}
@Override
@@ -220,7 +220,7 @@ public class TCPTransportTests extends ESTestCase {
@Override
public NodeChannels getConnection(DiscoveryNode node) {
return new NodeChannels(node, new Object[MockTcpTransport.LIGHT_PROFILE.getNumConnections()],
- MockTcpTransport.LIGHT_PROFILE);
+ MockTcpTransport.LIGHT_PROFILE, c -> {});
}
};
DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT);
diff --git a/core/src/test/resources/config/elasticsearch.properties b/core/src/test/resources/config/elasticsearch.properties
deleted file mode 100644
index d3f822cafb..0000000000
--- a/core/src/test/resources/config/elasticsearch.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-
-properties.config.exists: true
diff --git a/core/src/test/resources/config/elasticsearch.yaml b/core/src/test/resources/config/elasticsearch.yml
index b6ebc6bd10..b6ebc6bd10 100644
--- a/core/src/test/resources/config/elasticsearch.yaml
+++ b/core/src/test/resources/config/elasticsearch.yml
diff --git a/docs/java-api/aggregations/bucket/children-aggregation.asciidoc b/docs/java-api/aggregations/bucket/children-aggregation.asciidoc
index 1bf8a5b26e..f6a23fdafe 100644
--- a/docs/java-api/aggregations/bucket/children-aggregation.asciidoc
+++ b/docs/java-api/aggregations/bucket/children-aggregation.asciidoc
@@ -24,7 +24,7 @@ Import Aggregation definition classes:
[source,java]
--------------------------------------------------
-import org.elasticsearch.search.aggregations.bucket.children.Children;
+import org.elasticsearch.join.aggregations.Children;
--------------------------------------------------
[source,java]
diff --git a/docs/reference/setup/sysconfig/swap.asciidoc b/docs/reference/setup/sysconfig/swap.asciidoc
index 78ca7d40be..df9ad7baba 100644
--- a/docs/reference/setup/sysconfig/swap.asciidoc
+++ b/docs/reference/setup/sysconfig/swap.asciidoc
@@ -2,32 +2,64 @@
=== Disable swapping
Most operating systems try to use as much memory as possible for file system
-caches and eagerly swap out unused application memory. This can result in
-parts of the JVM heap being swapped out to disk.
+caches and eagerly swap out unused application memory. This can result in parts
+of the JVM heap or even its executable pages being swapped out to disk.
-Swapping is very bad for performance and for node stability and should be
-avoided at all costs. It can cause garbage collections to last for **minutes**
-instead of milliseconds and can cause nodes to respond slowly or even to
-disconnect from the cluster.
+Swapping is very bad for performance and for node stability, and should be
+avoided at all costs. It can cause garbage collections to last for **minutes**
+instead of milliseconds and can cause nodes to respond slowly or even to
+disconnect from the cluster. In a resilient distributed system, it is more
+effective to let the operating system kill the node than to leave it swapping.
-There are three approaches to disabling swapping:
+There are three approaches to disabling swapping. The preferred option is to
+disable swap completely. If this is not an option, whether to prefer minimizing
+swappiness or memory locking depends on your environment.
+
+[[disable-swap-files]]
+==== Disable all swap files
+
+Usually Elasticsearch is the only service running on a box, and its memory usage
+is controlled by the JVM options. There should be no need to have swap enabled.
+
+On Linux systems, you can disable swap temporarily by running:
+
+[source,sh]
+--------------
+sudo swapoff -a
+--------------
+
+To disable it permanently, you will need to edit the `/etc/fstab` file and
+comment out any lines that contain the word `swap`.
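+
+For reference, a sketch of what this looks like (the device name below is
+purely illustrative; the exact entry depends on your system): a swap line in
+`/etc/fstab` is disabled by prefixing it with `#`:
+
+[source,sh]
+--------------
+# /dev/sda2  none  swap  sw  0  0
+--------------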
+
+On Windows, the equivalent can be achieved by disabling the paging file entirely
+via `System Properties → Advanced → Performance → Advanced → Virtual memory`.
+
+[[swappiness]]
+==== Configure `swappiness`
+
+Another option available on Linux systems is to ensure that the sysctl value
+`vm.swappiness` is set to `1`. This reduces the kernel's tendency to swap and
+should not lead to swapping under normal circumstances, while still allowing the
+whole system to swap in emergency conditions.
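+
+As a minimal sketch (the file name under `/etc/sysctl.d/` is an arbitrary
+example), the value can be applied immediately and persisted across reboots
+with:
+
+[source,sh]
+--------------
+sudo sysctl -w vm.swappiness=1
+echo "vm.swappiness=1" | sudo tee /etc/sysctl.d/90-swappiness.conf
+--------------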
[[mlockall]]
==== Enable `bootstrap.memory_lock`
-The first option is to use
-http://opengroup.org/onlinepubs/007908799/xsh/mlockall.html[mlockall] on Linux/Unix systems, or https://msdn.microsoft.com/en-us/library/windows/desktop/aa366895%28v=vs.85%29.aspx[VirtualLock] on Windows, to
-try to lock the process address space into RAM, preventing any Elasticsearch
-memory from being swapped out. This can be done, by adding this line
-to the `config/elasticsearch.yml` file:
+Another option is to use
+http://opengroup.org/onlinepubs/007908799/xsh/mlockall.html[mlockall] on
+Linux/Unix systems, or
+https://msdn.microsoft.com/en-us/library/windows/desktop/aa366895%28v=vs.85%29.aspx[VirtualLock]
+on Windows, to try to lock the process address space into RAM, preventing any
+Elasticsearch memory from being swapped out. This can be done by adding this
+line to the `config/elasticsearch.yml` file:
[source,yaml]
--------------
bootstrap.memory_lock: true
--------------
-WARNING: `mlockall` might cause the JVM or shell session to exit if it tries
-to allocate more memory than is available!
+WARNING: `mlockall` might cause the JVM or shell session to exit if it tries to
+allocate more memory than is available!
After starting Elasticsearch, you can see whether this setting was applied
successfully by checking the value of `mlockall` in the output from this
@@ -40,11 +72,12 @@ GET _nodes?filter_path=**.mlockall
// CONSOLE
If you see that `mlockall` is `false`, then it means that the `mlockall`
-request has failed. You will also see a line with more information in the
-logs with the words `Unable to lock JVM Memory`.
+request has failed. You will also see a line with more information in the logs
+with the words `Unable to lock JVM Memory`.
The most probable reason, on Linux/Unix systems, is that the user running
-Elasticsearch doesn't have permission to lock memory. This can be granted as follows:
+Elasticsearch doesn't have permission to lock memory. This can be granted as
+follows:
`.zip` and `.tar.gz`::
@@ -55,13 +88,13 @@ Elasticsearch doesn't have permission to lock memory. This can be granted as fo
RPM and Debian::
Set `MAX_LOCKED_MEMORY` to `unlimited` in the
- <<sysconfig,system configuration file>> (or see below for systems using `systemd`).
+ <<sysconfig,system configuration file>> (or see below for systems using
+ `systemd`).
Systems using `systemd`::
Set `LimitMEMLOCK` to `infinity` in the <<systemd,systemd configuration>>.
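+
+As a sketch, one common way to do this is a unit override created with
+`sudo systemctl edit elasticsearch`, containing:
+
+--------------
+[Service]
+LimitMEMLOCK=infinity
+--------------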
-
Another possible reason why `mlockall` can fail is that the temporary directory
(usually `/tmp`) is mounted with the `noexec` option. This can be solved by
specifying a new temp directory using the `ES_JAVA_OPTS` environment variable:
@@ -73,26 +106,3 @@ export ES_JAVA_OPTS="$ES_JAVA_OPTS -Djava.io.tmpdir=/path/to/temp/dir"
--------------
or setting this JVM flag in the jvm.options configuration file.
-
-[[disable-swap-files]]
-==== Disable all swap files
-
-The second option is to completely disable swap. Usually Elasticsearch
-is the only service running on a box, and its memory usage is controlled
-by the JVM options. There should be no need to have swap enabled.
-
-On Linux systems, you can disable swap temporarily
-by running: `sudo swapoff -a`. To disable it permanently, you will need
-to edit the `/etc/fstab` file and comment out any lines that contain the
-word `swap`.
-
-On Windows, the equivalent can be achieved by disabling the paging file entirely
-via `System Properties → Advanced → Performance → Advanced → Virtual memory`.
-
-[[swappiness]]
-==== Configure `swappiness`
-
-Another option available on Linux systems is to ensure that the sysctl value
-`vm.swappiness` is set to `1`. This reduces the kernel's tendency to swap and
-should not lead to swapping under normal circumstances, while still allowing
-the whole system to swap in emergency conditions.
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java
index 3cf35c7611..8ecd4d99eb 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java
@@ -153,7 +153,7 @@ public final class ConvertProcessor extends AbstractProcessor {
if (oldValue instanceof List) {
List<?> list = (List<?>) oldValue;
- List<Object> newList = new ArrayList<>();
+ List<Object> newList = new ArrayList<>(list.size());
for (Object value : list) {
newList.add(convertType.convert(value));
}
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java
index 676cf2e741..e603413887 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java
@@ -54,7 +54,7 @@ public final class DateProcessor extends AbstractProcessor {
this.field = field;
this.targetField = targetField;
this.formats = formats;
- this.dateParsers = new ArrayList<>();
+ this.dateParsers = new ArrayList<>(this.formats.size());
for (String format : formats) {
DateFormat dateFormat = DateFormat.fromString(format);
dateParsers.add(dateFormat.getFunction(format, timezone, locale));
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
index dd1b51a515..ebd2503222 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
@@ -240,7 +240,7 @@ public final class Def {
}
// convert recipe string to a bitset for convenience (the code below should be refactored...)
- BitSet lambdaArgs = new BitSet();
+ BitSet lambdaArgs = new BitSet(recipeString.length());
for (int i = 0; i < recipeString.length(); i++) {
lambdaArgs.set(recipeString.charAt(i));
}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
index ad843f86af..748ff67ddd 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
@@ -113,7 +113,7 @@ public final class ELambda extends AExpression implements ILambda {
// we don't know anything: treat as def
returnType = Definition.DEF_TYPE;
// don't infer any types, replace any null types with def
- actualParamTypeStrs = new ArrayList<>();
+ actualParamTypeStrs = new ArrayList<>(paramTypeStrs.size());
for (String type : paramTypeStrs) {
if (type == null) {
actualParamTypeStrs.add("def");
@@ -139,7 +139,7 @@ public final class ELambda extends AExpression implements ILambda {
returnType = interfaceMethod.rtn;
}
// replace any null types with the actual type
- actualParamTypeStrs = new ArrayList<>();
+ actualParamTypeStrs = new ArrayList<>(paramTypeStrs.size());
for (int i = 0; i < paramTypeStrs.size(); i++) {
String paramType = paramTypeStrs.get(i);
if (paramType == null) {
@@ -162,8 +162,8 @@ public final class ELambda extends AExpression implements ILambda {
}
}
// prepend capture list to lambda's arguments
- List<String> paramTypes = new ArrayList<>();
- List<String> paramNames = new ArrayList<>();
+ List<String> paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size());
+ List<String> paramNames = new ArrayList<>(captures.size() + paramNameStrs.size());
for (Variable var : captures) {
paramTypes.add(var.type.name);
paramNames.add(var.name);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
index d159834bd2..2d0734915a 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
@@ -19,6 +19,7 @@
package org.elasticsearch.painless.node;
+import java.util.Collections;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Globals;
@@ -97,10 +98,7 @@ final class PSubDefCall extends AExpression {
if (argument instanceof ILambda) {
ILambda lambda = (ILambda) argument;
-
- for (Type capture : lambda.getCaptures()) {
- parameterTypes.add(capture);
- }
+ Collections.addAll(parameterTypes, lambda.getCaptures());
}
argument.write(writer, globals);
diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle
new file mode 100644
index 0000000000..67bcc9d54e
--- /dev/null
+++ b/modules/parent-join/build.gradle
@@ -0,0 +1,24 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+esplugin {
+ description 'This module adds support for parent-child queries and aggregations'
+ classname 'org.elasticsearch.join.ParentJoinPlugin'
+ hasClientJar = true
+}
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/ParentJoinPlugin.java b/modules/parent-join/src/main/java/org/elasticsearch/join/ParentJoinPlugin.java
new file mode 100644
index 0000000000..dec3950836
--- /dev/null
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/ParentJoinPlugin.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.join;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
+import org.elasticsearch.join.aggregations.InternalChildren;
+import org.elasticsearch.join.query.HasChildQueryBuilder;
+import org.elasticsearch.join.query.HasParentQueryBuilder;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.SearchPlugin;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public class ParentJoinPlugin extends Plugin implements SearchPlugin {
+ public ParentJoinPlugin(Settings settings) {}
+
+ @Override
+ public List<QuerySpec<?>> getQueries() {
+ return Arrays.asList(
+ new QuerySpec<>(HasChildQueryBuilder.NAME, HasChildQueryBuilder::new, HasChildQueryBuilder::fromXContent),
+ new QuerySpec<>(HasParentQueryBuilder.NAME, HasParentQueryBuilder::new, HasParentQueryBuilder::fromXContent)
+ );
+ }
+
+ @Override
+ public List<AggregationSpec> getAggregations() {
+ return Collections.singletonList(
+ new AggregationSpec(ChildrenAggregationBuilder.NAME, ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse)
+ .addResultReader(InternalChildren::new)
+ );
+ }
+
+
+}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/Children.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/Children.java
index b1e4b2877a..394c690709 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/Children.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/Children.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java
index 3a0d2fff98..d04b1f0a66 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorFactory.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregatorFactory.java
index b0a4c64305..800be74ba6 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorFactory.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregatorFactory.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.apache.lucene.search.Query;
import org.elasticsearch.search.aggregations.Aggregator;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/InternalChildren.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/InternalChildren.java
index 05a38c8cd5..05cd40e3d3 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/InternalChildren.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/InternalChildren.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.InternalAggregations;
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationBuilders.java
index b2fab7746f..73522a68b4 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationBuilders.java
@@ -17,17 +17,13 @@
* under the License.
*/
-package org.elasticsearch.bootstrap;
+package org.elasticsearch.join.aggregations;
-import org.elasticsearch.test.ESTestCase;
-
-public class BootstrapTests extends ESTestCase {
-
- public void testConfigDeprecation() {
- Bootstrap.checkConfigExtension(".json");
- assertWarnings("elasticsearch.json is deprecated; rename your configuration file to elasticsearch.yaml");
- Bootstrap.checkConfigExtension(".yml");
- assertWarnings("elasticsearch.yml is deprecated; rename your configuration file to elasticsearch.yaml");
- Bootstrap.checkConfigExtension(".yaml"); // no warnings, will be checked in @After
+public abstract class JoinAggregationBuilders {
+ /**
+ * Create a new {@link Children} aggregation with the given name.
+ */
+ public static ChildrenAggregationBuilder children(String name, String childType) {
+ return new ChildrenAggregationBuilder(name, childType);
}
}
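
A minimal usage sketch of the factory above (assumptions: a connected `Client`
named `client`, the `articles`/`comment` parent-child mapping used in the tests
earlier in this change, and a `message` text field with fielddata enabled for
the terms sub-aggregation):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.join.aggregations.Children;
import org.elasticsearch.join.aggregations.JoinAggregationBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;

// Bucket the "comment" children of each matching "article" parent.
SearchResponse response = client.prepareSearch("articles")
    .addAggregation(
        JoinAggregationBuilders.children("to_comments", "comment")
            .subAggregation(AggregationBuilders.terms("top_words").field("message")))
    .get();

Children toComments = response.getAggregations().get("to_comments");
long commentCount = toComments.getDocCount(); // number of child comment docs
--------------------------------------------------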
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
index 37a443e9ba..c1ffb097ab 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParsedChildren.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedChildren.java
index 9ce6661923..fa370325f7 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParsedChildren.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParsedChildren.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java
index 18ad7f9f31..494c5e498e 100644
--- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.index.query;
+package org.elasticsearch.join.query;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -38,6 +38,14 @@ import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
+import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.InnerHitBuilder;
+import org.elasticsearch.index.query.NestedQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryRewriteContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
import java.util.Locale;
@@ -210,7 +218,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
builder.field(QUERY_FIELD.getPreferredName());
query.toXContent(builder, params);
builder.field(TYPE_FIELD.getPreferredName(), type);
- builder.field(SCORE_MODE_FIELD.getPreferredName(), scoreModeAsString(scoreMode));
+ builder.field(SCORE_MODE_FIELD.getPreferredName(), NestedQueryBuilder.scoreModeAsString(scoreMode));
builder.field(MIN_CHILDREN_FIELD.getPreferredName(), minChildren);
builder.field(MAX_CHILDREN_FIELD.getPreferredName(), maxChildren);
builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
@@ -251,7 +259,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
if (TYPE_FIELD.match(currentFieldName)) {
childType = parser.text();
} else if (SCORE_MODE_FIELD.match(currentFieldName)) {
- scoreMode = parseScoreMode(parser.text());
+ scoreMode = NestedQueryBuilder.parseScoreMode(parser.text());
} else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) {
boost = parser.floatValue();
} else if (MIN_CHILDREN_FIELD.match(currentFieldName)) {
@@ -278,30 +286,6 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
return hasChildQueryBuilder;
}
- public static ScoreMode parseScoreMode(String scoreModeString) {
- if ("none".equals(scoreModeString)) {
- return ScoreMode.None;
- } else if ("min".equals(scoreModeString)) {
- return ScoreMode.Min;
- } else if ("max".equals(scoreModeString)) {
- return ScoreMode.Max;
- } else if ("avg".equals(scoreModeString)) {
- return ScoreMode.Avg;
- } else if ("sum".equals(scoreModeString)) {
- return ScoreMode.Total;
- }
- throw new IllegalArgumentException("No score mode for child query [" + scoreModeString + "] found");
- }
-
- public static String scoreModeAsString(ScoreMode scoreMode) {
- if (scoreMode == ScoreMode.Total) {
- // Lucene uses 'total' but 'sum' is more consistent with other elasticsearch APIs
- return "sum";
- } else {
- return scoreMode.name().toLowerCase(Locale.ROOT);
- }
- }
-
@Override
public String getWriteableName() {
return NAME;
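The score-mode helpers removed from this builder are not lost: has_child now delegates to the equivalent public methods on NestedQueryBuilder, which implement the same string table (none, min, max, avg, sum, with Lucene's ScoreMode.Total exposed as "sum"). A short sketch of the delegated round trip:

    ScoreMode mode = NestedQueryBuilder.parseScoreMode("sum");       // ScoreMode.Total
    String asString = NestedQueryBuilder.scoreModeAsString(mode);    // back to "sum"
    // unrecognised values throw IllegalArgumentException rather than falling back to a default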
diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java
index 63c9484691..ca0bfd623d 100644
--- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.index.query;
+package org.elasticsearch.join.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@@ -33,6 +33,13 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
+import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.InnerHitBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryRewriteContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
import java.util.HashSet;
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/JoinQueryBuilders.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/JoinQueryBuilders.java
new file mode 100644
index 0000000000..af778f400f
--- /dev/null
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/JoinQueryBuilders.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.join.query;
+
+import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.index.query.QueryBuilder;
+
+public abstract class JoinQueryBuilders {
+ /**
+ * Constructs a new has_child query, with the child type and the query to run on the child documents. The
+ * results of this query are the parent docs that those child docs matched.
+ *
+ * @param type The child type.
+ * @param query The query.
+ * @param scoreMode How the scores from the children hits should be aggregated into the parent hit.
+ */
+ public static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder query, ScoreMode scoreMode) {
+ return new HasChildQueryBuilder(type, query, scoreMode);
+ }
+
+ /**
+ * Constructs a new parent query, with the parent type and the query to run on the parent documents. The
+ * results of this query are the children docs that those parent docs matched.
+ *
+ * @param type The parent type.
+ * @param query The query.
+ * @param score Whether the score from the parent hit should propagate to the child hit
+ */
+ public static HasParentQueryBuilder hasParentQuery(String type, QueryBuilder query, boolean score) {
+ return new HasParentQueryBuilder(type, query, score);
+ }
+
+}
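A minimal sketch of the new factories in use; it mirrors calls made by the migrated ChildQuerySearchIT below, and the index name, types and fields are just that test's fixture, not part of the API:

    // assumes static imports of JoinQueryBuilders.hasChildQuery, JoinQueryBuilders.hasParentQuery
    // and QueryBuilders.termQuery

    // parents whose children match; child scores are ignored
    SearchResponse parents = client().prepareSearch("test")
            .setQuery(hasChildQuery("child", termQuery("c_field", "c_value1"), ScoreMode.None))
            .get();

    // children whose parent matches; 'false' means the parent score is not propagated
    SearchResponse children = client().prepareSearch("test")
            .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))
            .get();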
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java
new file mode 100644
index 0000000000..666fa736d4
--- /dev/null
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.join;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+
+public class ParentChildClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+ public ParentChildClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
+ super(testCandidate);
+ }
+
+ @ParametersFactory
+ public static Iterable<Object[]> parameters() throws Exception {
+ return createParameters();
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java
index bfe483ca89..8da6dbcdf6 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket;
+package org.elasticsearch.join.aggregations;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -26,27 +26,33 @@ import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.bucket.children.Children;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.elasticsearch.test.ESIntegTestCase.Scope;
+import org.junit.Before;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.children;
+import static org.elasticsearch.join.aggregations.JoinAggregationBuilders.children;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits;
@@ -59,13 +65,28 @@ import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
-@ESIntegTestCase.SuiteScopeTestCase
+@ClusterScope(scope = Scope.SUITE)
public class ChildrenIT extends ESIntegTestCase {
-
private static final Map<String, Control> categoryToControl = new HashMap<>();
@Override
- public void setupSuiteScopeCluster() throws Exception {
+ protected boolean ignoreExternalCluster() {
+ return true;
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Collections.singleton(ParentJoinPlugin.class);
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> transportClientPlugins() {
+ return nodePlugins();
+ }
+
+ @Before
+ public void setupCluster() throws Exception {
+ categoryToControl.clear();
assertAcked(
prepareCreate("test")
.setSettings("index.mapping.single_type", false)
@@ -95,7 +116,8 @@ public class ChildrenIT extends ESIntegTestCase {
control.articleIds.add(id);
}
- requests.add(client().prepareIndex("test", "article", id).setCreate(true).setSource("category", categories, "randomized", true));
+ requests.add(client()
+ .prepareIndex("test", "article", id).setCreate(true).setSource("category", categories, "randomized", true));
}
String[] commenters = new String[randomIntBetween(5, 50)];
@@ -116,17 +138,24 @@ public class ChildrenIT extends ESIntegTestCase {
control.commenterToCommentId.put(commenter, ids = new HashSet<>());
}
ids.add(idValue);
- requests.add(client().prepareIndex("test", "comment", idValue).setCreate(true).setParent(articleId).setSource("commenter", commenter));
+ requests.add(client().prepareIndex("test", "comment", idValue)
+ .setCreate(true).setParent(articleId).setSource("commenter", commenter));
}
}
}
- requests.add(client().prepareIndex("test", "article", "a").setSource("category", new String[]{"a"}, "randomized", false));
- requests.add(client().prepareIndex("test", "article", "b").setSource("category", new String[]{"a", "b"}, "randomized", false));
- requests.add(client().prepareIndex("test", "article", "c").setSource("category", new String[]{"a", "b", "c"}, "randomized", false));
- requests.add(client().prepareIndex("test", "article", "d").setSource("category", new String[]{"c"}, "randomized", false));
- requests.add(client().prepareIndex("test", "comment", "a").setParent("a").setSource("{}", XContentType.JSON));
- requests.add(client().prepareIndex("test", "comment", "c").setParent("c").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("test", "article", "a")
+ .setSource("category", new String[]{"a"}, "randomized", false));
+ requests.add(client().prepareIndex("test", "article", "b")
+ .setSource("category", new String[]{"a", "b"}, "randomized", false));
+ requests.add(client().prepareIndex("test", "article", "c")
+ .setSource("category", new String[]{"a", "b", "c"}, "randomized", false));
+ requests.add(client().prepareIndex("test", "article", "d")
+ .setSource("category", new String[]{"c"}, "randomized", false));
+ requests.add(client().prepareIndex("test", "comment", "a")
+ .setParent("a").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("test", "comment", "c")
+ .setParent("c").setSource("{}", XContentType.JSON));
indexRandom(true, requests);
ensureSearchable("test");
@@ -155,7 +184,8 @@ public class ChildrenIT extends ESIntegTestCase {
Children childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size()));
- assertThat((long) ((InternalAggregation)childrenBucket).getProperty("_count"), equalTo((long) entry1.getValue().commentIds.size()));
+ assertThat((long) ((InternalAggregation)childrenBucket).getProperty("_count"),
+ equalTo((long) entry1.getValue().commentIds.size()));
Terms commentersTerms = childrenBucket.getAggregations().get("commenters");
assertThat((Terms) ((InternalAggregation)childrenBucket).getProperty("commenters"), sameInstance(commentersTerms));
@@ -283,7 +313,7 @@ public class ChildrenIT extends ESIntegTestCase {
public void testNonExistingChildType() throws Exception {
SearchResponse searchResponse = client().prepareSearch("test")
.addAggregation(
-children("non-existing", "xyz")
+ children("non-existing", "xyz")
).get();
assertSearchResponse(searchResponse);
@@ -304,7 +334,8 @@ children("non-existing", "xyz")
);
List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex(indexName, masterType, "1").setSource("brand", "Levis", "name", "Style 501", "material", "Denim"));
+ requests.add(client().prepareIndex(indexName, masterType, "1")
+ .setSource("brand", "Levis", "name", "Style 501", "material", "Denim"));
requests.add(client().prepareIndex(indexName, childType, "0").setParent("1").setSource("color", "blue", "size", "32"));
requests.add(client().prepareIndex(indexName, childType, "1").setParent("1").setSource("color", "blue", "size", "34"));
requests.add(client().prepareIndex(indexName, childType, "2").setParent("1").setSource("color", "blue", "size", "36"));
@@ -312,7 +343,8 @@ children("non-existing", "xyz")
requests.add(client().prepareIndex(indexName, childType, "4").setParent("1").setSource("color", "black", "size", "40"));
requests.add(client().prepareIndex(indexName, childType, "5").setParent("1").setSource("color", "gray", "size", "36"));
- requests.add(client().prepareIndex(indexName, masterType, "2").setSource("brand", "Wrangler", "name", "Regular Cut", "material", "Leather"));
+ requests.add(client().prepareIndex(indexName, masterType, "2")
+ .setSource("brand", "Wrangler", "name", "Regular Cut", "material", "Leather"));
requests.add(client().prepareIndex(indexName, childType, "6").setParent("2").setSource("color", "blue", "size", "32"));
requests.add(client().prepareIndex(indexName, childType, "7").setParent("2").setSource("color", "blue", "size", "34"));
requests.add(client().prepareIndex(indexName, childType, "8").setParent("2").setSource("color", "black", "size", "36"));
@@ -425,7 +457,7 @@ children("non-existing", "xyz")
.setSize(0)
.addAggregation(AggregationBuilders.terms("towns").field("town")
.subAggregation(AggregationBuilders.terms("parent_names").field("name")
-.subAggregation(AggregationBuilders.children("child_docs", "childType"))
+.subAggregation(children("child_docs", "childType"))
)
)
.get();
@@ -468,5 +500,4 @@ children("non-existing", "xyz")
this.category = category;
}
}
-
}
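Since parent/child support now ships as the parent-join module, tests that exercise it have to install ParentJoinPlugin themselves. The overrides below consolidate the pattern this test adopts (ChildQuerySearchIT repeats it further down); the comments are explanatory glosses, not statements taken from the change itself:

    @Override
    protected boolean ignoreExternalCluster() {
        return true;    // an external cluster cannot be assumed to have the plugin installed
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(ParentJoinPlugin.class);
    }

    @Override
    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
        return nodePlugins();    // the transport client also needs the plugin to (de)serialize the queries and aggregations
    }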
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java
index 4098e85c62..85a97c4b9b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenTests.java
@@ -17,14 +17,23 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket;
+package org.elasticsearch.join.aggregations;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
+
+import java.util.Collection;
+import java.util.Collections;
public class ChildrenTests extends BaseAggregationTestCase<ChildrenAggregationBuilder> {
@Override
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.singleton(ParentJoinPlugin.class);
+ }
+
+ @Override
protected ChildrenAggregationBuilder createTestAggregatorBuilder() {
String name = randomAlphaOfLengthBetween(3, 20);
String childType = randomAlphaOfLengthBetween(5, 40);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/children/InternalChildrenTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java
index 285837c6e4..089f0a19bd 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/children/InternalChildrenTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/InternalChildrenTests.java
@@ -17,18 +17,29 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.junit.BeforeClass;
import java.util.List;
import java.util.Map;
public class InternalChildrenTests extends InternalSingleBucketAggregationTestCase<InternalChildren> {
+
+ @BeforeClass
+ public static void init() {
+ namedXContents.add(new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(ChildrenAggregationBuilder.NAME),
+ (p, c) -> ParsedChildren.fromXContent(p, (String) c)));
+ }
+
@Override
protected InternalChildren createTestInstance(String name, long docCount, InternalAggregations aggregations,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
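With the aggregation out of core, its response parser is no longer registered for the test's NamedXContentRegistry by default, hence the @BeforeClass hook above. A standalone equivalent would look roughly like this (the registry construction here is illustrative, not the test's actual wiring):

    List<NamedXContentRegistry.Entry> entries = new ArrayList<>();
    entries.add(new NamedXContentRegistry.Entry(Aggregation.class,
            new ParseField(ChildrenAggregationBuilder.NAME),
            (p, c) -> ParsedChildren.fromXContent(p, (String) c)));
    NamedXContentRegistry registry = new NamedXContentRegistry(entries);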
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java
index 17152bc450..0a00b2d1c2 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregatorTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket.children;
+package org.elasticsearch.join.aggregations;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
index 697352c5ed..ed910ac89e 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
@@ -16,17 +16,20 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.search.child;
+package org.elasticsearch.join.query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
+import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
import org.elasticsearch.common.settings.Settings;
@@ -34,8 +37,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.HasChildQueryBuilder;
-import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
@@ -43,6 +44,8 @@ import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -62,6 +65,8 @@ import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -72,8 +77,6 @@ import java.util.Set;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
@@ -83,6 +86,8 @@ import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -100,6 +105,21 @@ import static org.hamcrest.Matchers.notNullValue;
@ClusterScope(scope = Scope.SUITE)
public class ChildQuerySearchIT extends ESIntegTestCase {
+
+ @Override
+ protected boolean ignoreExternalCluster() {
+ return true;
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Collections.singleton(ParentJoinPlugin.class);
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> transportClientPlugins() {
+ return nodePlugins();
+ }
@Override
public Settings indexSettings() {
@@ -145,26 +165,30 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))).execute()
+ .setQuery(boolQuery().must(matchAllQuery())
+ .filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))).execute()
+ .setQuery(boolQuery().must(matchAllQuery())
+ .filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("gc1"));
- searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)).execute()
+ searchResponse = client().prepareSearch("test")
+ .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
- searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)).execute()
+ searchResponse = client().prepareSearch("test")
+ .setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
@@ -209,7 +233,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
refresh();
// TEST FETCHING _parent from child
- SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).storedFields("_parent").execute()
+ SearchResponse searchResponse = client().prepareSearch("test")
+ .setQuery(idsQuery("child").addIds("c1")).storedFields("_parent").execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
@@ -284,7 +309,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
builders.add(client().prepareIndex("test", "child", Integer.toString(i)).setSource("c_field", i).setParent("" + 0));
}
for (int i = 0; i < 10; i++) {
- builders.add(client().prepareIndex("test", "child", Integer.toString(i + 10)).setSource("c_field", i + 10).setParent(Integer.toString(i)));
+ builders.add(client().prepareIndex("test", "child", Integer.toString(i + 10))
+ .setSource("c_field", i + 10).setParent(Integer.toString(i)));
}
if (randomBoolean()) {
@@ -445,9 +471,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client()
.prepareSearch("test")
- .setQuery(hasChildQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), ScoreMode.None))
+ .setQuery(hasChildQuery("child",
+ boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), ScoreMode.None))
.addAggregation(AggregationBuilders.global("global").subAggregation(
- AggregationBuilders.filter("filter", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow"))).subAggregation(
+ AggregationBuilders.filter("filter",
+ boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow"))).subAggregation(
AggregationBuilders.terms("facet1").field("c_field")))).get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
@@ -523,7 +551,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertNoFailures(searchResponse);
searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
- .setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*")), false))).execute()
+ .setQuery(boolQuery().mustNot(hasParentQuery("parent",
+ boolQuery().should(queryStringQuery("p_field:*")), false))).execute()
.actionGet();
assertNoFailures(searchResponse);
}
@@ -570,7 +599,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.get();
assertHitCount(countResponse, 1L);
- countResponse = client().prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true))
+ countResponse = client().prepareSearch("test").setSize(0)
+ .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true))
.get();
assertHitCount(countResponse, 1L);
@@ -579,7 +609,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.get();
assertHitCount(countResponse, 1L);
- countResponse = client().prepareSearch("test").setSize(0).setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false)))
+ countResponse = client().prepareSearch("test").setSize(0)
+ .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false)))
.get();
assertHitCount(countResponse, 1L);
}
@@ -685,11 +716,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse response = client()
.prepareSearch("test")
.setQuery(
- QueryBuilders.hasChildQuery(
- "child",
- QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
- fieldValueFactorFunction("c_field1"))
- .boostMode(CombineFunction.REPLACE), ScoreMode.Total)).get();
+ hasChildQuery(
+ "child",
+ QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
+ fieldValueFactorFunction("c_field1"))
+ .boostMode(CombineFunction.REPLACE), ScoreMode.Total)).get();
assertThat(response.getHits().getTotalHits(), equalTo(3L));
assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
@@ -702,11 +733,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = client()
.prepareSearch("test")
.setQuery(
- QueryBuilders.hasChildQuery(
- "child",
- QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
- fieldValueFactorFunction("c_field1"))
- .boostMode(CombineFunction.REPLACE), ScoreMode.Max)).get();
+ hasChildQuery(
+ "child",
+ QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
+ fieldValueFactorFunction("c_field1"))
+ .boostMode(CombineFunction.REPLACE), ScoreMode.Max)).get();
assertThat(response.getHits().getTotalHits(), equalTo(3L));
assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
@@ -719,11 +750,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = client()
.prepareSearch("test")
.setQuery(
- QueryBuilders.hasChildQuery(
- "child",
- QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
- fieldValueFactorFunction("c_field1"))
- .boostMode(CombineFunction.REPLACE), ScoreMode.Avg)).get();
+ hasChildQuery(
+ "child",
+ QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
+ fieldValueFactorFunction("c_field1"))
+ .boostMode(CombineFunction.REPLACE), ScoreMode.Avg)).get();
assertThat(response.getHits().getTotalHits(), equalTo(3L));
assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
@@ -736,11 +767,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = client()
.prepareSearch("test")
.setQuery(
- QueryBuilders.hasParentQuery(
- "parent",
- QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"),
- fieldValueFactorFunction("p_field2"))
- .boostMode(CombineFunction.REPLACE), true))
+ hasParentQuery(
+ "parent",
+ QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"),
+ fieldValueFactorFunction("p_field2"))
+ .boostMode(CombineFunction.REPLACE), true))
.addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()).get();
assertThat(response.getHits().getTotalHits(), equalTo(7L));
@@ -769,7 +800,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
ensureGreen();
SearchResponse response = client().prepareSearch("test")
- .setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
+ .setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), equalTo(0L));
@@ -777,20 +808,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
response = client().prepareSearch("test")
- .setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
+ .setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), equalTo(0L));
- response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max))
+ response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max))
.get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), equalTo(0L));
- response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("parent", matchQuery("text", "value"), false)).get();
+ response = client().prepareSearch("test")
+ .setQuery(hasParentQuery("parent", matchQuery("text", "value"), false)).get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), equalTo(0L));
- response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("parent", matchQuery("text", "value"), true))
+ response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), true))
.get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), equalTo(0L));
@@ -818,7 +850,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false))).get();
+ .setQuery(boolQuery().must(matchAllQuery())
+ .filter(hasParentQuery("parent", termQuery("p_field", 1), false))).get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2"));
@@ -873,12 +906,14 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertSearchHit(searchResponse, 1, hasId("2"));
searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))))
+ .setQuery(boolQuery().must(matchAllQuery())
+ .filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))))
.get();
assertSearchHit(searchResponse, 1, hasId("1"));
searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false)))).get();
+ .setQuery(boolQuery().must(matchAllQuery())
+ .filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false)))).get();
assertSearchHit(searchResponse, 1, hasId("2"));
}
@@ -974,7 +1009,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh("test").get();
}
- searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total))
+ searchResponse = client().prepareSearch("test")
+ .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total))
.get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
@@ -1009,7 +1045,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
client().prepareIndex("test", "child", "c5").setSource("c_field", "x").setParent("p2").get();
refresh();
- SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total))
+ SearchResponse searchResponse = client()
+ .prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total))
.setMinScore(3) // Score needs to be 3 or above!
.get();
assertNoFailures(searchResponse);
@@ -1219,7 +1256,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
createIndex("test");
ensureGreen();
- PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child").setSource("number", "type=integer")
+ PutMappingResponse putMappingResponse = client().admin().indices()
+ .preparePutMapping("test").setType("child").setSource("number", "type=integer")
.get();
assertThat(putMappingResponse.isAcknowledged(), equalTo(true));
@@ -1269,13 +1307,15 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
ScoreMode scoreMode = randomFrom(ScoreMode.values());
SearchResponse searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue"), scoreMode)).filter(boolQuery().mustNot(termQuery("p_field", "3"))))
+ .setQuery(boolQuery().must(hasChildQuery("child", termQuery("c_field", "blue"), scoreMode))
+ .filter(boolQuery().mustNot(termQuery("p_field", "3"))))
.get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test")
- .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "red"), scoreMode)).filter(boolQuery().mustNot(termQuery("p_field", "3"))))
+ .setQuery(boolQuery().must(hasChildQuery("child", termQuery("c_field", "red"), scoreMode))
+ .filter(boolQuery().mustNot(termQuery("p_field", "3"))))
.get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
@@ -1293,25 +1333,29 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get();
refresh();
- SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max).queryName("test"))
+ SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child",
+ termQuery("c_field", "1"), ScoreMode.Max).queryName("test"))
.get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
- searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true).queryName("test"))
+ searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent",
+ termQuery("p_field", "1"), true).queryName("test"))
.get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
- searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None).queryName("test")))
+ searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child",
+ termQuery("c_field", "1"), ScoreMode.None).queryName("test")))
.get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
- searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false).queryName("test")))
+ searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent",
+ termQuery("p_field", "1"), false).queryName("test")))
.get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
@@ -1400,7 +1444,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
for (int i = 0; i < 2; i++) {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery()
- .must(QueryBuilders.hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None))
+ .must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None))
.must(matchAllQuery())))
.get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
@@ -1412,7 +1456,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery()
- .must(QueryBuilders.hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None))
+ .must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None))
.must(matchAllQuery())))
.get();
@@ -1478,7 +1522,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse resp;
resp = client().prepareSearch("test")
- .setSource(new SearchSourceBuilder().query(QueryBuilders.hasChildQuery("posts", QueryBuilders.matchQuery("field", "bar"), ScoreMode.None)))
+ .setSource(new SearchSourceBuilder().query(hasChildQuery("posts",
+ QueryBuilders.matchQuery("field", "bar"), ScoreMode.None)))
.get();
assertHitCount(resp, 1L);
}
@@ -1580,8 +1625,10 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two")),
new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(weightFactorFunction(1)),
- new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)),
- new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "three"),
+ weightFactorFunction(1)),
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "four"),
+ weightFactorFunction(1))
}).boostMode(CombineFunction.REPLACE).scoreMode(FiltersFunctionScoreQuery.ScoreMode.SUM), scoreMode)
.minMaxChildren(minChildren, maxChildren != null ? maxChildren : HasChildQueryBuilder.DEFAULT_MAX_CHILDREN);
@@ -1916,7 +1963,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
try {
client().prepareSearch("test")
- .setQuery(QueryBuilders.hasChildQuery("child", matchAllQuery(), ScoreMode.None))
+ .setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None))
.get();
fail();
} catch (SearchPhaseExecutionException e) {
@@ -1932,7 +1979,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
refresh();
//make sure that when we explicitly set a type, the inner query is executed in the context of the parent type instead
SearchResponse searchResponse = client().prepareSearch("test").setTypes("child-type").setQuery(
- QueryBuilders.hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)).get();
+ hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)).get();
assertSearchHits(searchResponse, "child-id");
}
@@ -1945,7 +1992,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
refresh();
//make sure that when we explicitly set a type, the inner query is executed in the context of the child type instead
SearchResponse searchResponse = client().prepareSearch("test").setTypes("parent-type").setQuery(
- QueryBuilders.hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)).get();
+ hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)).get();
assertSearchHits(searchResponse, "parent-id");
}
@@ -1955,8 +2002,10 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.addMapping("parent-type", "searchText", "type=text,term_vector=with_positions_offsets,index_options=offsets")
.addMapping("child-type", "_parent", "type=parent-type", "searchText",
"type=text,term_vector=with_positions_offsets,index_options=offsets"));
- client().prepareIndex("test", "parent-type", "parent-id").setSource("searchText", "quick brown fox").get();
- client().prepareIndex("test", "child-type", "child-id").setParent("parent-id").setSource("searchText", "quick brown fox").get();
+ client().prepareIndex("test", "parent-type", "parent-id")
+ .setSource("searchText", "quick brown fox").get();
+ client().prepareIndex("test", "child-type", "child-id")
+ .setParent("parent-id").setSource("searchText", "quick brown fox").get();
refresh();
String[] highlightTypes = new String[] {"plain", "fvh", "postings"};
@@ -1988,4 +2037,177 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
}
}
+ public void testAliasesFilterWithHasChildQuery() throws Exception {
+ assertAcked(prepareCreate("my-index")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("parent")
+ .addMapping("child", "_parent", "type=parent")
+ );
+ client().prepareIndex("my-index", "parent", "1").setSource("{}", XContentType.JSON).get();
+ client().prepareIndex("my-index", "child", "2").setSource("{}", XContentType.JSON).setParent("1").get();
+ refresh();
+
+ assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1",
+ hasChildQuery("child", matchAllQuery(), ScoreMode.None)));
+ assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2",
+ hasParentQuery("parent", matchAllQuery(), false)));
+
+ SearchResponse response = client().prepareSearch("filter1").get();
+ assertHitCount(response, 1);
+ assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
+ response = client().prepareSearch("filter2").get();
+ assertHitCount(response, 1);
+ assertThat(response.getHits().getAt(0).getId(), equalTo("2"));
+ }
+
+ /*
+ Test for https://github.com/elastic/elasticsearch/issues/3444
+ */
+ public void testBulkUpdateDocAsUpsertWithParent() throws Exception {
+ client().admin().indices().prepareCreate("test")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
+ .addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON)
+ .execute().actionGet();
+ ensureGreen();
+
+ BulkRequestBuilder builder = client().prepareBulk();
+
+ // It's important to use JSON parsing here and request objects: issue 3444 is related to incomplete option parsing
+ byte[] addParent = new BytesArray(
+ "{" +
+ " \"index\" : {" +
+ " \"_index\" : \"test\"," +
+ " \"_type\" : \"parent\"," +
+ " \"_id\" : \"parent1\"" +
+ " }" +
+ "}" +
+ "\n" +
+ "{" +
+ " \"field1\" : \"value1\"" +
+ "}" +
+ "\n").array();
+
+ byte[] addChild = new BytesArray(
+ "{" +
+ " \"update\" : {" +
+ " \"_index\" : \"test\"," +
+ " \"_type\" : \"child\"," +
+ " \"_id\" : \"child1\"," +
+ " \"parent\" : \"parent1\"" +
+ " }" +
+ "}" +
+ "\n" +
+ "{" +
+ " \"doc\" : {" +
+ " \"field1\" : \"value1\"" +
+ " }," +
+ " \"doc_as_upsert\" : \"true\"" +
+ "}" +
+ "\n").array();
+
+ builder.add(addParent, 0, addParent.length, XContentType.JSON);
+ builder.add(addChild, 0, addChild.length, XContentType.JSON);
+
+ BulkResponse bulkResponse = builder.get();
+ assertThat(bulkResponse.getItems().length, equalTo(2));
+ assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
+ assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
+
+ client().admin().indices().prepareRefresh("test").get();
+
+ //we check that the _parent field was set on the child document by using the has parent query
+ SearchResponse searchResponse = client().prepareSearch("test")
+ .setQuery(hasParentQuery("parent", QueryBuilders.matchAllQuery(), false))
+ .get();
+
+ assertNoFailures(searchResponse);
+ assertSearchHits(searchResponse, "child1");
+ }
+
+ /*
+ Test for https://github.com/elastic/elasticsearch/issues/3444
+ */
+ public void testBulkUpdateUpsertWithParent() throws Exception {
+ assertAcked(prepareCreate("test")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
+ .addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON));
+ ensureGreen();
+
+ BulkRequestBuilder builder = client().prepareBulk();
+
+ byte[] addParent = new BytesArray(
+ "{" +
+ " \"index\" : {" +
+ " \"_index\" : \"test\"," +
+ " \"_type\" : \"parent\"," +
+ " \"_id\" : \"parent1\"" +
+ " }" +
+ "}" +
+ "\n" +
+ "{" +
+ " \"field1\" : \"value1\"" +
+ "}" +
+ "\n").array();
+
+ byte[] addChild1 = new BytesArray(
+ "{" +
+ " \"update\" : {" +
+ " \"_index\" : \"test\"," +
+ " \"_type\" : \"child\"," +
+ " \"_id\" : \"child1\"," +
+ " \"parent\" : \"parent1\"" +
+ " }" +
+ "}" +
+ "\n" +
+ "{" +
+ " \"script\" : {" +
+ " \"inline\" : \"ctx._source.field2 = 'value2'\"" +
+ " }," +
+ " \"lang\" : \"" + InnerHitsIT.CustomScriptPlugin.NAME + "\"," +
+ " \"upsert\" : {" +
+ " \"field1\" : \"value1'\"" +
+ " }" +
+ "}" +
+ "\n").array();
+
+ byte[] addChild2 = new BytesArray(
+ "{" +
+ " \"update\" : {" +
+ " \"_index\" : \"test\"," +
+ " \"_type\" : \"child\"," +
+ " \"_id\" : \"child1\"," +
+ " \"parent\" : \"parent1\"" +
+ " }" +
+ "}" +
+ "\n" +
+ "{" +
+ " \"script\" : \"ctx._source.field2 = 'value2'\"," +
+ " \"upsert\" : {" +
+ " \"field1\" : \"value1'\"" +
+ " }" +
+ "}" +
+ "\n").array();
+
+ builder.add(addParent, 0, addParent.length, XContentType.JSON);
+ builder.add(addChild1, 0, addChild1.length, XContentType.JSON);
+ builder.add(addChild2, 0, addChild2.length, XContentType.JSON);
+
+ BulkResponse bulkResponse = builder.get();
+ assertThat(bulkResponse.getItems().length, equalTo(3));
+ assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
+ assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
+ assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(true));
+ assertThat(bulkResponse.getItems()[2].getFailure().getCause().getCause().getMessage(),
+ equalTo("script_lang not supported [painless]"));
+
+ client().admin().indices().prepareRefresh("test").get();
+
+ SearchResponse searchResponse = client().prepareSearch("test")
+ .setQuery(hasParentQuery("parent", QueryBuilders.matchAllQuery(), false))
+ .get();
+
+ assertSearchHits(searchResponse, "child1");
+ }
}
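Taken together, the import churn in this test reduces to a small migration table for anything that used the relocated classes; it is read off the changed file headers and import blocks above and is not meant to be exhaustive:

    org.elasticsearch.index.query.QueryBuilders.hasChildQuery          -> org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery
    org.elasticsearch.index.query.QueryBuilders.hasParentQuery         -> org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery
    org.elasticsearch.search.aggregations.AggregationBuilders.children -> org.elasticsearch.join.aggregations.JoinAggregationBuilders.children
    org.elasticsearch.index.query.HasChildQueryBuilder                 -> org.elasticsearch.join.query.HasChildQueryBuilder
    org.elasticsearch.index.query.HasParentQueryBuilder                -> org.elasticsearch.join.query.HasParentQueryBuilder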
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
index 49523fe923..8f4fc9d0c3 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.index.query;
+package org.elasticsearch.join.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.search.TermInSetQuery;
@@ -40,7 +40,17 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
+import org.elasticsearch.index.query.IdsQueryBuilder;
+import org.elasticsearch.index.query.InnerHitBuilder;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.query.QueryShardException;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.similarity.SimilarityService;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
@@ -48,10 +58,12 @@ import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import java.io.IOException;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
@@ -66,6 +78,11 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
boolean requiresRewrite = false;
@Override
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.singletonList(ParentJoinPlugin.class);
+ }
+
+ @Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
similarity = randomFrom("classic", "BM25");
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
@@ -97,7 +114,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
int min = randomIntBetween(0, Integer.MAX_VALUE / 2);
int max = randomIntBetween(min, Integer.MAX_VALUE);
- QueryBuilder innerQueryBuilder = RandomQueryBuilder.createQuery(random());
+ QueryBuilder innerQueryBuilder = new MatchAllQueryBuilder();
if (randomBoolean()) {
requiresRewrite = true;
innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
@@ -144,19 +161,19 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
}
public void testIllegalValues() {
- QueryBuilder query = RandomQueryBuilder.createQuery(random());
+ QueryBuilder query = new MatchAllQueryBuilder();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> QueryBuilders.hasChildQuery(null, query, ScoreMode.None));
+ () -> hasChildQuery(null, query, ScoreMode.None));
assertEquals("[has_child] requires 'type' field", e.getMessage());
- e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.hasChildQuery("foo", null, ScoreMode.None));
+ e = expectThrows(IllegalArgumentException.class, () -> hasChildQuery("foo", null, ScoreMode.None));
assertEquals("[has_child] requires 'query' field", e.getMessage());
- e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.hasChildQuery("foo", query, null));
+ e = expectThrows(IllegalArgumentException.class, () -> hasChildQuery("foo", query, null));
assertEquals("[has_child] requires 'score_mode' field", e.getMessage());
int positiveValue = randomIntBetween(0, Integer.MAX_VALUE);
- HasChildQueryBuilder foo = QueryBuilders.hasChildQuery("foo", query, ScoreMode.None); // all good
+ HasChildQueryBuilder foo = hasChildQuery("foo", query, ScoreMode.None); // all good
e = expectThrows(IllegalArgumentException.class, () -> foo.minMaxChildren(randomIntBetween(Integer.MIN_VALUE, -1), positiveValue));
assertEquals("[has_child] requires non-negative 'min_children' field", e.getMessage());
@@ -225,7 +242,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
String[] searchTypes = new String[]{PARENT_TYPE};
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
- HasChildQueryBuilder hasChildQueryBuilder = QueryBuilders.hasChildQuery(CHILD_TYPE, new IdsQueryBuilder().addIds("id"), ScoreMode.None);
+ HasChildQueryBuilder hasChildQueryBuilder = hasChildQuery(CHILD_TYPE, new IdsQueryBuilder().addIds("id"), ScoreMode.None);
Query query = hasChildQueryBuilder.toQuery(shardContext);
//verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
@@ -252,7 +269,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
assertThat(booleanTermsQuery.clauses().get(0).getQuery(), instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) booleanTermsQuery.clauses().get(0).getQuery();
assertThat(termQuery.getTerm().field(), equalTo(UidFieldMapper.NAME));
- //we want to make sure that the inner ids query gets executed against the child type rather than the main type we initially set to the context
+ //we want to make sure that the inner ids query gets executed against the child type rather
+ // than the main type we initially set to the context
BytesRef[] ids = Uid.createUidsForTypesAndIds(Collections.singletonList(type), Collections.singletonList(id));
assertThat(termQuery.getTerm().bytes(), equalTo(ids[0]));
//check the type filter
@@ -273,7 +291,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
public void testNonDefaultSimilarity() throws Exception {
QueryShardContext shardContext = createShardContext();
- HasChildQueryBuilder hasChildQueryBuilder = QueryBuilders.hasChildQuery(CHILD_TYPE, new TermQueryBuilder("custom_string", "value"), ScoreMode.None);
+ HasChildQueryBuilder hasChildQueryBuilder =
+ hasChildQuery(CHILD_TYPE, new TermQueryBuilder("custom_string", "value"), ScoreMode.None);
HasChildQueryBuilder.LateParsingQuery query = (HasChildQueryBuilder.LateParsingQuery) hasChildQueryBuilder.toQuery(shardContext);
Similarity expected = SimilarityService.BUILT_IN.get(similarity)
.apply(similarity, Settings.EMPTY, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build())
@@ -281,48 +300,6 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
assertThat(((PerFieldSimilarityWrapper) query.getSimilarity()).get("custom_string"), instanceOf(expected.getClass()));
}
- public void testMinFromString() {
- assertThat("fromString(min) != MIN", ScoreMode.Min, equalTo(HasChildQueryBuilder.parseScoreMode("min")));
- assertThat("min", equalTo(HasChildQueryBuilder.scoreModeAsString(ScoreMode.Min)));
- }
-
- public void testMaxFromString() {
- assertThat("fromString(max) != MAX", ScoreMode.Max, equalTo(HasChildQueryBuilder.parseScoreMode("max")));
- assertThat("max", equalTo(HasChildQueryBuilder.scoreModeAsString(ScoreMode.Max)));
- }
-
- public void testAvgFromString() {
- assertThat("fromString(avg) != AVG", ScoreMode.Avg, equalTo(HasChildQueryBuilder.parseScoreMode("avg")));
- assertThat("avg", equalTo(HasChildQueryBuilder.scoreModeAsString(ScoreMode.Avg)));
- }
-
- public void testSumFromString() {
- assertThat("fromString(total) != SUM", ScoreMode.Total, equalTo(HasChildQueryBuilder.parseScoreMode("sum")));
- assertThat("sum", equalTo(HasChildQueryBuilder.scoreModeAsString(ScoreMode.Total)));
- }
-
- public void testNoneFromString() {
- assertThat("fromString(none) != NONE", ScoreMode.None, equalTo(HasChildQueryBuilder.parseScoreMode("none")));
- assertThat("none", equalTo(HasChildQueryBuilder.scoreModeAsString(ScoreMode.None)));
- }
-
- /**
- * Should throw {@link IllegalArgumentException} instead of NPE.
- */
- public void testThatNullFromStringThrowsException() {
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> HasChildQueryBuilder.parseScoreMode(null));
- assertEquals("No score mode for child query [null] found", e.getMessage());
- }
-
- /**
- * Failure should not change (and the value should never match anything...).
- */
- public void testThatUnrecognizedFromStringThrowsException() {
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> HasChildQueryBuilder.parseScoreMode("unrecognized value"));
- assertEquals("No score mode for child query [unrecognized value] found", e.getMessage());
- }
-
public void testIgnoreUnmapped() throws IOException {
final HasChildQueryBuilder queryBuilder = new HasChildQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
queryBuilder.ignoreUnmapped(true);
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
index 22ea2cc9c4..825dfede61 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.index.query;
+package org.elasticsearch.join.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
@@ -28,6 +28,16 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.IdsQueryBuilder;
+import org.elasticsearch.index.query.InnerHitBuilder;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.query.QueryShardException;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.index.query.WrapperQueryBuilder;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
@@ -35,9 +45,12 @@ import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
@@ -50,6 +63,11 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
boolean requiresRewrite = false;
@Override
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.singletonList(ParentJoinPlugin.class);
+ }
+
+ @Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
@@ -79,7 +97,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
*/
@Override
protected HasParentQueryBuilder doCreateTestQueryBuilder() {
- QueryBuilder innerQueryBuilder = RandomQueryBuilder.createQuery(random());
+ QueryBuilder innerQueryBuilder = new MatchAllQueryBuilder();
if (randomBoolean()) {
requiresRewrite = true;
innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
@@ -124,17 +142,17 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
}
public void testIllegalValues() throws IOException {
- QueryBuilder query = RandomQueryBuilder.createQuery(random());
+ QueryBuilder query = new MatchAllQueryBuilder();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> QueryBuilders.hasParentQuery(null, query, false));
+ () -> hasParentQuery(null, query, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'type' field"));
e = expectThrows(IllegalArgumentException.class,
- () -> QueryBuilders.hasParentQuery("foo", null, false));
+ () -> hasParentQuery("foo", null, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field"));
QueryShardContext context = createShardContext();
- HasParentQueryBuilder qb = QueryBuilders.hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false);
+ HasParentQueryBuilder qb = hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false);
QueryShardException qse = expectThrows(QueryShardException.class, () -> qb.doToQuery(context));
assertThat(qse.getMessage(), equalTo("[has_parent] no child types found for type [just_a_type]"));
}
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java
new file mode 100644
index 0000000000..ad8e49e9f5
--- /dev/null
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java
@@ -0,0 +1,568 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.join.query;
+
+import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.util.ArrayUtil;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.InnerHitBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.MockScriptEngine;
+import org.elasticsearch.script.MockScriptPlugin;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
+import org.elasticsearch.search.sort.FieldSortBuilder;
+import org.elasticsearch.search.sort.SortBuilders;
+import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.elasticsearch.test.ESIntegTestCase.Scope;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.function.Function;
+
+import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
+import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
+import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+@ClusterScope(scope = Scope.SUITE)
+public class InnerHitsIT extends ESIntegTestCase {
+ @Override
+ protected boolean ignoreExternalCluster() {
+ return true;
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Arrays.asList(ParentJoinPlugin.class, CustomScriptPlugin.class);
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> transportClientPlugins() {
+ return nodePlugins();
+ }
+
+ public static class CustomScriptPlugin extends MockScriptPlugin {
+ @Override
+ protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
+ return Collections.singletonMap("5", script -> "5");
+ }
+ }
+
+ public void testSimpleParentChild() throws Exception {
+ assertAcked(prepareCreate("articles")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("article", "title", "type=text")
+ .addMapping("comment", "_parent", "type=article", "message", "type=text,fielddata=true")
+ );
+
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("articles", "article", "1").setSource("title", "quick brown fox"));
+ requests.add(client().prepareIndex("articles", "comment", "1").setParent("1").setSource("message", "fox eat quick"));
+ requests.add(client().prepareIndex("articles", "comment", "2").setParent("1").setSource("message", "fox ate rabbit x y z"));
+ requests.add(client().prepareIndex("articles", "comment", "3").setParent("1").setSource("message", "rabbit got away"));
+ requests.add(client().prepareIndex("articles", "article", "2").setSource("title", "big gray elephant"));
+ requests.add(client().prepareIndex("articles", "comment", "4").setParent("2").setSource("message", "elephant captured"));
+ requests.add(client().prepareIndex("articles", "comment", "5").setParent("2").setSource("message", "mice squashed by elephant x"));
+ requests.add(client().prepareIndex("articles", "comment", "6").setParent("2").setSource("message", "elephant scared by mice x y"));
+ indexRandom(true, requests);
+
+ SearchResponse response = client().prepareSearch("articles")
+ .setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None)
+ .innerHit(new InnerHitBuilder(), false))
+ .get();
+ assertNoFailures(response);
+ assertHitCount(response, 1);
+ assertSearchHit(response, 1, hasId("1"));
+ assertThat(response.getHits().getAt(0).getShard(), notNullValue());
+
+ assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
+ SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+ assertThat(innerHits.getTotalHits(), equalTo(2L));
+
+ assertThat(innerHits.getAt(0).getId(), equalTo("1"));
+ assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
+ assertThat(innerHits.getAt(1).getId(), equalTo("2"));
+ assertThat(innerHits.getAt(1).getType(), equalTo("comment"));
+
+ response = client().prepareSearch("articles")
+ .setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None)
+ .innerHit(new InnerHitBuilder(), false))
+ .get();
+ assertNoFailures(response);
+ assertHitCount(response, 1);
+ assertSearchHit(response, 1, hasId("2"));
+
+ assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
+ innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+ assertThat(innerHits.getTotalHits(), equalTo(3L));
+
+ assertThat(innerHits.getAt(0).getId(), equalTo("4"));
+ assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
+ assertThat(innerHits.getAt(1).getId(), equalTo("5"));
+ assertThat(innerHits.getAt(1).getType(), equalTo("comment"));
+ assertThat(innerHits.getAt(2).getId(), equalTo("6"));
+ assertThat(innerHits.getAt(2).getType(), equalTo("comment"));
+
+ response = client().prepareSearch("articles")
+ .setQuery(
+ hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
+ new InnerHitBuilder()
+ .addDocValueField("message")
+ .setHighlightBuilder(new HighlightBuilder().field("message"))
+ .setExplain(true).setSize(1)
+ .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5",
+ Collections.emptyMap())),
+ false)
+ ).get();
+ assertNoFailures(response);
+ innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+ assertThat(innerHits.getHits().length, equalTo(1));
+ assertThat(innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
+ assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(message:fox"));
+ assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
+ assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
+ }
+
+ public void testRandomParentChild() throws Exception {
+ assertAcked(prepareCreate("idx")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("parent")
+ .addMapping("child1", "_parent", "type=parent")
+ .addMapping("child2", "_parent", "type=parent")
+ );
+ int numDocs = scaledRandomIntBetween(5, 50);
+ List<IndexRequestBuilder> requestBuilders = new ArrayList<>();
+
+ int child1 = 0;
+ int child2 = 0;
+ int[] child1InnerObjects = new int[numDocs];
+ int[] child2InnerObjects = new int[numDocs];
+ for (int parent = 0; parent < numDocs; parent++) {
+ String parentId = String.format(Locale.ENGLISH, "%03d", parent);
+ requestBuilders.add(client().prepareIndex("idx", "parent", parentId).setSource("{}", XContentType.JSON));
+
+ int numChildDocs = child1InnerObjects[parent] = scaledRandomIntBetween(1, numDocs);
+ int limit = child1 + numChildDocs;
+ for (; child1 < limit; child1++) {
+ requestBuilders.add(client().prepareIndex("idx", "child1",
+ String.format(Locale.ENGLISH, "%04d", child1)).setParent(parentId).setSource("{}", XContentType.JSON));
+ }
+ numChildDocs = child2InnerObjects[parent] = scaledRandomIntBetween(1, numDocs);
+ limit = child2 + numChildDocs;
+ for (; child2 < limit; child2++) {
+ requestBuilders.add(client().prepareIndex("idx", "child2",
+ String.format(Locale.ENGLISH, "%04d", child2)).setParent(parentId).setSource("{}", XContentType.JSON));
+ }
+ }
+ indexRandom(true, requestBuilders);
+
+ int size = randomIntBetween(0, numDocs);
+ BoolQueryBuilder boolQuery = new BoolQueryBuilder();
+ boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery(), ScoreMode.None)
+ .innerHit(new InnerHitBuilder().setName("a")
+ .addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size), false)));
+ boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery(), ScoreMode.None)
+ .innerHit(new InnerHitBuilder().setName("b")
+ .addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size), false)));
+ SearchResponse searchResponse = client().prepareSearch("idx")
+ .setSize(numDocs)
+ .setTypes("parent")
+ .addSort("_uid", SortOrder.ASC)
+ .setQuery(boolQuery)
+ .get();
+
+ assertNoFailures(searchResponse);
+ assertHitCount(searchResponse, numDocs);
+ assertThat(searchResponse.getHits().getHits().length, equalTo(numDocs));
+
+ int offset1 = 0;
+ int offset2 = 0;
+ for (int parent = 0; parent < numDocs; parent++) {
+ SearchHit searchHit = searchResponse.getHits().getAt(parent);
+ assertThat(searchHit.getType(), equalTo("parent"));
+ assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "%03d", parent)));
+ assertThat(searchHit.getShard(), notNullValue());
+
+ SearchHits inner = searchHit.getInnerHits().get("a");
+ assertThat(inner.getTotalHits(), equalTo((long) child1InnerObjects[parent]));
+ for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
+ SearchHit innerHit = inner.getAt(child);
+ assertThat(innerHit.getType(), equalTo("child1"));
+ String childId = String.format(Locale.ENGLISH, "%04d", offset1 + child);
+ assertThat(innerHit.getId(), equalTo(childId));
+ assertThat(innerHit.getNestedIdentity(), nullValue());
+ }
+ offset1 += child1InnerObjects[parent];
+
+ inner = searchHit.getInnerHits().get("b");
+ assertThat(inner.getTotalHits(), equalTo((long) child2InnerObjects[parent]));
+ for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
+ SearchHit innerHit = inner.getAt(child);
+ assertThat(innerHit.getType(), equalTo("child2"));
+ String childId = String.format(Locale.ENGLISH, "%04d", offset2 + child);
+ assertThat(innerHit.getId(), equalTo(childId));
+ assertThat(innerHit.getNestedIdentity(), nullValue());
+ }
+ offset2 += child2InnerObjects[parent];
+ }
+ }
+
+ public void testInnerHitsOnHasParent() throws Exception {
+ assertAcked(prepareCreate("stack")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("question", "body", "type=text")
+ .addMapping("answer", "_parent", "type=question", "body", "type=text")
+ );
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication "
+ + "to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?"));
+ requests.add(client().prepareIndex("stack", "answer", "1").setParent("1").setSource("body",
+ "install fail2ban and enable rules for apache"));
+ requests.add(client().prepareIndex("stack", "question", "2").setSource("body",
+ "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?"));
+ requests.add(client().prepareIndex("stack", "answer", "2").setParent("2").setSource("body",
+ "Denyhosts protects only ssh; Fail2Ban protects all daemons."));
+ indexRandom(true, requests);
+
+ SearchResponse response = client().prepareSearch("stack")
+ .setTypes("answer")
+ .addSort("_uid", SortOrder.ASC)
+ .setQuery(
+ boolQuery()
+ .must(matchQuery("body", "fail2ban"))
+ .must(hasParentQuery("question", matchAllQuery(), false).innerHit(new InnerHitBuilder(), false))
+ ).get();
+ assertNoFailures(response);
+ assertHitCount(response, 2);
+
+ SearchHit searchHit = response.getHits().getAt(0);
+ assertThat(searchHit.getId(), equalTo("1"));
+ assertThat(searchHit.getType(), equalTo("answer"));
+ assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
+ assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("question"));
+ assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1"));
+
+ searchHit = response.getHits().getAt(1);
+ assertThat(searchHit.getId(), equalTo("2"));
+ assertThat(searchHit.getType(), equalTo("answer"));
+ assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
+ assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("question"));
+ assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2"));
+ }
+
+ public void testParentChildMultipleLayers() throws Exception {
+ assertAcked(prepareCreate("articles")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("article", "title", "type=text")
+ .addMapping("comment", "_parent", "type=article", "message", "type=text")
+ .addMapping("remark", "_parent", "type=comment", "message", "type=text")
+ );
+
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("articles", "article", "1").setSource("title", "quick brown fox"));
+ requests.add(client().prepareIndex("articles", "comment", "1").setParent("1").setSource("message", "fox eat quick"));
+ requests.add(client().prepareIndex("articles", "remark", "1").setParent("1").setRouting("1").setSource("message", "good"));
+ requests.add(client().prepareIndex("articles", "article", "2").setSource("title", "big gray elephant"));
+ requests.add(client().prepareIndex("articles", "comment", "2").setParent("2").setSource("message", "elephant captured"));
+ requests.add(client().prepareIndex("articles", "remark", "2").setParent("2").setRouting("2").setSource("message", "bad"));
+ indexRandom(true, requests);
+
+ SearchResponse response = client().prepareSearch("articles")
+ .setQuery(hasChildQuery("comment",
+ hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder(), false),
+ ScoreMode.None).innerHit(new InnerHitBuilder(), false))
+ .get();
+
+ assertNoFailures(response);
+ assertHitCount(response, 1);
+ assertSearchHit(response, 1, hasId("1"));
+
+ assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
+ SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+ assertThat(innerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerHits.getAt(0).getId(), equalTo("1"));
+ assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
+
+ innerHits = innerHits.getAt(0).getInnerHits().get("remark");
+ assertThat(innerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerHits.getAt(0).getId(), equalTo("1"));
+ assertThat(innerHits.getAt(0).getType(), equalTo("remark"));
+
+ response = client().prepareSearch("articles")
+ .setQuery(hasChildQuery("comment",
+ hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder(), false),
+ ScoreMode.None).innerHit(new InnerHitBuilder(), false))
+ .get();
+
+ assertNoFailures(response);
+ assertHitCount(response, 1);
+ assertSearchHit(response, 1, hasId("2"));
+
+ assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
+ innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+ assertThat(innerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerHits.getAt(0).getId(), equalTo("2"));
+ assertThat(innerHits.getAt(0).getType(), equalTo("comment"));
+
+ innerHits = innerHits.getAt(0).getInnerHits().get("remark");
+ assertThat(innerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerHits.getAt(0).getId(), equalTo("2"));
+ assertThat(innerHits.getAt(0).getType(), equalTo("remark"));
+ }
+
+ public void testRoyals() throws Exception {
+ assertAcked(
+ prepareCreate("royals")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("king")
+ .addMapping("prince", "_parent", "type=king")
+ .addMapping("duke", "_parent", "type=prince")
+ .addMapping("earl", "_parent", "type=duke")
+ .addMapping("baron", "_parent", "type=earl")
+ );
+
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("royals", "king", "king").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "prince", "prince").setParent("king").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "duke", "duke").setParent("prince").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "earl", "earl1").setParent("duke").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "earl", "earl2").setParent("duke").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "earl", "earl3").setParent("duke").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "earl", "earl4").setParent("duke").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "baron", "baron1").setParent("earl1").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "baron", "baron2").setParent("earl2").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "baron", "baron3").setParent("earl3").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("royals", "baron", "baron4").setParent("earl4").setRouting("king")
+ .setSource("{}", XContentType.JSON));
+ indexRandom(true, requests);
+
+ SearchResponse response = client().prepareSearch("royals")
+ .setTypes("duke")
+ .setQuery(boolQuery()
+ .filter(hasParentQuery("prince",
+ hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings"), false),
+ false).innerHit(new InnerHitBuilder().setName("princes"), false)
+ )
+ .filter(hasChildQuery("earl",
+ hasChildQuery("baron", matchAllQuery(), ScoreMode.None)
+ .innerHit(new InnerHitBuilder().setName("barons"), false),
+ ScoreMode.None).innerHit(new InnerHitBuilder()
+ .addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC))
+ .setName("earls")
+ .setSize(4), false)
+ )
+ )
+ .get();
+ assertHitCount(response, 1);
+ assertThat(response.getHits().getAt(0).getId(), equalTo("duke"));
+
+ SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls");
+ assertThat(innerHits.getTotalHits(), equalTo(4L));
+ assertThat(innerHits.getAt(0).getId(), equalTo("earl1"));
+ assertThat(innerHits.getAt(1).getId(), equalTo("earl2"));
+ assertThat(innerHits.getAt(2).getId(), equalTo("earl3"));
+ assertThat(innerHits.getAt(3).getId(), equalTo("earl4"));
+
+ SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons");
+ assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1"));
+
+ innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons");
+ assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2"));
+
+ innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons");
+ assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3"));
+
+ innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons");
+ assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4"));
+
+ innerHits = response.getHits().getAt(0).getInnerHits().get("princes");
+ assertThat(innerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerHits.getAt(0).getId(), equalTo("prince"));
+
+ innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings");
+ assertThat(innerInnerHits.getTotalHits(), equalTo(1L));
+ assertThat(innerInnerHits.getAt(0).getId(), equalTo("king"));
+ }
+
+ public void testMatchesQueriesParentChildInnerHits() throws Exception {
+ assertAcked(prepareCreate("index")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("child", "_parent", "type=parent"));
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("index", "parent", "1").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("index", "child", "1").setParent("1").setSource("field", "value1"));
+ requests.add(client().prepareIndex("index", "child", "2").setParent("1").setSource("field", "value2"));
+ requests.add(client().prepareIndex("index", "parent", "2").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("index", "child", "3").setParent("2").setSource("field", "value1"));
+ indexRandom(true, requests);
+
+ SearchResponse response = client().prepareSearch("index")
+ .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None)
+ .innerHit(new InnerHitBuilder(), false))
+ .addSort("_uid", SortOrder.ASC)
+ .get();
+ assertHitCount(response, 2);
+ assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1L));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
+
+ assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
+ assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits(), equalTo(1L));
+ assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
+ assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
+
+ QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None)
+ .innerHit(new InnerHitBuilder(), false);
+ response = client().prepareSearch("index")
+ .setQuery(query)
+ .addSort("_uid", SortOrder.ASC)
+ .get();
+ assertHitCount(response, 1);
+ assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1L));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2"));
+ }
+
+ public void testDontExplode() throws Exception {
+ assertAcked(prepareCreate("index1")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("child", "_parent", "type=parent"));
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("index1", "parent", "1").setSource("{}", XContentType.JSON));
+ requests.add(client().prepareIndex("index1", "child", "1").setParent("1").setSource("field", "value1"));
+ indexRandom(true, requests);
+
+ QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None)
+ .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1), false);
+ SearchResponse response = client().prepareSearch("index1")
+ .setQuery(query)
+ .get();
+ assertNoFailures(response);
+ assertHitCount(response, 1);
+
+ assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested"));
+ client().prepareIndex("index2", "type", "1").setSource(jsonBuilder().startObject()
+ .startArray("nested")
+ .startObject()
+ .field("field", "value1")
+ .endObject()
+ .endArray()
+ .endObject())
+ .setRefreshPolicy(IMMEDIATE)
+ .get();
+
+ query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
+ .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1), false);
+ response = client().prepareSearch("index2")
+ .setQuery(query)
+ .get();
+ assertNoFailures(response);
+ assertHitCount(response, 1);
+ }
+
+ public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception {
+ assertAcked(prepareCreate("test")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("child_type", "_parent", "type=parent_type", "nested_type", "type=nested"));
+ client().prepareIndex("test", "parent_type", "1").setSource("key", "value").get();
+ client().prepareIndex("test", "child_type", "2").setParent("1").setSource("nested_type", Collections.singletonMap("key", "value"))
+ .get();
+ refresh();
+ SearchResponse response = client().prepareSearch("test")
+ .setQuery(boolQuery().must(matchQuery("key", "value"))
+ .should(hasChildQuery("child_type", nestedQuery("nested_type", matchAllQuery(), ScoreMode.None)
+ .innerHit(new InnerHitBuilder(), false), ScoreMode.None).innerHit(new InnerHitBuilder(), false)))
+ .get();
+ assertHitCount(response, 1);
+ SearchHit hit = response.getHits().getAt(0);
+ assertThat(hit.getInnerHits().get("child_type").getAt(0).field("_parent").getValue(), equalTo("1"));
+ assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue());
+ }
+
+ public void testInnerHitsWithIgnoreUnmapped() throws Exception {
+ assertAcked(prepareCreate("index1")
+ .setSettings("index.mapping.single_type", false)
+ .addMapping("parent_type", "nested_type", "type=nested")
+ .addMapping("child_type", "_parent", "type=parent_type")
+ );
+ createIndex("index2");
+ client().prepareIndex("index1", "parent_type", "1").setSource("nested_type", Collections.singletonMap("key", "value")).get();
+ client().prepareIndex("index1", "child_type", "2").setParent("1").setSource("{}", XContentType.JSON).get();
+ client().prepareIndex("index2", "type", "3").setSource("key", "value").get();
+ refresh();
+
+ SearchResponse response = client().prepareSearch("index1", "index2")
+ .setQuery(boolQuery()
+ .should(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
+ .innerHit(new InnerHitBuilder(), true))
+ .should(termQuery("key", "value"))
+ )
+ .get();
+ assertNoFailures(response);
+ assertHitCount(response, 2);
+ assertSearchHits(response, "1", "3");
+ }
+}
diff --git a/modules/parent-join/src/test/resources/rest-api-spec/test/10_basic.yaml b/modules/parent-join/src/test/resources/rest-api-spec/test/10_basic.yaml
new file mode 100644
index 0000000000..f5a5808012
--- /dev/null
+++ b/modules/parent-join/src/test/resources/rest-api-spec/test/10_basic.yaml
@@ -0,0 +1,48 @@
+setup:
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ mapping.single_type: false
+ mappings:
+ type_2: {}
+ type_3:
+ _parent:
+ type: type_2
+
+---
+"Parent/child inner hits":
+ - skip:
+ version: " - 5.99.99"
+ reason: mapping.single_type was added in 6.0
+
+ - do:
+ index:
+ index: test
+ type: type_2
+ id: 1
+ body: {"foo": "bar"}
+
+ - do:
+ index:
+ index: test
+ type: type_3
+ id: 1
+ parent: 1
+ body: {"bar": "baz"}
+
+ - do:
+ indices.refresh: {}
+
+ - do:
+ search:
+ body: { "query" : { "has_child" : { "type" : "type_3", "query" : { "match_all" : {} }, "inner_hits" : {} } } }
+ - match: { hits.total: 1 }
+ - match: { hits.hits.0._index: "test" }
+ - match: { hits.hits.0._type: "type_2" }
+ - match: { hits.hits.0._id: "1" }
+ - is_false: hits.hits.0.inner_hits.type_3.hits.hits.0._index
+ - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._type: "type_3" }
+ - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._id: "1" }
+ - is_false: hits.hits.0.inner_hits.type_3.hits.hits.0._nested
diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle
index 60fb82bdf4..cf55368861 100644
--- a/modules/percolator/build.gradle
+++ b/modules/percolator/build.gradle
@@ -23,5 +23,9 @@ esplugin {
hasClientJar = true
}
+dependencies {
+ // for testing hasChild and hasParent rejections
+ testCompile project(path: ':modules:parent-join', configuration: 'runtime')
+}
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes"
compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes"
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
index f33aca55bc..1865f68158 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
@@ -57,8 +57,6 @@ import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.DisMaxQueryBuilder;
-import org.elasticsearch.index.query.HasChildQueryBuilder;
-import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
@@ -372,15 +370,16 @@ public class PercolatorFieldMapper extends FieldMapper {
return CONTENT_TYPE;
}
+
/**
* Fails if a percolator contains an unsupported query. The following queries are not supported:
* 1) a has_child query
* 2) a has_parent query
*/
static void verifyQuery(QueryBuilder queryBuilder) {
- if (queryBuilder instanceof HasChildQueryBuilder) {
+ if (queryBuilder.getName().equals("has_child")) {
throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query");
- } else if (queryBuilder instanceof HasParentQueryBuilder) {
+ } else if (queryBuilder.getName().equals("has_parent")) {
throw new IllegalArgumentException("the [has_parent] query is unsupported inside a percolator query");
} else if (queryBuilder instanceof BoolQueryBuilder) {
BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder;
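The hunk above swaps the `instanceof HasChildQueryBuilder` / `instanceof HasParentQueryBuilder` checks for name-based checks, because those builder classes now live in the parent-join module and the percolator can no longer reference them at compile time. A minimal standalone sketch of the same name-based rejection (hypothetical helper class, shown only for the bool branch and assuming the recursive walk over clauses that the surrounding method already performs) could look like:

    import org.elasticsearch.index.query.BoolQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilder;

    final class JoinQueryRejectionSketch {
        // Reject has_child / has_parent by query name so the caller needs no
        // compile-time dependency on the parent-join classes.
        static void verify(QueryBuilder builder) {
            if ("has_child".equals(builder.getName())) {
                throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query");
            } else if ("has_parent".equals(builder.getName())) {
                throw new IllegalArgumentException("the [has_parent] query is unsupported inside a percolator query");
            } else if (builder instanceof BoolQueryBuilder) {
                BoolQueryBuilder bool = (BoolQueryBuilder) builder;
                bool.must().forEach(JoinQueryRejectionSketch::verify);
                bool.mustNot().forEach(JoinQueryRejectionSketch::verify);
                bool.should().forEach(JoinQueryRejectionSketch::verify);
                bool.filter().forEach(JoinQueryRejectionSketch::verify);
            }
        }
    }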
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java
index ae585dc9dc..5a150349ed 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java
@@ -52,13 +52,10 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.DisMaxQueryBuilder;
-import org.elasticsearch.index.query.HasChildQueryBuilder;
-import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
@@ -67,6 +64,9 @@ import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
import org.elasticsearch.indices.TermsLookup;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.join.query.HasChildQueryBuilder;
+import org.elasticsearch.join.query.HasParentQueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
@@ -109,7 +109,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
- return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class);
+ return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class, ParentJoinPlugin.class);
}
@Before
diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle
index eba8b96461..b636c46d3a 100644
--- a/modules/reindex/build.gradle
+++ b/modules/reindex/build.gradle
@@ -39,6 +39,8 @@ dependencies {
compile "org.elasticsearch.client:rest:${version}"
// for http - testing reindex from remote
testCompile project(path: ':modules:transport-netty4', configuration: 'runtime')
+ // for parent/child testing
+ testCompile project(path: ':modules:parent-join', configuration: 'runtime')
}
dependencyLicenses {
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java
index d0eb1dcd75..8c4135f1f2 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java
@@ -22,9 +22,16 @@ package org.elasticsearch.index.reindex;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.join.ParentJoinPlugin;
+import org.elasticsearch.plugins.Plugin;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.containsString;
@@ -40,6 +47,23 @@ public class ReindexParentChildTests extends ReindexTestCase {
QueryBuilder findsCity;
QueryBuilder findsNeighborhood;
+ @Override
+ protected boolean ignoreExternalCluster() {
+ return true;
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
+ plugins.add(ParentJoinPlugin.class);
+ return Collections.unmodifiableList(plugins);
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> transportClientPlugins() {
+ return nodePlugins();
+ }
+
public void testParentChild() throws Exception {
createParentChildIndex("source");
createParentChildIndex("dest");
diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml
index b30f263e86..32de51d022 100644
--- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml
+++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml
@@ -158,85 +158,6 @@
metric: search
- match: {indices.source.total.search.open_contexts: 0}
----
-"Reindex from remote with parent/child":
- - do:
- indices.create:
- index: source
- body:
- settings:
- mapping.single_type: false
- mappings:
- foo: {}
- bar:
- _parent:
- type: foo
- - do:
- indices.create:
- index: dest
- body:
- settings:
- mapping.single_type: false
- mappings:
- foo: {}
- bar:
- _parent:
- type: foo
- - do:
- index:
- index: source
- type: foo
- id: 1
- body: { "text": "test" }
- - do:
- index:
- index: source
- type: bar
- id: 1
- parent: 1
- body: { "text": "test2" }
- - do:
- indices.refresh: {}
-
- # Fetch the http host. We use the host of the master because we know there will always be a master.
- - do:
- cluster.state: {}
- - set: { master_node: master }
- - do:
- nodes.info:
- metric: [ http ]
- - is_true: nodes.$master.http.publish_address
- - set: {nodes.$master.http.publish_address: host}
- - do:
- reindex:
- refresh: true
- body:
- source:
- remote:
- host: http://${host}
- index: source
- dest:
- index: dest
- - match: {created: 2}
-
- - do:
- search:
- index: dest
- body:
- query:
- has_parent:
- parent_type: foo
- query:
- match:
- text: test
- - match: {hits.total: 1}
-
- # Make sure reindex closed all the scroll contexts
- - do:
- indices.stats:
- index: source
- metric: search
- - match: {indices.source.total.search.open_contexts: 0}
---
"Reindex from remote with timeouts":
diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle
index c35dbe8ff2..87db7e9307 100644
--- a/modules/transport-netty4/build.gradle
+++ b/modules/transport-netty4/build.gradle
@@ -34,13 +34,13 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr
dependencies {
// network stack
- compile "io.netty:netty-buffer:4.1.10.Final"
- compile "io.netty:netty-codec:4.1.10.Final"
- compile "io.netty:netty-codec-http:4.1.10.Final"
- compile "io.netty:netty-common:4.1.10.Final"
- compile "io.netty:netty-handler:4.1.10.Final"
- compile "io.netty:netty-resolver:4.1.10.Final"
- compile "io.netty:netty-transport:4.1.10.Final"
+ compile "io.netty:netty-buffer:4.1.11.Final"
+ compile "io.netty:netty-codec:4.1.11.Final"
+ compile "io.netty:netty-codec-http:4.1.11.Final"
+ compile "io.netty:netty-common:4.1.11.Final"
+ compile "io.netty:netty-handler:4.1.11.Final"
+ compile "io.netty:netty-resolver:4.1.11.Final"
+ compile "io.netty:netty-transport:4.1.11.Final"
}
dependencyLicenses {
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.10.Final.jar.sha1
deleted file mode 100644
index 147710bd3b..0000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-62b25bb39c22f5a4c267b9ff1d9915f8c0191c3a
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..5c7fd45c71
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+84da342824017dcbeefda0becfef11ce2b5836da
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.10.Final.jar.sha1
deleted file mode 100644
index 81bdd2f885..0000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1fd4a9d3a8d1ded1e1620ce16ae4afe0b3746c41
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..0d5093c08a
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+d9ffe2192b567a4df052f6a36e7b7090b510e0cf
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.10.Final.jar.sha1
deleted file mode 100644
index e25988ddfa..0000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-38cbc4d4a0d40dd6f94ff722d76f6f3ea07a6ca6
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..ba7787f052
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+3edeb0f08e455e570a55eb56bf64595fcb1a6b15
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.10.Final.jar.sha1
deleted file mode 100644
index 25de386486..0000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-93eba0a663a990b8350c0949870b0db29d4d4f38
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..786f535a32
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+f79a702bc5f275832ae18e33ba3d2a264a4aa728
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.10.Final.jar.sha1
deleted file mode 100644
index 22316debbe..0000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c984b5d37563f15f10a0de0f4927d4a0dcb675de
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..5a27bb52a8
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+6f43aae489b2e4fd7446cd347b077bb058a225d8
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.10.Final.jar.sha1
deleted file mode 100644
index 8c8a275ce0..0000000000
--- a/modules/transport-netty4/licenses/netty-resolver-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dd05aa176779768dde2562283b0df3c39b92767b
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..5fdf253a11
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-resolver-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+3310d435f97ef9769dd5659dae3ef762ee3f0f57
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.10.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.10.Final.jar.sha1
deleted file mode 100644
index fca9fad196..0000000000
--- a/modules/transport-netty4/licenses/netty-transport-4.1.10.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7a2be072e9962c751f90307379c2edb86d0b61a7
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.11.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.11.Final.jar.sha1
new file mode 100644
index 0000000000..4c2de1a7a1
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-4.1.11.Final.jar.sha1
@@ -0,0 +1 @@
+6244fb27cbc24a8d006e9aaaead6b25dcf3aa2e1
\ No newline at end of file
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
index 3d509db656..91bbe1c1a9 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
@@ -101,11 +101,7 @@ class Netty4InternalESLogger extends AbstractInternalLogger {
@Override
public void info(String msg) {
- if (!("Your platform does not provide complete low-level API for accessing direct buffers reliably. " +
- "Unless explicitly requested, heap buffer will always be preferred to avoid potential system " +
- "instability.").equals(msg)) {
- logger.info(msg);
- }
+ logger.info(msg);
}
@Override
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java
index 8adeb665e0..a86bbfbe2b 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java
@@ -320,7 +320,7 @@ public class Netty4Transport extends TcpTransport<Channel> {
@Override
protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) {
final Channel[] channels = new Channel[profile.getNumConnections()];
- final NodeChannels nodeChannels = new NodeChannels(node, channels, profile);
+ final NodeChannels nodeChannels = new NodeChannels(node, channels, profile, transportServiceAdapter::onConnectionClosed);
boolean success = false;
try {
final TimeValue connectTimeout;
diff --git a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy
index 490e6254cd..2fd74d6c76 100644
--- a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy
+++ b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy
@@ -17,7 +17,7 @@
* under the License.
*/
-grant codeBase "${codebase.netty-common-4.1.10.Final.jar}" {
+grant codeBase "${codebase.netty-common-4.1.11.Final.jar}" {
// for reading the system-wide configuration for the backlog of established sockets
permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read";
@@ -25,7 +25,7 @@ grant codeBase "${codebase.netty-common-4.1.10.Final.jar}" {
permission java.net.SocketPermission "*", "accept,connect";
};
-grant codeBase "${codebase.netty-transport-4.1.10.Final.jar}" {
+grant codeBase "${codebase.netty-transport-4.1.11.Final.jar}" {
// Netty NioEventLoop wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854
// the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely!
permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write";
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
index d428b953af..91713ce217 100644
--- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
+++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
@@ -125,8 +125,8 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
// let's see if we can filter based on groups
if (!groups.isEmpty()) {
List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups();
- ArrayList<String> securityGroupNames = new ArrayList<String>();
- ArrayList<String> securityGroupIds = new ArrayList<String>();
+ List<String> securityGroupNames = new ArrayList<>(instanceSecurityGroups.size());
+ List<String> securityGroupIds = new ArrayList<>(instanceSecurityGroups.size());
for (GroupIdentifier sg : instanceSecurityGroups) {
securityGroupNames.add(sg.getGroupName());
securityGroupIds.add(sg.getGroupId());
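This change types the locals as List and pre-sizes both ArrayLists to the number of security groups, so the backing arrays are allocated once instead of growing as elements are added. A minimal stand-alone illustration of the same pre-sizing pattern, using plain strings rather than the AWS SDK's GroupIdentifier:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class PresizedListExample {
        public static void main(String[] args) {
            List<String> groups = Arrays.asList("sg-one", "sg-two", "sg-three");

            // The final size is known up front, so allocate the backing array once.
            List<String> names = new ArrayList<>(groups.size());
            for (String group : groups) {
                names.add(group.toUpperCase());
            }
            System.out.println(names); // [SG-ONE, SG-TWO, SG-THREE]
        }
    }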
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java
index 3c50c3a2a2..225411f86d 100644
--- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java
@@ -249,7 +249,7 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore
deleteBlob(blobNames.iterator().next());
return;
}
- final List<Storage.Objects.Delete> deletions = new ArrayList<>();
+ final List<Storage.Objects.Delete> deletions = new ArrayList<>(Math.min(MAX_BATCHING_REQUESTS, blobNames.size()));
final Iterator<String> blobs = blobNames.iterator();
SocketAccess.doPrivilegedVoidIOException(() -> {
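Here the deletions buffer is sized to at most one batch, on the assumption (consistent with the surrounding code) that it is flushed whenever MAX_BATCHING_REQUESTS entries accumulate. A hedged sketch of that flush-at-capacity loop; processBatch below is a placeholder for the real Storage batch call:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class BatchingExample {
        private static final int MAX_BATCHING_REQUESTS = 3; // illustrative limit

        public static void main(String[] args) {
            List<String> blobNames = Arrays.asList("a", "b", "c", "d", "e");

            // Never larger than one batch, no matter how many blobs there are.
            List<String> batch = new ArrayList<>(Math.min(MAX_BATCHING_REQUESTS, blobNames.size()));
            for (String blob : blobNames) {
                batch.add(blob);
                if (batch.size() == MAX_BATCHING_REQUESTS) {
                    processBatch(batch);
                    batch.clear();
                }
            }
            if (batch.isEmpty() == false) {
                processBatch(batch); // flush the remainder
            }
        }

        private static void processBatch(List<String> batch) {
            System.out.println("deleting " + batch);
        }
    }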
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 2a94b56927..2848f1b247 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -180,7 +180,7 @@ if (fixtureSupported) {
}
// Create an Integration Test suite just for security-based tests
-if (secureFixtureSupported) {
+if (secureFixtureSupported && false) { // This fails due to a vagrant configuration issue - remove the false check to re-enable
// This must execute before the afterEvaluate block from integTestSecure
project.afterEvaluate {
Path elasticsearchKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("elasticsearch.keytab").toAbsolutePath()
diff --git a/qa/smoke-test-reindex-with-painless/build.gradle b/qa/smoke-test-reindex-with-all-modules/build.gradle
index b32f4ee80b..cab01cb941 100644
--- a/qa/smoke-test-reindex-with-painless/build.gradle
+++ b/qa/smoke-test-reindex-with-all-modules/build.gradle
@@ -22,4 +22,6 @@ apply plugin: 'elasticsearch.rest-test'
integTestCluster {
setting 'script.max_compilations_per_minute', '1000'
-}
+ // Whitelist reindexing from the local node so we can test it.
+ setting 'reindex.remote.whitelist', '127.0.0.1:*'
+}
\ No newline at end of file
diff --git a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java b/qa/smoke-test-reindex-with-all-modules/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java
index db1e62a6b1..db1e62a6b1 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/10_script.yaml
index 766e5ff3e7..766e5ff3e7 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/10_script.yaml
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/20_broken.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/20_broken.yaml
index 5ec35b4c9d..5ec35b4c9d 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/20_broken.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/20_broken.yaml
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml
index df514c61ce..df514c61ce 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml
index a982609e3a..a982609e3a 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml
diff --git a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/50_reindex_with_parentchild.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/50_reindex_with_parentchild.yaml
new file mode 100644
index 0000000000..81e142c919
--- /dev/null
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/reindex/50_reindex_with_parentchild.yaml
@@ -0,0 +1,79 @@
+---
+"Reindex from remote with parent/child":
+ - do:
+ indices.create:
+ index: source
+ body:
+ settings:
+ mapping.single_type: false
+ mappings:
+ foo: {}
+ bar:
+ _parent:
+ type: foo
+ - do:
+ indices.create:
+ index: dest
+ body:
+ settings:
+ mapping.single_type: false
+ mappings:
+ foo: {}
+ bar:
+ _parent:
+ type: foo
+ - do:
+ index:
+ index: source
+ type: foo
+ id: 1
+ body: { "text": "test" }
+ - do:
+ index:
+ index: source
+ type: bar
+ id: 1
+ parent: 1
+ body: { "text": "test2" }
+ - do:
+ indices.refresh: {}
+
+ # Fetch the http host. We use the host of the master because we know there will always be a master.
+ - do:
+ cluster.state: {}
+ - set: { master_node: master }
+ - do:
+ nodes.info:
+ metric: [ http ]
+ - is_true: nodes.$master.http.publish_address
+ - set: {nodes.$master.http.publish_address: host}
+ - do:
+ reindex:
+ refresh: true
+ body:
+ source:
+ remote:
+ host: http://${host}
+ index: source
+ dest:
+ index: dest
+ - match: {created: 2}
+
+ - do:
+ search:
+ index: dest
+ body:
+ query:
+ has_parent:
+ parent_type: foo
+ query:
+ match:
+ text: test
+ - match: {hits.total: 1}
+
+ # Make sure reindex closed all the scroll contexts
+ - do:
+ indices.stats:
+ index: source
+ metric: search
+ - match: {indices.source.total.search.open_contexts: 0}
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml
index a4580b9733..a4580b9733 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/20_broken.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/20_broken.yaml
index aa497bff77..aa497bff77 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/20_broken.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/20_broken.yaml
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/30_timeout.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/30_timeout.yaml
index ac1ed14a1f..ac1ed14a1f 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/30_timeout.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/30_timeout.yaml
diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml
index 9960e2cd8c..9960e2cd8c 100644
--- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml
diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle
index b30ea329f1..aa118f1075 100644
--- a/qa/vagrant/build.gradle
+++ b/qa/vagrant/build.gradle
@@ -27,3 +27,5 @@ dependencies {
}
}
}
+
+tasks."vagrantCentos6#packagingTest".onlyIf { false } // fails, see https://github.com/elastic/elasticsearch/issues/24645
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yaml
index d50c3dcb57..4552436634 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yaml
@@ -11,10 +11,6 @@ setup:
properties:
nested_field:
type: nested
- type_2: {}
- type_3:
- _parent:
- type: type_2
---
"Nested inner hits":
@@ -47,38 +43,3 @@ setup:
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.offset: 0 }
- is_false: hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.child
----
-"Parent/child inner hits":
- - skip:
- version: " - 5.99.99"
- reason: mapping.single_type was added in 6.0
-
- - do:
- index:
- index: test
- type: type_2
- id: 1
- body: {"foo": "bar"}
-
- - do:
- index:
- index: test
- type: type_3
- id: 1
- parent: 1
- body: {"bar": "baz"}
-
- - do:
- indices.refresh: {}
-
- - do:
- search:
- body: { "query" : { "has_child" : { "type" : "type_3", "query" : { "match_all" : {} }, "inner_hits" : {} } } }
- - match: { hits.total: 1 }
- - match: { hits.hits.0._index: "test" }
- - match: { hits.hits.0._type: "type_2" }
- - match: { hits.hits.0._id: "1" }
- - is_false: hits.hits.0.inner_hits.type_3.hits.hits.0._index
- - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._type: "type_3" }
- - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._id: "1" }
- - is_false: hits.hits.0.inner_hits.type_3.hits.hits.0._nested
diff --git a/settings.gradle b/settings.gradle
index 7acedd9c98..b2ef30b99d 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -34,6 +34,7 @@ List projects = [
'modules:lang-expression',
'modules:lang-mustache',
'modules:lang-painless',
+ 'modules:parent-join',
'modules:percolator',
'modules:reindex',
'modules:repository-url',
@@ -72,7 +73,7 @@ List projects = [
'qa:smoke-test-ingest-disabled',
'qa:smoke-test-multinode',
'qa:smoke-test-plugins',
- 'qa:smoke-test-reindex-with-painless',
+ 'qa:smoke-test-reindex-with-all-modules',
'qa:smoke-test-tribe-node',
'qa:vagrant',
'qa:wildfly'
diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
index c88b56abdd..fbdab0b126 100644
--- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
+++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
@@ -201,7 +201,7 @@ public class BootstrapForTesting {
codebases.removeAll(excluded);
// parse each policy file, with codebase substitution from the classpath
- final List<Policy> policies = new ArrayList<>();
+ final List<Policy> policies = new ArrayList<>(pluginPolicies.size());
for (URL policyFile : pluginPolicies) {
policies.add(Security.readPolicy(policyFile, codebases));
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 99cd626a7d..99cd626a7d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
index c76d1a5f0d..e27199918f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
@@ -33,6 +33,9 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.PluginsService;
+import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.test.AbstractQueryTestCase;
@@ -40,6 +43,7 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -66,6 +70,10 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
private NamedXContentRegistry xContentRegistry;
protected abstract AB createTestAggregatorBuilder();
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.emptyList();
+ }
+
/**
* Setup for the whole base test class.
*/
@@ -77,7 +85,8 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
- SearchModule searchModule = new SearchModule(settings, false, emptyList());
+ PluginsService pluginsService = new PluginsService(settings, null, null, getPlugins());
+ SearchModule searchModule = new SearchModule(settings, false, pluginsService.filterPlugins(SearchPlugin.class));
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
@@ -145,7 +154,6 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
}
}
-
public void testEqualsAndHashcode() throws IOException {
// TODO we only change name and boost, we should extend by any sub-test supplying a "mutate" method that randomly changes one
// aspect of the object under test
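BaseAggregationTestCase now exposes a getPlugins() hook and feeds the filtered SearchPlugin instances into SearchModule, so aggregation tests that live in modules can register their own builders. The stand-alone sketch below shows the overridable-hook shape with plain strings standing in for Plugin classes; it illustrates the pattern and is not the Elasticsearch test API:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.Collections;
    import java.util.List;

    // The base class builds its registry from whatever subclasses return from getPlugins().
    class BaseTestCase {
        protected Collection<String> getPlugins() {
            return Collections.emptyList(); // default: no extra plugins
        }

        final List<String> buildRegistry() {
            List<String> registry = new ArrayList<>();
            registry.add("core-aggregations");
            registry.addAll(getPlugins()); // subclass contributions end up in the registry
            return registry;
        }
    }

    public class PluginHookExample extends BaseTestCase {
        @Override
        protected Collection<String> getPlugins() {
            return Collections.singleton("parent-join"); // e.g. a module registering its own aggregation
        }

        public static void main(String[] args) {
            System.out.println(new PluginHookExample().buildRegistry());
            // prints [core-aggregations, parent-join]
        }
    }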
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java
index cae34768ec..477f942f6f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java
@@ -17,16 +17,14 @@
* under the License.
*/
-package org.elasticsearch.search.aggregations.bucket;
+package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.search.RestSearchAction;
-import org.elasticsearch.search.aggregations.Aggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
+import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java
index 7646a84ee1..d85f41fd20 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java
@@ -38,8 +38,6 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.children.ParsedChildren;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.ParsedFilter;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
@@ -132,52 +130,55 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
new SearchModule(Settings.EMPTY, false, emptyList()).getNamedWriteables());
private final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(getNamedXContents());
-
- public static List<NamedXContentRegistry.Entry> getNamedXContents() {
- Map<String, ContextParser<Object, ? extends Aggregation>> namedXContents = new HashMap<>();
- namedXContents.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c));
- namedXContents.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c));
- namedXContents.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c));
- namedXContents.put(InternalTDigestPercentiles.NAME, (p, c) -> ParsedTDigestPercentiles.fromXContent(p, (String) c));
- namedXContents.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c));
- namedXContents.put(PercentilesBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c));
- namedXContents.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c));
- namedXContents.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c));
- namedXContents.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c));
- namedXContents.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c));
- namedXContents.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c));
- namedXContents.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c));
- namedXContents.put(DerivativePipelineAggregationBuilder.NAME, (p, c) -> ParsedDerivative.fromXContent(p, (String) c));
- namedXContents.put(InternalBucketMetricValue.NAME, (p, c) -> ParsedBucketMetricValue.fromXContent(p, (String) c));
- namedXContents.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c));
- namedXContents.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c));
- namedXContents.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c));
- namedXContents.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME,
+ protected static final List<NamedXContentRegistry.Entry> namedXContents;
+ static {
+ Map<String, ContextParser<Object, ? extends Aggregation>> map = new HashMap<>();
+ map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c));
+ map.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c));
+ map.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c));
+ map.put(InternalTDigestPercentiles.NAME, (p, c) -> ParsedTDigestPercentiles.fromXContent(p, (String) c));
+ map.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c));
+ map.put(PercentilesBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c));
+ map.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c));
+ map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c));
+ map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c));
+ map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c));
+ map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c));
+ map.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c));
+ map.put(DerivativePipelineAggregationBuilder.NAME, (p, c) -> ParsedDerivative.fromXContent(p, (String) c));
+ map.put(InternalBucketMetricValue.NAME, (p, c) -> ParsedBucketMetricValue.fromXContent(p, (String) c));
+ map.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c));
+ map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c));
+ map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c));
+ map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME,
(p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c));
- namedXContents.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c));
- namedXContents.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
- namedXContents.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
- namedXContents.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
- namedXContents.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
- namedXContents.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
- namedXContents.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
- namedXContents.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c));
- namedXContents.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c));
- namedXContents.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c));
- namedXContents.put(ChildrenAggregationBuilder.NAME, (p, c) -> ParsedChildren.fromXContent(p, (String) c));
- namedXContents.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c));
- namedXContents.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c));
- namedXContents.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c));
- namedXContents.put(GeoGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c));
- namedXContents.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c));
- namedXContents.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c));
- namedXContents.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
-
- return namedXContents.entrySet().stream()
+ map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c));
+ map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
+ map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
+ map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
+ map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
+ map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
+ map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
+ map.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c));
+ map.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c));
+ map.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c));
+ map.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c));
+ map.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c));
+ map.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c));
+ map.put(GeoGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c));
+ map.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c));
+ map.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c));
+ map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
+
+ namedXContents = map.entrySet().stream()
.map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
.collect(Collectors.toList());
}
+ public static List<NamedXContentRegistry.Entry> getNamedXContents() {
+ return namedXContents;
+ }
+
protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData);
/** Return an instance on an unmapped field. */
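The named-XContent entries are now built once in a static initializer and handed out through getNamedXContents(), rather than being rebuilt on every call, which also lets subclasses extend the shared list. A generic sketch of that build-once registry pattern (the entry names are invented, and the unmodifiable wrapper is an extra precaution not present in the original):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class StaticRegistryExample {
        // Built exactly once when the class is loaded, then reused by every caller.
        protected static final List<String> ENTRIES;
        static {
            Map<String, String> map = new HashMap<>();
            map.put("min", "ParsedMin");
            map.put("max", "ParsedMax");
            List<String> entries = new ArrayList<>();
            map.forEach((name, parser) -> entries.add(name + " -> " + parser));
            ENTRIES = Collections.unmodifiableList(entries);
        }

        public static List<String> getEntries() {
            return ENTRIES; // cheap: no rebuilding per call
        }

        public static void main(String[] args) {
            System.out.println(getEntries());
        }
    }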
diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java
index b03e157b01..4f2a3c5b1c 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java
@@ -43,16 +43,15 @@ public final class MockZenPing extends AbstractComponent implements ZenPing {
/** a set of the last discovered pings. used to throttle busy spinning where MockZenPing will keep returning the same results */
private Set<MockZenPing> lastDiscoveredPings = null;
- private volatile PingContextProvider contextProvider;
+ private final PingContextProvider contextProvider;
- public MockZenPing(Settings settings) {
+ public MockZenPing(Settings settings, PingContextProvider contextProvider) {
super(settings);
+ this.contextProvider = contextProvider;
}
@Override
- public void start(PingContextProvider contextProvider) {
- this.contextProvider = contextProvider;
- assert contextProvider != null;
+ public void start() {
synchronized (activeNodesPerCluster) {
boolean added = getActiveNodesForCurrentCluster().add(this);
assert added;
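MockZenPing now takes its PingContextProvider in the constructor, so the field can be final and start() never runs before the provider is set. A small self-contained sketch of that constructor-injection shape (Pinger and ContextProvider are illustrative names only):

    // Constructor injection: the dependency is final and guaranteed non-null before use.
    public class ConstructorInjectionExample {

        interface ContextProvider {
            String clusterName();
        }

        static final class Pinger {
            private final ContextProvider contextProvider; // no volatile, no late assignment

            Pinger(ContextProvider contextProvider) {
                this.contextProvider = java.util.Objects.requireNonNull(contextProvider);
            }

            void start() {
                // Safe to use immediately: the provider was supplied at construction time.
                System.out.println("pinging cluster " + contextProvider.clusterName());
            }
        }

        public static void main(String[] args) {
            new Pinger(() -> "test-cluster").start();
        }
    }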
diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java
index af1bbc94d2..be6792d463 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java
@@ -90,7 +90,7 @@ public class TestZenDiscovery extends ZenDiscovery {
protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService,
UnicastHostsProvider hostsProvider) {
if (USE_MOCK_PINGS.get(settings)) {
- return new MockZenPing(settings);
+ return new MockZenPing(settings, this);
} else {
return super.newZenPing(settings, threadPool, transportService, hostsProvider);
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java
index c2c0f57c94..bea9aab3ff 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java
@@ -110,7 +110,7 @@ public class ClientYamlTestExecutionContext {
ContentType.create(xContentType.mediaTypeWithoutParameters(), StandardCharsets.UTF_8));
} else {
XContentType xContentType = getContentType(headers, STREAMING_CONTENT_TYPES);
- List<BytesRef> bytesRefList = new ArrayList<>();
+ List<BytesRef> bytesRefList = new ArrayList<>(bodies.size());
int totalBytesLength = 0;
for (Map<String, Object> body : bodies) {
BytesRef bytesRef = bodyAsBytesRef(body, xContentType);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
index a25f435af2..210190940d 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
@@ -777,6 +777,11 @@ public final class MockTransportService extends TransportService {
public void close() throws IOException {
connection.close();
}
+
+ @Override
+ public Object getCacheKey() {
+ return connection.getCacheKey();
+ }
}
public Transport getOriginalTransport() {
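The filtered-connection wrapper now forwards getCacheKey() to the wrapped connection, so anything keyed on the underlying connection still resolves to the same entry when the wrapper is in place. A tiny stand-alone sketch of that delegate-the-identity-method pattern (the Connection interface here is invented, not the Transport.Connection API):

    public class DelegatingWrapperExample {

        interface Connection {
            Object getCacheKey();
        }

        static final class RealConnection implements Connection {
            @Override
            public Object getCacheKey() {
                return this; // the real connection is its own cache key
            }
        }

        static final class FilteringConnection implements Connection {
            private final Connection delegate;

            FilteringConnection(Connection delegate) {
                this.delegate = delegate;
            }

            @Override
            public Object getCacheKey() {
                return delegate.getCacheKey(); // forward identity so caches see the same key
            }
        }

        public static void main(String[] args) {
            Connection real = new RealConnection();
            Connection wrapped = new FilteringConnection(real);
            System.out.println(wrapped.getCacheKey() == real.getCacheKey()); // true
        }
    }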
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index afbacf6f63..48d90e3ec6 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -2099,9 +2099,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
@Override
public String executor() {
- if (1 == 1)
- return "same";
-
return randomFrom(executors);
}
};
@@ -2111,4 +2108,59 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
latch.await();
}
+ public void testHandlerIsInvokedOnConnectionClose() throws IOException, InterruptedException {
+ List<String> executors = new ArrayList<>(ThreadPool.THREAD_POOL_TYPES.keySet());
+ CollectionUtil.timSort(executors); // makes sure it's reproducible
+ TransportService serviceC = build(Settings.builder().put("name", "TS_TEST").build(), version0, null, true);
+ serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME,
+ (request, channel) -> {
+ // do nothing
+ });
+ serviceC.start();
+ serviceC.acceptIncomingRequests();
+ CountDownLatch latch = new CountDownLatch(1);
+ TransportResponseHandler<TransportResponse> transportResponseHandler = new TransportResponseHandler<TransportResponse>() {
+ @Override
+ public TransportResponse newInstance() {
+ return TransportResponse.Empty.INSTANCE;
+ }
+
+ @Override
+ public void handleResponse(TransportResponse response) {
+ try {
+ fail("no response expected");
+ } finally {
+ latch.countDown();
+ }
+ }
+
+ @Override
+ public void handleException(TransportException exp) {
+ try {
+ assertTrue(exp.getClass().toString(), exp instanceof NodeDisconnectedException);
+ } finally {
+ latch.countDown();
+ }
+ }
+
+ @Override
+ public String executor() {
+ return randomFrom(executors);
+ }
+ };
+ ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
+ builder.addConnections(1,
+ TransportRequestOptions.Type.BULK,
+ TransportRequestOptions.Type.PING,
+ TransportRequestOptions.Type.RECOVERY,
+ TransportRequestOptions.Type.REG,
+ TransportRequestOptions.Type.STATE);
+ Transport.Connection connection = serviceB.openConnection(serviceC.getLocalNode(), builder.build());
+ serviceB.sendRequest(connection, "action", new TestRequest(randomFrom("fail", "pass")), TransportRequestOptions.EMPTY,
+ transportResponseHandler);
+ connection.close();
+ latch.await();
+ serviceC.close();
+ }
+
}
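The new test blocks on a CountDownLatch until the handler reports the disconnect, which keeps the assertion path deterministic while the callback arrives on another thread. A stripped-down sketch of that latch-based wait, with a plain Thread standing in for the transport layer:

    import java.util.concurrent.CountDownLatch;

    public class LatchCallbackExample {
        public static void main(String[] args) throws InterruptedException {
            CountDownLatch latch = new CountDownLatch(1);

            // Simulates the transport thread failing the pending handler after the connection closes.
            Thread transportThread = new Thread(() -> {
                System.out.println("handleException: node disconnected");
                latch.countDown(); // release the waiting test thread
            });
            transportThread.start();

            latch.await(); // the test does not finish until the handler has run
            System.out.println("handler was invoked, test can complete");
        }
    }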
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java
index f9e5ff8981..765d675f2d 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java
@@ -180,7 +180,8 @@ public class MockTcpTransport extends TcpTransport<MockTcpTransport.MockChannel>
@Override
protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) throws IOException {
final MockChannel[] mockChannels = new MockChannel[1];
- final NodeChannels nodeChannels = new NodeChannels(node, mockChannels, LIGHT_PROFILE); // we always use light here
+ final NodeChannels nodeChannels = new NodeChannels(node, mockChannels, LIGHT_PROFILE,
+ transportServiceAdapter::onConnectionClosed); // we always use light here
boolean success = false;
final MockSocket socket = new MockSocket();
try {